diff --git a/charts/dist/feast-0.1.0.tgz b/charts/dist/feast-0.1.0.tgz
index 2ba968c895..73ccb93971 100644
Binary files a/charts/dist/feast-0.1.0.tgz and b/charts/dist/feast-0.1.0.tgz differ
diff --git a/charts/dist/index.yaml b/charts/dist/index.yaml
index dc07348c22..0594c1d59c 100755
--- a/charts/dist/index.yaml
+++ b/charts/dist/index.yaml
@@ -3,11 +3,11 @@ entries:
   feast:
   - apiVersion: v1
     appVersion: 0.1.0
-    created: 2019-03-15T19:35:11.848995-07:00
+    created: 2019-03-21T18:38:21.941744229+08:00
     description: A Helm chart to install Feast on kubernetes
-    digest: cbc97b4be6a84a33055900b620d2f6b6176ee81d86eb2e8bfcc9096ea040ff47
+    digest: 68584c58f742f3ed88040ae8658d549a2ee643334f01c684bad83e2937eae19e
     name: feast
     urls:
     - feast-0.1.0.tgz
     version: 0.1.0
-generated: 2019-03-15T19:35:11.844526-07:00
+generated: 2019-03-21T18:38:21.937413876+08:00
diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml
index dca2c5ea50..532f50f90f 100644
--- a/charts/feast/templates/core-deploy.yaml
+++ b/charts/feast/templates/core-deploy.yaml
@@ -91,6 +91,7 @@ spec:
           value: "{{ .Values.core.jobs.monitoring.period }}"
         - name: JOB_MONITOR_INITIAL_DELAY_MS
           value: "{{ .Values.core.jobs.monitoring.initialDelay }}"
+        {{- if .Values.store }}
         {{- if .Values.store.serving }}
         - name: STORE_SERVING_TYPE
           value: {{ .Values.store.serving.type }}
@@ -109,6 +110,7 @@ spec:
         - name: STORE_ERRORS_OPTIONS
          value: {{ .Values.store.errors.options | toJson}}
         {{- end }}
+        {{- end }}
        - name: STATSD_HOST
          value: {{ .Values.statsd.host }}
        - name: STATSD_PORT
diff --git a/cli/feast/pkg/parse/yaml.go b/cli/feast/pkg/parse/yaml.go
index f02698171e..b8d9ac252c 100644
--- a/cli/feast/pkg/parse/yaml.go
+++ b/cli/feast/pkg/parse/yaml.go
@@ -34,7 +34,6 @@ func YamlToFeatureSpec(in []byte) (*specs.FeatureSpec, error) {
 	if err != nil {
 		return nil, err
 	}
-	ymlMap["granularity"] = granularityOf(ymlMap["granularity"].(string))
 	ymlMap["valueType"] = valueTypeOf(ymlMap["valueType"].(string))
 	if err != nil {
 		return nil, err
@@ -131,10 +130,6 @@ func YamlToImportSpec(in []byte) (*specs.ImportSpec, error) {
 	return &is, err
 }
 
-func granularityOf(str string) types.Granularity_Enum {
-	return types.Granularity_Enum(types.Granularity_Enum_value[str])
-}
-
 func valueTypeOf(str string) types.ValueType_Enum {
 	return types.ValueType_Enum(types.ValueType_Enum_value[str])
 }
diff --git a/cli/feast/pkg/parse/yaml_test.go b/cli/feast/pkg/parse/yaml_test.go
index 00331668ea..0c7eaf7c01 100644
--- a/cli/feast/pkg/parse/yaml_test.go
+++ b/cli/feast/pkg/parse/yaml_test.go
@@ -34,13 +34,12 @@ func TestYamlToFeatureSpec(t *testing.T) {
 	}{
 		{
 			name: "valid yaml",
-			input: []byte(`id: test.none.test_feature_two
+			input: []byte(`id: test.test_feature_two
 name: test_feature_two
 entity: test
 owner: bob@example.com
 description: testing feature
 valueType: INT64
-granularity: NONE
 uri: https://github.com/bob/example
 dataStores:
   serving:
@@ -48,12 +47,11 @@ dataStores:
   warehouse:
     id: BIGQUERY`),
 			expected: &specs.FeatureSpec{
-				Id: "test.none.test_feature_two",
+				Id: "test.test_feature_two",
 				Owner: "bob@example.com",
 				Name: "test_feature_two",
 				Description: "testing feature",
 				Uri: "https://github.com/bob/example",
-				Granularity: types.Granularity_NONE,
 				ValueType: types.ValueType_INT64,
 				Entity: "test",
 				DataStores: &specs.DataStores{
@@ -257,7 +255,7 @@ schema:
   - name: timestamp
   - name: driver_id
   - name: last_opportunity
-    featureId: driver.none.last_opportunity`),
+    featureId: driver.last_opportunity`),
 			expected: &specs.ImportSpec{
 				Type: "file",
 				JobOptions: map[string]string{
@@ -272,7 +270,7 @@ schema:
 				Fields: []*specs.Field{
 					{Name: "timestamp"},
 					{Name: "driver_id"},
-					{Name: "last_opportunity", FeatureId: "driver.none.last_opportunity"},
+					{Name: "last_opportunity", FeatureId: "driver.last_opportunity"},
 				},
 				EntityIdColumn: "driver_id",
 				Timestamp: &specs.Schema_TimestampValue{
diff --git a/cli/feast/pkg/printer/printer_test.go b/cli/feast/pkg/printer/printer_test.go
index a3ff2a263d..07332324c1 100644
--- a/cli/feast/pkg/printer/printer_test.go
+++ b/cli/feast/pkg/printer/printer_test.go
@@ -23,12 +23,11 @@ func TestPrintFeature(t *testing.T) {
 			name: "with storage",
 			input: &core.UIServiceTypes_FeatureDetail{
 				Spec: &specs.FeatureSpec{
-					Id: "test.none.test_feature_two",
+					Id: "test.test_feature_two",
 					Owner: "bob@example.com",
 					Name: "test_feature_two",
 					Description: "testing feature",
 					Uri: "https://github.com/bob/example",
-					Granularity: types.Granularity_NONE,
 					ValueType: types.ValueType_INT64,
 					Entity: "test",
 					DataStores: &specs.DataStores{
@@ -45,7 +44,7 @@ func TestPrintFeature(t *testing.T) {
 				LastUpdated: &timestamp.Timestamp{Seconds: 1},
 				Created: &timestamp.Timestamp{Seconds: 1},
 			},
-			expected: fmt.Sprintf(`Id: test.none.test_feature_two
+			expected: fmt.Sprintf(`Id: test.test_feature_two
 Entity: test
 Owner: bob@example.com
 Description: testing feature
@@ -65,12 +64,11 @@ Related Jobs:
 			name: "no storage",
 			input: &core.UIServiceTypes_FeatureDetail{
 				Spec: &specs.FeatureSpec{
-					Id: "test.none.test_feature_two",
+					Id: "test.test_feature_two",
 					Owner: "bob@example.com",
 					Name: "test_feature_two",
 					Description: "testing feature",
 					Uri: "https://github.com/bob/example",
-					Granularity: types.Granularity_NONE,
 					ValueType: types.ValueType_INT64,
 					Entity: "test",
 				},
@@ -79,7 +77,7 @@ Related Jobs:
 				LastUpdated: &timestamp.Timestamp{Seconds: 1},
 				Created: &timestamp.Timestamp{Seconds: 1},
 			},
-			expected: fmt.Sprintf(`Id: test.none.test_feature_two
+			expected: fmt.Sprintf(`Id: test.test_feature_two
 Entity: test
 Owner: bob@example.com
 Description: testing feature
diff --git a/core/pom.xml b/core/pom.xml
index 65565286b3..0fd82260ff 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -16,8 +16,8 @@
   ~ -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>feast</groupId>
@@ -31,8 +31,45 @@
     <plugins>
       <plugin>
-        <groupId>org.springframework.boot</groupId>
-        <artifactId>spring-boot-maven-plugin</artifactId>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.4.3</version>
+        <configuration>
+          <transformers>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+              <resource>META-INF/spring.handlers</resource>
+            </transformer>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+              <resource>META-INF/spring.factories</resource>
+            </transformer>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+              <resource>META-INF/spring.schemas</resource>
+            </transformer>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+              <resource>META-INF/spring.components</resource>
+            </transformer>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+              <mainClass>feast.core.CoreApplication</mainClass>
+            </transformer>
+          </transformers>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+        <version>${springBootVersion}</version>
       </plugin>
       <plugin>
         <groupId>org.xolstice.maven.plugins</groupId>
diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
index 2787f3e262..16732412a6 100644
--- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
+++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
@@ -283,7 +283,7 @@ public void applyFeature(
   }
 
   public FeatureSpec applyDefaultStores(FeatureSpec featureSpec) {
-    DataStores.Builder dataStoreBuilder = DataStores.newBuilder();
+    DataStores.Builder dataStoreBuilder = featureSpec.getDataStores().toBuilder();
     if (Strings.isNullOrEmpty(featureSpec.getDataStores().getServing().getId())) {
       log.info("Feature has no serving store specified using default");
       if (storageSpecs.getServingStorageSpec() != null) {
diff --git a/core/src/main/java/feast/core/job/StatsdMetricPusher.java b/core/src/main/java/feast/core/job/StatsdMetricPusher.java
index 17e4558bc4..7b8c193e33 100644
--- a/core/src/main/java/feast/core/job/StatsdMetricPusher.java
+++ b/core/src/main/java/feast/core/job/StatsdMetricPusher.java
@@ -43,7 +43,7 @@ public void pushMetrics(List metrics) {
     // `row:result` <- all
     // `scope:scope_id:result` <- per scope, either feature or entity
     // for examples:
-    // 1. feature:driver.minute.ping_sequence_weak_dir_change_mean:error
+    // 1. feature:driver.ping_sequence_weak_dir_change_mean:error
     // 2. entity:driver:stored
     // currently there are 3 results:
     // 1. valid
diff --git a/core/src/main/java/feast/core/model/FeatureInfo.java b/core/src/main/java/feast/core/model/FeatureInfo.java
index d2ac1ed68d..ac497019d3 100644
--- a/core/src/main/java/feast/core/model/FeatureInfo.java
+++ b/core/src/main/java/feast/core/model/FeatureInfo.java
@@ -23,7 +23,6 @@
 import feast.specs.FeatureSpecProto.DataStore;
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
-import feast.types.GranularityProto.Granularity;
 import feast.types.ValueProto.ValueType;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
@@ -62,9 +61,6 @@ public class FeatureInfo extends AbstractTimestampEntity {
   @Column(name = "uri", nullable = false)
   private String uri;
 
-  @Enumerated(EnumType.STRING)
-  private Granularity.Enum granularity;
-
   @Enumerated(EnumType.STRING)
   private ValueType.Enum valueType;
 
@@ -120,7 +116,6 @@ public FeatureInfo(
     this.owner = spec.getOwner();
     this.description = spec.getDescription();
     this.uri = spec.getUri();
-    this.granularity = spec.getGranularity();
     this.valueType = spec.getValueType();
     this.entity = entityInfo;
     this.featureGroup = featureGroupInfo;
@@ -144,7 +139,6 @@ public FeatureInfo(FeatureInfo other) {
     this.owner = other.owner;
     this.description = other.description;
     this.uri = other.uri;
-    this.granularity = other.granularity;
     this.valueType = other.valueType;
     this.entity = other.entity;
     this.featureGroup = other.featureGroup;
@@ -183,7 +177,6 @@ public FeatureSpec getFeatureSpec() {
         .setOwner(owner)
         .setDescription(description)
         .setUri(uri)
-        .setGranularity(granularity)
         .setValueType(valueType)
         .setEntity(entity.getName())
         .addAllTags(convertTagStringToList(tags))
@@ -250,8 +243,8 @@ private String createBigqueryViewLink(StorageInfo warehouseStore) {
     String dataset = opts.get(BigQueryStorageManager.OPT_BIGQUERY_DATASET);
 
     return String.format(
-        "https://bigquery.cloud.google.com/table/%s:%s.%s_%s_view",
-        projectId, dataset, entity.getName(), granularity.toString().toLowerCase());
+        "https://bigquery.cloud.google.com/table/%s:%s.%s_view",
+        projectId, dataset, entity.getName());
   }
 
   /**
@@ -276,7 +269,6 @@ private boolean isLegalUpdate(FeatureSpec update) {
     FeatureSpec spec = this.getFeatureSpec();
     return spec.getName().equals(update.getName())
         && spec.getEntity().equals(update.getEntity())
-        && spec.getGranularity().equals(update.getGranularity())
         && spec.getValueType().equals(update.getValueType())
         && spec.getGroup().equals(update.getGroup())
         && spec.getOptionsMap().equals(update.getOptionsMap())
diff --git a/core/src/main/java/feast/core/storage/BigQueryStorageManager.java b/core/src/main/java/feast/core/storage/BigQueryStorageManager.java
index 197e0016ae..00f64f8a86 100644
--- a/core/src/main/java/feast/core/storage/BigQueryStorageManager.java
+++ b/core/src/main/java/feast/core/storage/BigQueryStorageManager.java
@@ -191,8 +191,7 @@ private Field createFeatureField(FeatureSpec featureSpec) {
 
   private String createTableName(FeatureSpec featureSpec) {
     String entityName = featureSpec.getEntity().toLowerCase();
-    String granularity = featureSpec.getGranularity().toString().toLowerCase();
-    return String.format("%s_%s", entityName, granularity);
+    return String.format("%s", entityName);
   }
 
   private void createOrUpdateView(String tableName, List features) {
diff --git a/core/src/main/java/feast/core/storage/PostgresStorageManager.java b/core/src/main/java/feast/core/storage/PostgresStorageManager.java
index 9fef9f0d2c..8875510d17 100644
--- a/core/src/main/java/feast/core/storage/PostgresStorageManager.java
+++ b/core/src/main/java/feast/core/storage/PostgresStorageManager.java
@@ -91,7 +91,6 @@ private String createFieldType(FeatureSpec featureSpec) {
 
   private String createTableName(FeatureSpec featureSpec) {
     String entityName = featureSpec.getEntity().toLowerCase();
-    String granularity = featureSpec.getGranularity().toString().toLowerCase();
-    return String.format("%s_%s", entityName, granularity);
+    return String.format("%s", entityName);
   }
 }
diff --git a/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java b/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java
index 201ad220ca..ea73d89d44 100644
--- a/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java
+++ b/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java
@@ -16,8 +16,6 @@
  */
 package feast.core.training;
 
-import static java.util.stream.Collectors.groupingBy;
-
 import com.google.protobuf.Timestamp;
 import com.hubspot.jinjava.Jinjava;
 import feast.core.DatasetServiceProto.FeatureSet;
@@ -26,13 +24,10 @@
 import feast.core.model.StorageInfo;
 import feast.specs.FeatureSpecProto.FeatureSpec;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity;
 import java.time.Instant;
 import java.time.ZoneId;
 import java.time.format.DateTimeFormatter;
 import java.time.temporal.ChronoUnit;
-import java.util.ArrayList;
-import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -46,8 +41,6 @@ public class BigQueryDatasetTemplater {
   private final Jinjava jinjava;
   private final String template;
   private final DateTimeFormatter formatter;
-  private Comparator<FeatureGroup> featureGroupComparator =
-      new FeatureGroupTemplateComparator().reversed();
 
   public BigQueryDatasetTemplater(
       Jinjava jinjava, String templateString, FeatureInfoRepository featureInfoRepository) {
@@ -77,40 +70,29 @@ public String createQuery(
       throw new NoSuchElementException("features not found: " + featureIds);
     }
 
-    List<Feature> features = toFeatureTemplates(featureInfos);
-    List<FeatureGroup> featureGroups = groupFeatureTemplate(features);
+    String tableId = getBqTableId(featureInfos.get(0));
+    Features features = new Features(featureIds, tableId);
 
     String startDateStr = formatDateString(startDate);
     String endDateStr = formatDateString(endDate);
     String limitStr = (limit != 0) ? String.valueOf(limit) : null;
-    return renderTemplate(featureGroups, startDateStr, endDateStr, limitStr);
+    return renderTemplate(features, startDateStr, endDateStr, limitStr);
   }
 
   private String renderTemplate(
-      List<FeatureGroup> featureGroups, String startDateStr, String endDateStr, String limitStr) {
+      Features features, String startDateStr, String endDateStr, String limitStr) {
     Map<String, Object> context = new HashMap<>();
 
-    featureGroups.sort(featureGroupComparator);
-    context.put("feature_groups", featureGroups);
+    context.put("feature_set", features);
     context.put("start_date", startDateStr);
     context.put("end_date", endDateStr);
     context.put("limit", limitStr);
 
     return jinjava.render(template, context);
   }
 
-  private List<Feature> toFeatureTemplates(List<FeatureInfo> featureInfos) {
-    return featureInfos
-        .stream()
-        .map(
-            fi -> {
-              StorageInfo whStorage = fi.getWarehouseStore();
-              String tableId = getBqTableId(fi.getFeatureSpec(), whStorage);
-              return new Feature(fi.getId(), fi.getGranularity(), tableId);
-            })
-        .collect(Collectors.toList());
-  }
+  private String getBqTableId(FeatureInfo featureInfo) {
+    StorageInfo whStorage = featureInfo.getWarehouseStore();
 
-  private String getBqTableId(FeatureSpec featureSpec, StorageInfo whStorage) {
     String type = whStorage.getType();
     if (!"bigquery".equals(type)) {
       throw new IllegalArgumentException(
@@ -121,23 +103,8 @@ private String getBqTableId(FeatureSpec featureSpec, StorageInfo whStorage) {
     Map<String, String> options = storageSpec.getOptionsMap();
     String projectId = options.get("project");
     String dataset = options.get("dataset");
-    String entityName = featureSpec.getEntity().toLowerCase();
-    String granularity = featureSpec.getGranularity().toString().toLowerCase();
-    return String.format("%s.%s.%s_%s", projectId, dataset, entityName, granularity);
-  }
-
-  private List<FeatureGroup> groupFeatureTemplate(List<Feature> features) {
-    Map<String, List<Feature>> groupedFeature =
-        features.stream().collect(groupingBy(Feature::getTableId));
-    List<FeatureGroup> featureGroups = new ArrayList<>();
-    for (Map.Entry<String, List<Feature>> entry : groupedFeature.entrySet()) {
-      String tableId = entry.getKey();
-      Granularity.Enum granularity = entry.getValue().get(0).granularity;
-      FeatureGroup group = new FeatureGroup(tableId, granularity, entry.getValue());
-
-      featureGroups.add(group);
-    }
-    return featureGroups;
+    String entityName = featureInfo.getFeatureSpec().getEntity().toLowerCase();
+    return String.format("%s.%s.%s", projectId, dataset, entityName);
   }
 
   private String formatDateString(Timestamp timestamp) {
@@ -146,55 +113,16 @@ private String formatDateString(Timestamp timestamp) {
   }
 
   @Getter
-  static final class FeatureGroup {
-    final String tableId;
-    final Granularity.Enum granularity;
-    final List<Feature> features;
-
-    public FeatureGroup(String tableId, Granularity.Enum granularity, List<Feature> features) {
-      this.tableId = tableId;
-      this.granularity = granularity;
-      this.features = features;
-    }
-
-    public String getTempTable() {
-      return tableId.replaceAll("[^a-zA-Z0-9]", "_");
-    }
-
-    public String getGranularityStr() {
-      return granularity.toString().toLowerCase();
-    }
-  }
-
-  @Getter
-  static final class Feature {
-    final String featureId;
+  static final class Features {
+    final List<String> columns;
     final String tableId;
-    final Granularity.Enum granularity;
 
-    public Feature(String featureId, Granularity.Enum granularity, String tableId) {
-      this.featureId = featureId;
+    public Features(List<String> featureIds, String tableId) {
+      this.columns = featureIds.stream()
+          .map(f -> f.replace(".", "_"))
+          .collect(Collectors.toList());
       this.tableId = tableId;
-      this.granularity = granularity;
-    }
-
-    public String getName() {
-      return featureId.split("\\.")[2];
-    }
-
-    public String getColumn() {
-      return featureId.replace(".", "_");
     }
   }
-
-  private static final class FeatureGroupTemplateComparator implements Comparator<FeatureGroup> {
-    @Override
-    public int compare(FeatureGroup o1, FeatureGroup o2) {
-      if (o1.granularity != o2.granularity) {
-        return o1.granularity.getNumber() - o2.granularity.getNumber();
-      }
-
-      return o1.tableId.compareTo(o2.tableId);
-    }
-  }
 }
diff --git a/core/src/main/java/feast/core/validators/SpecValidator.java b/core/src/main/java/feast/core/validators/SpecValidator.java
index 7ec2f33d46..3e70e9922d 100644
--- a/core/src/main/java/feast/core/validators/SpecValidator.java
+++ b/core/src/main/java/feast/core/validators/SpecValidator.java
@@ -96,16 +96,13 @@ public void validateFeatureSpec(FeatureSpec spec) throws IllegalArgumentException {
 
     // check id validity
     String[] idSplit = spec.getId().split("\\.");
-    checkArgument(idSplit.length == 3, "Id must contain entity, granularity, name");
+    checkArgument(idSplit.length == 2, "Id must contain entity, name");
     checkArgument(
         idSplit[0].equals(spec.getEntity()),
-        "Id must be in format entity.granularity.name, entity in Id does not match entity provided.");
+        "Id must be in format entity.name, entity in Id does not match entity provided.");
     checkArgument(
-        idSplit[1].equals(spec.getGranularity().toString().toLowerCase()),
-        "Id must be in format entity.granularity.name, granularity in Id does not match granularity provided.");
-    checkArgument(
-        idSplit[2].equals(spec.getName()),
-        "Id must be in format entity.granularity.name, name in Id does not match name provided.");
+        idSplit[1].equals(spec.getName()),
+        "Id must be in format entity.name, name in Id does not match name provided.");
 
     // check if referenced objects exist
     checkArgument(
diff --git a/core/src/main/resources/templates/bq_training.tmpl b/core/src/main/resources/templates/bq_training.tmpl
index faad4f18a4..0a7ce6d322 100644
--- a/core/src/main/resources/templates/bq_training.tmpl
+++ b/core/src/main/resources/templates/bq_training.tmpl
@@ -1,41 +1,12 @@
-WITH
-  {% for feature_group in feature_groups -%}
-  raw_{{ feature_group.tempTable }} AS
-  (SELECT
-    ROW_NUMBER() OVER (PARTITION BY id, event_timestamp ORDER BY created_timestamp DESC) rownum,
-    id, event_timestamp, created_timestamp
-    {% for feature in feature_group.features -%}
-    , FIRST_VALUE({{feature.name}} IGNORE NULLS) OVER w AS {{feature.column}}
-    {% endfor -%}
-  FROM `{{feature_group.table_id}}`
-  {% if feature_group.granularityStr != "none" -%}
-  WHERE event_timestamp >= TIMESTAMP("{{ start_date }}") AND event_timestamp <= TIMESTAMP(DATETIME_ADD("{{ end_date }}", INTERVAL 1 DAY))
-  {%- endif %}
-  WINDOW w AS ( PARTITION BY id, event_timestamp ORDER BY event_timestamp DESC )),
-  {{ feature_group.temp_table }} AS (
-  SELECT * FROM raw_{{ feature_group.temp_table }} WHERE rownum = 1 ) {{ "," if not loop.last }}
-  {% endfor %}
 SELECT
-  {{ feature_groups[0].temp_table }}.id,
-  {{ feature_groups[0].temp_table }}.event_timestamp
-  {% for feature_group in feature_groups -%}
-  {% for feature in feature_group.features -%}
-  ,{{ feature_group.temp_table }}.{{ feature.column }}
-  {%- endfor %}
+  {{ feature_set.tableId }}.id,
+  {{ feature_set.tableId }}.event_timestamp
+  {% for feature in feature_set.columns -%}
+  ,{{ feature }}
 {%- endfor %}
 FROM
-  {{ feature_groups[0].temp_table }}
-{% for feature_group in feature_groups -%}
-{% if loop.index > 1 %}
-LEFT JOIN
-  {{ feature_group.temp_table }}
-ON
-  {{ feature_groups[0].temp_table }}.id = {{ feature_group.temp_table }}.id
-  {% if feature_group.granularityStr != "none" -%}
-  AND TIMESTAMP_TRUNC({{feature_groups[0].temp_table}}.event_timestamp, {{ feature_group.granularityStr|upper }}) = {{ feature_group.temp_table }}.event_timestamp
-  {%- endif %}
-{%- endif %}
-{%- endfor %}
+  {{ feature_set.tableId }}
+WHERE event_timestamp >= TIMESTAMP("{{ start_date }}") AND event_timestamp <= TIMESTAMP(DATETIME_ADD("{{ end_date }}", INTERVAL 1 DAY))
 {% if limit is not none -%}
 LIMIT {{ limit }}
 {%- endif %}
\ No newline at end of file
diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java
index 42e22b2566..d80b5b7b42 100644
--- a/core/src/test/java/feast/core/CoreApplicationTest.java
+++ b/core/src/test/java/feast/core/CoreApplicationTest.java
@@ -22,7 +22,6 @@
 import feast.specs.ImportSpecProto.ImportSpec;
 import feast.specs.ImportSpecProto.Schema;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity.Enum;
 import feast.types.ValueProto.ValueType;
 import io.grpc.ManagedChannel;
 import io.grpc.ManagedChannelBuilder;
@@ -94,20 +93,19 @@ public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws Exception {
     EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build();
     FeatureSpec featureSpec = FeatureSpec.newBuilder()
-        .setId("test.none.int64")
+        .setId("test.int64")
         .setName("int64")
         .setEntity("test")
-        .setGranularity(Enum.NONE)
         .setValueType(ValueType.Enum.INT64)
         .setOwner("hermione@example.com")
         .setDescription("Test is a test")
-        .setUri("http://example.com/test.none.int64").build();
+        .setUri("http://example.com/test.int64").build();
     ImportSpec importSpec = ImportSpec.newBuilder()
         .setSchema(Schema.newBuilder()
             .setEntityIdColumn("id")
             .setTimestampValue(Timestamp.getDefaultInstance())
             .addFields(Field.newBuilder().setName("id"))
-            .addFields(Field.newBuilder().setName("a").setFeatureId("test.none.int64")))
+            .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64")))
         .addEntities("test")
         .setType("file.csv")
         .putSourceOptions("path", "/tmp/foobar").build();
diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java
index 93298c7604..61f55202ad 100644
--- a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java
+++ b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java
@@ -23,7 +23,6 @@
 import feast.specs.ImportSpecProto.ImportSpec;
 import feast.specs.ImportSpecProto.Schema;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity.Enum;
 import feast.types.ValueProto.ValueType;
 import io.grpc.ManagedChannel;
 import io.grpc.ManagedChannelBuilder;
@@ -84,20 +83,19 @@ public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws Exception {
     EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build();
     FeatureSpec featureSpec = FeatureSpec.newBuilder()
-        .setId("test.none.int64")
+        .setId("test.int64")
         .setName("int64")
         .setEntity("test")
-        .setGranularity(Enum.NONE)
         .setValueType(ValueType.Enum.INT64)
         .setOwner("hermione@example.com")
         .setDescription("Test is a test")
-        .setUri("http://example.com/test.none.int64").build();
+        .setUri("http://example.com/test.int64").build();
     ImportSpec importSpec = ImportSpec.newBuilder()
         .setSchema(Schema.newBuilder()
             .setEntityIdColumn("id")
             .setTimestampValue(Timestamp.getDefaultInstance())
             .addFields(Field.newBuilder().setName("id"))
-            .addFields(Field.newBuilder().setName("a").setFeatureId("test.none.int64")))
+            .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64")))
         .addEntities("test")
         .setType("file.csv")
         .putSourceOptions("path", "/tmp/foobar").build();
diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java
index 5b4c0801e9..6c8ee325aa 100644
--- a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java
+++ b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java
@@ -23,7 +23,6 @@
 import feast.specs.ImportSpecProto.ImportSpec;
 import feast.specs.ImportSpecProto.Schema;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity.Enum;
 import feast.types.ValueProto.ValueType;
 import io.grpc.ManagedChannel;
 import io.grpc.ManagedChannelBuilder;
@@ -92,20 +91,19 @@ public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws Exception {
     EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build();
     FeatureSpec featureSpec = FeatureSpec.newBuilder()
-        .setId("test.none.int64")
+        .setId("test.int64")
         .setName("int64")
         .setEntity("test")
-        .setGranularity(Enum.NONE)
         .setValueType(ValueType.Enum.INT64)
         .setOwner("hermione@example.com")
         .setDescription("Test is a test")
-        .setUri("http://example.com/test.none.int64").build();
+        .setUri("http://example.com/test.int64").build();
     ImportSpec importSpec = ImportSpec.newBuilder()
         .setSchema(Schema.newBuilder()
             .setEntityIdColumn("id")
             .setTimestampValue(Timestamp.getDefaultInstance())
             .addFields(Field.newBuilder().setName("id"))
-            .addFields(Field.newBuilder().setName("a").setFeatureId("test.none.int64")))
+            .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64")))
         .addEntities("test")
         .setType("file.csv")
         .putSourceOptions("path", "/tmp/foobar").build();
diff --git a/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java b/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java
index 5740648848..5bbe7f0d48 100644
--- a/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java
+++ b/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java
@@ -63,8 +63,8 @@ public void setUp() throws Exception {
     validFeatureSet =
         FeatureSet.newBuilder()
             .setEntityName("myentity")
-            .addFeatureIds("myentity.none.feature1")
-            .addFeatureIds("myentity.second.feature2")
+            .addFeatureIds("myentity.feature1")
+            .addFeatureIds("myentity.feature2")
             .build();
   }
 
@@ -169,8 +169,8 @@ public void shouldThrowExceptionIfFeatureSetHasDifferentEntity() {
     FeatureSet emptyFeatureSet =
         FeatureSet.newBuilder()
             .setEntityName("myentity")
-            .addFeatureIds("myentity.none.feature1")
-            .addFeatureIds("driver.none.feature2")
+            .addFeatureIds("myentity.feature1")
+            .addFeatureIds("driver.feature2")
             .build();
 
     CreateDatasetRequest request =
diff --git a/core/src/test/java/feast/core/grpc/UIServiceImplTest.java b/core/src/test/java/feast/core/grpc/UIServiceImplTest.java
index cca090bec7..00f02f9af6 100644
--- a/core/src/test/java/feast/core/grpc/UIServiceImplTest.java
+++ b/core/src/test/java/feast/core/grpc/UIServiceImplTest.java
@@ -174,7 +174,7 @@ public void listEntities_shouldReturnClearErrorMessageForAnyFailure() {
 
   @Test
   public void getFeature_shouldReturnCorrectFeatureDetail() {
-    String featureId = "entity.granularity.feature";
+    String featureId = "entity.feature";
     FeatureInfo featureInfo = createFeatureInfo(featureId);
 
     when(specService.getFeatures(Collections.singletonList(featureId)))
@@ -221,8 +221,8 @@ public void getFeature_shouldReturnErrorForAnyFailure() {
 
   @Test
   public void listFeature_shouldReturnAllFeatures() {
-    String featureId1 = "entity.granularity.feature1";
-    String featureId2 = "entity.granularity.feature2";
+    String featureId1 = "entity.feature1";
+    String featureId2 = "entity.feature2";
 
     FeatureInfo featureInfo1 = createFeatureInfo(featureId1);
     FeatureInfo featureInfo2 = createFeatureInfo(featureId2);
diff --git a/core/src/test/java/feast/core/model/FeatureInfoTest.java b/core/src/test/java/feast/core/model/FeatureInfoTest.java
index a85b99711f..6e619a5791 100644
--- a/core/src/test/java/feast/core/model/FeatureInfoTest.java
+++ b/core/src/test/java/feast/core/model/FeatureInfoTest.java
@@ -22,7 +22,6 @@
 import feast.specs.FeatureSpecProto.DataStore;
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
-import feast.types.GranularityProto.Granularity;
 import feast.types.ValueProto.ValueType;
 import org.junit.Before;
 import org.junit.Rule;
@@ -49,12 +48,11 @@ public void setUp() {
     entityInfo.setName("entity");
 
     featureInfo = new FeatureInfo();
-    featureInfo.setId("entity.NONE.name");
+    featureInfo.setId("entity.name");
     featureInfo.setName("name");
     featureInfo.setOwner("owner");
     featureInfo.setDescription("desc");
     featureInfo.setUri("uri");
-    featureInfo.setGranularity(Granularity.Enum.NONE);
     featureInfo.setValueType(ValueType.Enum.BYTES);
     featureInfo.setEntity(entityInfo);
     featureInfo.setOptions("{}");
@@ -82,13 +80,12 @@ public void setUp() {
 
     featureSpec =
         FeatureSpec.newBuilder()
-            .setId("entity.NONE.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("desc")
             .setEntity("entity")
             .setUri("uri")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .addTags("tag1")
             .addTags("tag2")
@@ -133,12 +130,11 @@ public void shouldBuildCorrespondingResolvedSpec() {
     featureGroupInfo.setWarehouseStore(warehouseStorage);
     featureGroupInfo.setTags("inherited");
     FeatureInfo featureInfo = new FeatureInfo();
-    featureInfo.setId("entity.NONE.name");
+    featureInfo.setId("entity.name");
     featureInfo.setName("name");
     featureInfo.setOwner("owner");
     featureInfo.setDescription("desc");
     featureInfo.setUri("uri");
-    featureInfo.setGranularity(Granularity.Enum.NONE);
     featureInfo.setValueType(ValueType.Enum.BYTES);
     featureInfo.setEntity(entityInfo);
     featureInfo.setOptions("{}");
@@ -157,14 +153,13 @@ public void shouldBuildCorrespondingResolvedSpec() {
 
     FeatureSpec expected =
         FeatureSpec.newBuilder()
-            .setId("entity.NONE.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("desc")
             .setEntity("entity")
             .setUri("uri")
             .setGroup("testGroup")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .addTags("tag1")
             .addTags("tag2")
@@ -187,13 +182,12 @@ public void shouldUpdateMutableFields() {
 
     FeatureSpec update =
         FeatureSpec.newBuilder()
-            .setId("entity.NONE.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner2")
             .setDescription("overwrite")
             .setEntity("entity")
             .setUri("new_uri")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .addTags("new_tag")
             .setDataStores(dataStores)
@@ -216,13 +210,12 @@ public void shouldThrowExceptionIfImmutableFieldsChanged() {
 
     FeatureSpec update =
         FeatureSpec.newBuilder()
-            .setId("entity.NONE.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner2")
             .setDescription("overwrite")
             .setEntity("entity")
             .setUri("new_uri")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.INT32)
             .addTags("new_tag")
             .setDataStores(dataStores)
@@ -238,13 +231,12 @@ public void shouldThrowExceptionIfImmutableFieldsChangedToNull() {
     FeatureSpec update =
         FeatureSpec.newBuilder()
-            .setId("entity.NONE.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner2")
             .setDescription("overwrite")
             .setEntity("entity")
             .setUri("new_uri")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .addTags("new_tag")
             .build();
diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java
index 43f1450f7b..bf7d7e7a7a 100644
--- a/core/src/test/java/feast/core/service/SpecServiceTest.java
+++ b/core/src/test/java/feast/core/service/SpecServiceTest.java
@@ -36,7 +36,6 @@
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity;
 import feast.types.ValueProto.ValueType;
 import org.junit.Before;
 import org.junit.Rule;
@@ -86,12 +85,11 @@ private StorageInfo newTestStorageInfo(String id, String type) {
 
   private FeatureInfo newTestFeatureInfo(String name) {
     FeatureInfo feature = new FeatureInfo();
-    feature.setId(Strings.lenientFormat("entity.NONE.%s", name));
+    feature.setId(Strings.lenientFormat("entity.%s", name));
     feature.setName(name);
     feature.setEntity(newTestEntityInfo("entity"));
     feature.setDescription("");
     feature.setOwner("@test");
-    feature.setGranularity(Granularity.Enum.NONE);
     feature.setValueType(ValueType.Enum.BOOL);
     feature.setUri("");
     feature.setWarehouseStore(newTestStorageInfo("BIGQUERY1", "BIGQUERY"));
@@ -180,7 +178,7 @@ public void shouldGetFeaturesMatchingIds() {
     FeatureInfo feature1 = newTestFeatureInfo("feature1");
     FeatureInfo feature2 = newTestFeatureInfo("feature2");
 
-    ArrayList<String> ids = Lists.newArrayList("entity.none.feature1", "entity.none.feature2");
+    ArrayList<String> ids = Lists.newArrayList("entity.feature1", "entity.feature2");
     when(featureInfoRepository.findAllById(any(Iterable.class))).thenReturn(Lists.newArrayList(feature1, feature2));
 
     SpecService specService = new SpecService(
@@ -199,7 +197,7 @@ public void shouldDeduplicateGetFeature() {
     FeatureInfo feature1 = newTestFeatureInfo("feature1");
     FeatureInfo feature2 = newTestFeatureInfo("feature2");
 
-    ArrayList<String> ids = Lists.newArrayList("entity.none.feature1", "entity.none.feature2", "entity.none.feature2");
+    ArrayList<String> ids = Lists.newArrayList("entity.feature1", "entity.feature2", "entity.feature2");
     when(featureInfoRepository.findAllById(any(Iterable.class))).thenReturn(Lists.newArrayList(feature1, feature2));
 
     SpecService specService = new SpecService(
@@ -217,7 +215,7 @@ public void shouldDeduplicateGetFeature() {
   public void shouldThrowRetrievalExceptionIfAnyFeatureNotFound() {
     FeatureInfo feature2 = newTestFeatureInfo("feature2");
 
-    ArrayList<String> ids = Lists.newArrayList("entity.none.feature1", "entity.none.feature2");
+    ArrayList<String> ids = Lists.newArrayList("entity.feature1", "entity.feature2");
     when(featureInfoRepository.findAllById(ids)).thenReturn(Lists.newArrayList(feature2));
 
     SpecService specService = new SpecService(
@@ -340,14 +338,13 @@ public void shouldRegisterFeatureWithGroupInheritance() {
 
     FeatureSpec spec =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("desc")
             .setEntity("entity")
             .setUri("uri")
             .setGroup("testGroup")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .build();
 
@@ -361,14 +358,13 @@ public void shouldRegisterFeatureWithGroupInheritance() {
 
     FeatureSpec resolvedSpec =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("desc")
             .setEntity("entity")
             .setUri("uri")
             .setGroup("testGroup")
-            .setGranularity(Granularity.Enum.NONE)
             .setValueType(ValueType.Enum.BYTES)
             .setDataStores(dataStores)
             .build();
diff --git a/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java b/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java
index 3cbc7069a0..4e4408307b 100644
--- a/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java
+++ b/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java
@@ -40,8 +40,6 @@
 import com.google.cloud.bigquery.TableInfo;
 import com.google.cloud.bigquery.ViewDefinition;
 import feast.specs.FeatureSpecProto.FeatureSpec;
-import feast.types.GranularityProto.Granularity;
-import feast.types.GranularityProto.Granularity.Enum;
 import feast.types.ValueProto.ValueType;
 import java.util.ArrayList;
 import java.util.List;
@@ -77,8 +75,7 @@ public void shouldCreateNewDatasetAndTableAndViewIfNotExist() throws InterruptedException {
     when(bigQuery.getDataset(any(String.class))).thenReturn(null);
     String featureName = "my_feature";
     String entityName = "my_entity";
-    Granularity.Enum granularity = Enum.DAY;
-    String featureId = createFeatureId(entityName, granularity, featureName);
+    String featureId = createFeatureId(entityName, featureName);
     String description = "my feature is awesome";
     ValueType.Enum type = ValueType.Enum.INT64;
 
@@ -87,7 +84,6 @@ public void shouldCreateNewDatasetAndTableAndViewIfNotExist() throws InterruptedException {
             .setEntity(entityName)
             .setId(featureId)
             .setName(featureName)
-            .setGranularity(granularity)
             .setDescription(description)
             .setValueType(type)
             .build();
@@ -113,7 +109,7 @@ public void shouldCreateNewDatasetAndTableAndViewIfNotExist() throws InterruptedException {
     TableInfo actualTable = capturedValues.get(0);
     assertThat(
         actualTable.getTableId().getTable(),
-        equalTo(String.format("%s_%s", entityName, granularity.toString().toLowerCase())));
+        equalTo(String.format("%s", entityName)));
     List<Field> fields = actualTable.getDefinition().getSchema().getFields();
     assertThat(fields.size(), equalTo(5));
     Field idField = fields.get(0);
@@ -133,21 +129,20 @@ public void shouldCreateNewDatasetAndTableAndViewIfNotExist() throws InterruptedException {
     TableInfo actualView = capturedValues.get(1);
     assertThat(
         actualView.getTableId().getTable(),
-        equalTo(String.format("%s_%s_view", entityName, granularity.toString().toLowerCase())));
+        equalTo(String.format("%s_view", entityName)));
     ViewDefinition actualDefinition = actualView.getDefinition();
     assertThat(
         actualDefinition.getQuery(),
         equalTo(
             String.format(
-                "%s_%s.%s", entityName, granularity.toString().toLowerCase(), featureName)));
+                "%s.%s", entityName, featureName)));
   }
 
   @Test
   public void shouldNotUpdateTableIfColumnExists() {
     String featureName = "my_feature";
     String entityName = "my_entity";
-    Granularity.Enum granularity = Enum.DAY;
-    String featureId = createFeatureId(entityName, granularity, featureName);
+    String featureId = createFeatureId(entityName, featureName);
     String description = "my feature is awesome";
     ValueType.Enum type = ValueType.Enum.BOOL;
     LegacySQLTypeName sqlType = LegacySQLTypeName.BOOLEAN;
@@ -169,7 +164,6 @@ public void shouldNotUpdateTableIfColumnExists() {
             .setEntity(entityName)
             .setId(featureId)
             .setName(featureName)
-            .setGranularity(granularity)
             .setDescription(description)
             .setValueType(type)
             .build();
@@ -184,8 +178,7 @@ public void shouldNotUpdateTableIfColumnExists() {
   public void shouldUpdateTableAndViewIfColumnNotExists() {
     String newFeatureName = "my_feature";
     String entityName = "my_entity";
-    Granularity.Enum granularity = Enum.DAY;
-    String featureId = createFeatureId(entityName, granularity, newFeatureName);
+    String featureId = createFeatureId(entityName, newFeatureName);
     String description = "my feature is awesome";
     ValueType.Enum type = ValueType.Enum.BOOL;
     LegacySQLTypeName sqlType = LegacySQLTypeName.BOOLEAN;
@@ -203,7 +196,6 @@ public void shouldUpdateTableAndViewIfColumnNotExists() {
             .setEntity(entityName)
             .setId(featureId)
             .setName(newFeatureName)
-            .setGranularity(granularity)
             .setDescription(description)
             .setValueType(type)
             .build();
@@ -230,22 +222,21 @@ public void shouldUpdateTableAndViewIfColumnNotExists() {
     TableInfo actualView = capturedArgs.get(1);
     assertThat(
         actualView.getTableId().getTable(),
-        equalTo(String.format("%s_%s_view", entityName, granularity.toString().toLowerCase())));
+        equalTo(String.format("%s_view", entityName)));
     ViewDefinition actualDefinition = actualView.getDefinition();
     assertThat(
         actualDefinition.getQuery(),
         equalTo(
             String.format(
-                "%s_%s.%s.%s",
+                "%s.%s.%s",
                 entityName,
-                granularity.toString().toLowerCase(),
                 existingFeatureName,
                 newFeatureName)));
   }
 
   private String createFeatureId(
-      String entityName, Granularity.Enum granualrity, String featureName) {
-    return String.format("%s.%s.%s", entityName, granualrity.toString(), featureName).toLowerCase();
+      String entityName, String featureName) {
+    return String.format("%s.%s", entityName, featureName).toLowerCase();
   }
 
   private Table createTable(FeatureSchema... featureSchemas) {
diff --git a/core/src/test/java/feast/core/training/BigQueryDatasetCreatorTest.java b/core/src/test/java/feast/core/training/BigQueryDatasetCreatorTest.java
index bde137c9c7..9642399534 100644
--- a/core/src/test/java/feast/core/training/BigQueryDatasetCreatorTest.java
+++ b/core/src/test/java/feast/core/training/BigQueryDatasetCreatorTest.java
@@ -63,7 +63,7 @@ public void shouldCreateCorrectDatasetIfPrefixNotSpecified() {
     FeatureSet featureSet =
         FeatureSet.newBuilder()
             .setEntityName(entityName)
-            .addAllFeatureIds(Arrays.asList("myentity.none.feature1", "myentity.none.feature2"))
+            .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2"))
             .build();
 
     Timestamp startDate =
@@ -90,7 +90,7 @@ public void shouldCreateCorrectDatasetIfPrefixIsSpecified() {
     FeatureSet featureSet =
         FeatureSet.newBuilder()
             .setEntityName(entityName)
-            .addAllFeatureIds(Arrays.asList("myentity.none.feature1", "myentity.none.feature2"))
+            .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2"))
             .build();
 
     Timestamp startDate =
@@ -115,7 +115,7 @@ public void shouldPassArgumentToTemplater() {
     FeatureSet featureSet =
         FeatureSet.newBuilder()
             .setEntityName("myentity")
-            .addAllFeatureIds(Arrays.asList("myentity.none.feature1", "myentity.none.feature2"))
+            .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2"))
             .build();
 
     Timestamp startDate = Timestamps.fromSeconds(0);
diff --git a/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java b/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java
index 239d44d66e..d8b4595d3e 100644
--- a/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java
+++ b/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java
@@ -33,14 +33,12 @@
 import feast.core.model.EntityInfo;
 import feast.core.model.FeatureInfo;
 import feast.core.model.StorageInfo;
-import feast.core.training.BigQueryDatasetTemplater.FeatureGroup;
+import feast.core.training.BigQueryDatasetTemplater.Features;
 import feast.specs.EntitySpecProto.EntitySpec;
 import feast.specs.FeatureSpecProto.DataStore;
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity;
-import feast.types.GranularityProto.Granularity.Enum;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.time.Instant;
@@ -82,7 +80,7 @@ public void shouldThrowNoSuchElementExceptionIfFeatureNotFound() {
     FeatureSet fs =
         FeatureSet.newBuilder()
             .setEntityName("myentity")
-            .addAllFeatureIds(Arrays.asList("myentity.day.feature1", "myentity.day.feature2"))
+            .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2"))
             .build();
     templater.createQuery(fs, Timestamps.fromSeconds(0), Timestamps.fromSeconds(1), 0);
   }
@@ -97,11 +95,11 @@ public void shouldPassCorrectArgumentToTemplateEngine() {
     Timestamp endDate =
         Timestamps.fromSeconds(Instant.parse("2019-01-01T00:00:00.00Z").getEpochSecond());
     int limit = 100;
-    String featureId = "myentity.day.feature1";
-    String tableId = "project.dataset.myentity_day";
+    String featureId = "myentity.feature1";
+    String tableId = "project.dataset.myentity";
 
     when(featureInfoRespository.findAllById(any(List.class)))
-        .thenReturn(Collections.singletonList(createFeatureInfo(featureId, Enum.DAY, tableId)));
+        .thenReturn(Collections.singletonList(createFeatureInfo(featureId, tableId)));
 
     FeatureSet fs =
         FeatureSet.newBuilder()
@@ -123,27 +121,24 @@ public void shouldPassCorrectArgumentToTemplateEngine() {
     assertThat(actualContext.get("end_date"), equalTo("2019-01-01"));
     assertThat(actualContext.get("limit"), equalTo(String.valueOf(limit)));
 
-    List<FeatureGroup> featureGroups = (List<FeatureGroup>) actualContext.get("feature_groups");
-    assertThat(featureGroups.size(), equalTo(1));
-    assertThat(featureGroups.get(0).granularity, equalTo(Enum.DAY));
-    assertThat(featureGroups.get(0).features.size(), equalTo(1));
-    assertThat(featureGroups.get(0).features.get(0).featureId, equalTo(featureId));
-    assertThat(featureGroups.get(0).features.get(0).granularity, equalTo(Enum.DAY));
-    assertThat(featureGroups.get(0).features.get(0).tableId, equalTo(tableId));
+    Features features = (Features) actualContext.get("feature_set");
+    assertThat(features.getColumns().size(), equalTo(1));
+    assertThat(features.getColumns().get(0), equalTo(featureId.replace(".", "_")));
+    assertThat(features.getTableId(), equalTo(tableId));
   }
 
   @Test
   public void shouldRenderCorrectQuery1() throws Exception {
-    String tableId1 = "project.dataset.myentity_day";
-    String featureId1 = "myentity.day.feature1";
-    String featureId2 = "myentity.day.feature2";
+    String tableId1 = "project.dataset.myentity";
+    String featureId1 = "myentity.feature1";
+    String featureId2 = "myentity.feature2";
 
-    FeatureInfo featureInfo1 = createFeatureInfo(featureId1, Enum.DAY, tableId1);
-    FeatureInfo featureInfo2 = createFeatureInfo(featureId2, Enum.DAY, tableId1);
+    FeatureInfo featureInfo1 = createFeatureInfo(featureId1, tableId1);
+    FeatureInfo featureInfo2 = createFeatureInfo(featureId2, tableId1);
 
-    String tableId2 = "project.dataset.myentity_none";
-    String featureId3 = "myentity.none.feature3";
-    FeatureInfo featureInfo3 = createFeatureInfo(featureId3, Enum.NONE, tableId2);
+    String tableId2 = "project.dataset.myentity";
+    String featureId3 = "myentity.feature3";
+    FeatureInfo featureInfo3 = createFeatureInfo(featureId3, tableId2);
 
     when(featureInfoRespository.findAllById(any(List.class)))
         .thenReturn(Arrays.asList(featureInfo1, featureInfo2, featureInfo3));
@@ -166,52 +161,14 @@ public void shouldRenderCorrectQuery1() throws Exception {
 
   @Test
   public void shouldRenderCorrectQuery2() throws Exception {
-    String tableId1 = "project.dataset.myentity_day";
-    String featureId1 = "myentity.day.feature1";
-    String featureId2 = "myentity.day.feature2";
-
-    FeatureInfo featureInfo1 = createFeatureInfo(featureId1, Enum.DAY, tableId1);
-    FeatureInfo featureInfo2 = createFeatureInfo(featureId2, Enum.DAY, tableId1);
-
-    String tableId2 = "project.dataset.myentity_none";
-    String featureId3 = "myentity.none.feature3";
-    FeatureInfo featureInfo3 = createFeatureInfo(featureId3, Enum.NONE, tableId2);
-
-    when(featureInfoRespository.findAllById(any(List.class)))
-        .thenReturn(Arrays.asList(featureInfo1, featureInfo2, featureInfo3));
-
-    FeatureSet fs =
-        FeatureSet.newBuilder()
-            .setEntityName("myentity")
-            .addAllFeatureIds(Arrays.asList(featureId1, featureId2, featureId3))
-            .build();
-    Timestamp startDate =
-        Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond());
-    Timestamp endDate =
-        Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond());
-    int limit = 0;
-
-    String query = templater.createQuery(fs, startDate, endDate, limit);
-
-    checkExpectedQuery(query, "expQuery2.sql");
-  }
-
-  @Test
-  public void shouldRenderCorrectQuery3() throws Exception {
     List<FeatureInfo> featureInfos = new ArrayList<>();
     List<String> featureIds = new ArrayList<>();
-    for (Granularity.Enum granularity : Granularity.Enum.values()) {
-      if (granularity.equals(Enum.UNRECOGNIZED)) {
-        continue;
-      }
-      String granularityStr = granularity.toString().toLowerCase();
-      String tableId = "project.dataset.myentity_" + granularityStr;
-      String featureId = "myentity." + granularityStr + ".feature1";
+    String tableId = "project.dataset.myentity";
+    String featureId = "myentity.feature1";
 
-      featureInfos.add(createFeatureInfo(featureId, granularity, tableId));
-      featureIds.add(featureId);
-    }
+    featureInfos.add(createFeatureInfo(featureId, tableId));
+    featureIds.add(featureId);
 
     when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos);
 
@@ -224,7 +181,7 @@ public void shouldRenderCorrectQuery3() throws Exception {
 
     String query = templater.createQuery(featureSet, startDate, endDate, 1000);
 
-    checkExpectedQuery(query, "expQuery3.sql");
+    checkExpectedQuery(query, "expQuery2.sql");
   }
 
   private void checkExpectedQuery(String query, String pathToExpQuery) throws Exception {
@@ -240,7 +197,7 @@ private void checkExpectedQuery(String query, String pathToExpQuery) throws Exception {
     assertThat(query, equalTo(expQuery));
   }
 
-  private FeatureInfo createFeatureInfo(String id, Granularity.Enum granularity, String tableId) {
+  private FeatureInfo createFeatureInfo(String id, String tableId) {
     StorageSpec storageSpec =
         StorageSpec.newBuilder()
             .setId("BQ")
@@ -253,7 +210,6 @@ private FeatureInfo createFeatureInfo(String id, Granularity.Enum granularity, String tableId) {
     FeatureSpec fs =
         FeatureSpec.newBuilder()
             .setId(id)
-            .setGranularity(granularity)
             .setDataStores(DataStores.newBuilder().setWarehouse(DataStore.newBuilder().setId("BQ")))
             .build();
diff --git a/core/src/test/java/feast/core/validators/SpecValidatorTest.java b/core/src/test/java/feast/core/validators/SpecValidatorTest.java
index 9bb2cf461e..1170a036b4 100644
--- a/core/src/test/java/feast/core/validators/SpecValidatorTest.java
+++ b/core/src/test/java/feast/core/validators/SpecValidatorTest.java
@@ -38,7 +38,6 @@
 import feast.specs.ImportSpecProto.ImportSpec;
 import feast.specs.ImportSpecProto.Schema;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity;
 import java.util.Optional;
 import org.junit.Before;
 import org.junit.Rule;
@@ -171,10 +170,9 @@ public void featureSpecWithIdWithoutThreeWordsShouldThrowIllegalArgumentException() {
         .setOwner("owner")
         .setDescription("dasdad")
         .setEntity("entity")
-        .setGranularity(Granularity.Enum.forNumber(1))
         .build();
     exception.expect(IllegalArgumentException.class);
-    exception.expectMessage("Id must contain entity, granularity, name");
+    exception.expectMessage("Id must contain entity, name");
     validator.validateFeatureSpec(input);
   }
 
@@ -188,44 +186,19 @@ public void featureSpecWithIdWithoutMatchingEntityShouldThrowIllegalArgumentException() {
             featureInfoRepository);
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("notentity.granularity.name")
+            .setId("notentity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(1))
             .build();
     exception.expect(IllegalArgumentException.class);
     exception.expectMessage(
-        "Id must be in format entity.granularity.name, "
+        "Id must be in format entity.name, "
             + "entity in Id does not match entity provided.");
     validator.validateFeatureSpec(input);
   }
 
-  @Test
-  public void featureSpecWithIdWithoutMatchingGranularityShouldThrowIllegalArgumentException() {
-    SpecValidator validator =
-        new SpecValidator(
-            storageInfoRepository,
-            entityInfoRepository,
-            featureGroupInfoRepository,
-            featureInfoRepository);
-    FeatureSpec input =
-        FeatureSpec.newBuilder()
-            .setId("entity.granularity.name")
-            .setName("name")
-            .setOwner("owner")
-            .setDescription("dasdad")
-            .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(0))
-            .build();
-    exception.expect(IllegalArgumentException.class);
-    exception.expectMessage(
-        "Id must be in format entity.granularity.name, "
-            + "granularity in Id does not match granularity provided.");
-    validator.validateFeatureSpec(input);
-  }
-
   @Test
   public void featureSpecWithIdWithoutMatchingNameShouldThrowIllegalArgumentException() {
     SpecValidator validator =
@@ -236,16 +209,15 @@ public void featureSpecWithIdWithoutMatchingNameShouldThrowIllegalArgumentException() {
             featureInfoRepository);
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.notname")
+            .setId("entity.notname")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(0))
             .build();
     exception.expect(IllegalArgumentException.class);
     exception.expectMessage(
-        "Id must be in format entity.granularity.name, "
+        "Id must be in format entity.name, "
            + "name in Id does not match name provided.");
     validator.validateFeatureSpec(input);
   }
@@ -261,12 +233,11 @@ public void featureSpecWithoutExistingEntityShouldThrowIllegalArgumentException() {
             featureInfoRepository);
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(0))
             .build();
     exception.expect(IllegalArgumentException.class);
     exception.expectMessage("Entity with name entity does not exist");
@@ -285,12 +256,11 @@ public void featureSpecWithInvalidFeatureGroupShouldThrowIllegalArgumentException() {
             featureInfoRepository);
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(0))
             .setGroup("group")
             .build();
     exception.expect(IllegalArgumentException.class);
@@ -312,12 +282,11 @@ public void featureSpecWithoutExistingServingStoreShouldThrowIllegalArgumentException() {
     DataStores dataStores = DataStores.newBuilder().setServing(servingStore).build();
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
-            .setGranularity(Granularity.Enum.forNumber(0))
             .setDataStores(dataStores)
             .build();
     exception.expect(IllegalArgumentException.class);
@@ -348,13 +317,12 @@ public void featureSpecWithoutServingStoreShouldInheritServingStoreIdFromGroup() {
     DataStores dataStores = DataStores.newBuilder().setWarehouse(warehouseStore).build();
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
             .setDescription("dasdad")
             .setEntity("entity")
             .setGroup("group")
-            .setGranularity(Granularity.Enum.forNumber(0))
             .setDataStores(dataStores)
             .build();
     exception.expect(IllegalArgumentException.class);
@@ -387,12 +355,11 @@ public void featureSpecWithoutExistingWarehouseStoreShouldThrowIllegalArgumentException() {
         DataStores.newBuilder().setServing(servingStore).setWarehouse(warehouseStore).build();
     FeatureSpec input =
         FeatureSpec.newBuilder()
-            .setId("entity.none.name")
+            .setId("entity.name")
             .setName("name")
             .setOwner("owner")
.setDescription("dasdad") .setEntity("entity") - .setGranularity(Granularity.Enum.forNumber(0)) .setDataStores(dataStores) .build(); exception.expect(IllegalArgumentException.class); @@ -423,12 +390,11 @@ public void featureSpecWithoutWarehouseStoreShouldBeAllowed() { DataStores.newBuilder().setServing(servingStore).build(); FeatureSpec input = FeatureSpec.newBuilder() - .setId("entity.none.name") + .setId("entity.name") .setName("name") .setOwner("owner") .setDescription("dasdad") .setEntity("entity") - .setGranularity(Granularity.Enum.forNumber(0)) .setDataStores(dataStores) .build(); validator.validateFeatureSpec(input); @@ -464,12 +430,11 @@ public void featureSpecWithUnsupportedWarehouseStoreShouldThrowIllegalArgumentEx DataStores.newBuilder().setServing(servingStore).setWarehouse(warehouseStore).build(); FeatureSpec input = FeatureSpec.newBuilder() - .setId("entity.none.name") + .setId("entity.name") .setName("name") .setOwner("owner") .setDescription("dasdad") .setEntity("entity") - .setGranularity(Granularity.Enum.forNumber(0)) .setDataStores(dataStores) .build(); exception.expect(IllegalArgumentException.class); @@ -511,12 +476,11 @@ public void featureSpecWithUnsupportedServingStoreShouldThrowIllegalArgumentExce DataStores.newBuilder().setServing(servingStore).setWarehouse(warehouseStore).build(); FeatureSpec input = FeatureSpec.newBuilder() - .setId("entity.none.name") + .setId("entity.name") .setName("name") .setOwner("owner") .setDescription("dasdad") .setEntity("entity") - .setGranularity(Granularity.Enum.forNumber(0)) .setDataStores(dataStores) .build(); exception.expect(IllegalArgumentException.class); diff --git a/core/src/test/resources/sql/expQuery1.sql b/core/src/test/resources/sql/expQuery1.sql index 83a91f9502..97d21c3fcb 100644 --- a/core/src/test/resources/sql/expQuery1.sql +++ b/core/src/test/resources/sql/expQuery1.sql @@ -1,59 +1,11 @@ -WITH raw_project_dataset_myentity_day AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_day_feature1 , - FIRST_VALUE(feature2 IGNORE NULLS) OVER w AS myentity_day_feature2 -FROM - `project.dataset.myentity_day` +SELECT + project.dataset.myentity.id, + project.dataset.myentity.event_timestamp , + myentity_feature1, + myentity_feature2, + myentity_feature3 +FROM + project.dataset.myentity WHERE event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_day AS ( SELECT - * -FROM - raw_project_dataset_myentity_day -WHERE - rownum = 1 ) , raw_project_dataset_myentity_none AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature3 IGNORE NULLS) OVER w AS myentity_none_feature3 -FROM - `project.dataset.myentity_none` WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_none AS ( SELECT - * -FROM - raw_project_dataset_myentity_none -WHERE - rownum = 1 ) SELECT - project_dataset_myentity_day.id, - project_dataset_myentity_day.event_timestamp , - project_dataset_myentity_day.myentity_day_feature1, - project_dataset_myentity_day.myentity_day_feature2, - project_dataset_myentity_none.myentity_none_feature3 -FROM - 
project_dataset_myentity_day LEFT -JOIN - project_dataset_myentity_none - ON project_dataset_myentity_day.id = project_dataset_myentity_none.id LIMIT 100 \ No newline at end of file + AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) LIMIT 100 \ No newline at end of file diff --git a/core/src/test/resources/sql/expQuery2.sql b/core/src/test/resources/sql/expQuery2.sql index 17a06b18fd..9b9f74a81d 100644 --- a/core/src/test/resources/sql/expQuery2.sql +++ b/core/src/test/resources/sql/expQuery2.sql @@ -1,59 +1,9 @@ -WITH raw_project_dataset_myentity_day AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_day_feature1 , - FIRST_VALUE(feature2 IGNORE NULLS) OVER w AS myentity_day_feature2 +SELECT + project.dataset.myentity.id, + project.dataset.myentity.event_timestamp , + myentity_feature1 FROM - `project.dataset.myentity_day` + project.dataset.myentity WHERE event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_day AS ( SELECT - * -FROM - raw_project_dataset_myentity_day -WHERE - rownum = 1 ) , raw_project_dataset_myentity_none AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature3 IGNORE NULLS) OVER w AS myentity_none_feature3 -FROM - `project.dataset.myentity_none` WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_none AS ( SELECT - * -FROM - raw_project_dataset_myentity_none -WHERE - rownum = 1 ) SELECT - project_dataset_myentity_day.id, - project_dataset_myentity_day.event_timestamp , - project_dataset_myentity_day.myentity_day_feature1, - project_dataset_myentity_day.myentity_day_feature2, - project_dataset_myentity_none.myentity_none_feature3 -FROM - project_dataset_myentity_day LEFT -JOIN - project_dataset_myentity_none - ON project_dataset_myentity_day.id = project_dataset_myentity_none.id \ No newline at end of file + AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) LIMIT 1000 \ No newline at end of file diff --git a/core/src/test/resources/sql/expQuery3.sql b/core/src/test/resources/sql/expQuery3.sql deleted file mode 100644 index 2c5405dd6a..0000000000 --- a/core/src/test/resources/sql/expQuery3.sql +++ /dev/null @@ -1,150 +0,0 @@ -WITH raw_project_dataset_myentity_second AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_second_feature1 -FROM - `project.dataset.myentity_second` -WHERE - event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_second AS ( SELECT - * -FROM - raw_project_dataset_myentity_second -WHERE - rownum = 1 ) , raw_project_dataset_myentity_minute AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE 
NULLS) OVER w AS myentity_minute_feature1 -FROM - `project.dataset.myentity_minute` -WHERE - event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_minute AS ( SELECT - * -FROM - raw_project_dataset_myentity_minute -WHERE - rownum = 1 ) , raw_project_dataset_myentity_hour AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_hour_feature1 -FROM - `project.dataset.myentity_hour` -WHERE - event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_hour AS ( SELECT - * -FROM - raw_project_dataset_myentity_hour -WHERE - rownum = 1 ) , raw_project_dataset_myentity_day AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_day_feature1 -FROM - `project.dataset.myentity_day` -WHERE - event_timestamp >= TIMESTAMP("2018-01-02") - AND event_timestamp <= TIMESTAMP(DATETIME_ADD("2018-01-30", INTERVAL 1 DAY)) WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_day AS ( SELECT - * -FROM - raw_project_dataset_myentity_day -WHERE - rownum = 1 ) , raw_project_dataset_myentity_none AS (SELECT - ROW_NUMBER() OVER (PARTITION -BY - id, - event_timestamp -ORDER BY - created_timestamp DESC) rownum, - id, - event_timestamp, - created_timestamp , - FIRST_VALUE(feature1 IGNORE NULLS) OVER w AS myentity_none_feature1 -FROM - `project.dataset.myentity_none` WINDOW w AS ( PARTITION -BY - id, - event_timestamp -ORDER BY - event_timestamp DESC )), project_dataset_myentity_none AS ( SELECT - * -FROM - raw_project_dataset_myentity_none -WHERE - rownum = 1 ) SELECT - project_dataset_myentity_second.id, - project_dataset_myentity_second.event_timestamp , - project_dataset_myentity_second.myentity_second_feature1, - project_dataset_myentity_minute.myentity_minute_feature1, - project_dataset_myentity_hour.myentity_hour_feature1, - project_dataset_myentity_day.myentity_day_feature1, - project_dataset_myentity_none.myentity_none_feature1 -FROM - project_dataset_myentity_second LEFT -JOIN - project_dataset_myentity_minute - ON project_dataset_myentity_second.id = project_dataset_myentity_minute.id - AND TIMESTAMP_TRUNC(project_dataset_myentity_second.event_timestamp, - MINUTE) = project_dataset_myentity_minute.event_timestamp LEFT -JOIN - project_dataset_myentity_hour - ON project_dataset_myentity_second.id = project_dataset_myentity_hour.id - AND TIMESTAMP_TRUNC(project_dataset_myentity_second.event_timestamp, - HOUR) = project_dataset_myentity_hour.event_timestamp LEFT -JOIN - project_dataset_myentity_day - ON project_dataset_myentity_second.id = project_dataset_myentity_day.id - AND TIMESTAMP_TRUNC(project_dataset_myentity_second.event_timestamp, - DAY) = project_dataset_myentity_day.event_timestamp LEFT -JOIN - project_dataset_myentity_none - ON project_dataset_myentity_second.id = project_dataset_myentity_none.id LIMIT 1000 diff --git 
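The deleted expQuery3.sql above was the fixture that joined one CTE per granularity (second, minute, hour, day, none); with granularity removed, each entity maps to exactly one warehouse table and the expected queries collapse to a single SELECT over project.dataset.myentity. A minimal sketch of the resulting destination-table naming, mirroring the FeatureRowBigQueryIO change later in this diff; the standalone helper and its name are illustrative, not part of the patch, which inlines this logic in FeatureRowBigQueryIO.Write:

public class DestinationTableNameSketch {
  // Hypothetical helper: before this change the table carried a granularity
  // suffix, e.g. "myentity_day"; after it, the table is just the entity name.
  static String destinationTable(String project, String dataset, String entityName) {
    return project + ":" + dataset + "." + entityName;
  }

  public static void main(String[] args) {
    // Matches the FROM clause of the rewritten expQuery fixtures: project.dataset.myentity
    System.out.println(destinationTable("project", "dataset", "myentity"));
  }
}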
a/ingestion/example/core_specs/feature/product.day.completed_orders.json b/ingestion/example/core_specs/feature/product.day.completed_orders.json index 1e5c7e2665..a4582848e0 100644 --- a/ingestion/example/core_specs/feature/product.day.completed_orders.json +++ b/ingestion/example/core_specs/feature/product.day.completed_orders.json @@ -1,7 +1,6 @@ { - "id": "product.day.completed_orders", + "id": "product.completed_orders", "entity": "product", - "granularity": "DAY", "name": "completed_orders", "owner": "feast@example.com", "description": "This feature represents a product's completed orders per day", diff --git a/ingestion/example/core_specs/feature/user.none.age.json b/ingestion/example/core_specs/feature/user.none.age.json index c09957e132..9b8f8a68cf 100644 --- a/ingestion/example/core_specs/feature/user.none.age.json +++ b/ingestion/example/core_specs/feature/user.none.age.json @@ -1,7 +1,6 @@ { - "id": "user.none.age", + "id": "user.age", "entity": "user", - "granularity": "NONE", "name": "age", "owner": "feast@example.com", "description": "This feature represents a user's age", diff --git a/ingestion/example/core_specs/feature/user.none.completed_orders.json b/ingestion/example/core_specs/feature/user.none.completed_orders.json index 8b62f6065e..0280cf7483 100644 --- a/ingestion/example/core_specs/feature/user.none.completed_orders.json +++ b/ingestion/example/core_specs/feature/user.none.completed_orders.json @@ -1,7 +1,6 @@ { - "id": "user.none.completed_orders", + "id": "user.completed_orders", "entity": "user", - "granularity": "NONE", "name": "completed_orders", "owner": "feast@example.com", "description": "This feature represents a user's total completed orders", diff --git a/ingestion/example/import_products.yaml b/ingestion/example/import_products.yaml index 9e20e270d3..cb52580cf0 100644 --- a/ingestion/example/import_products.yaml +++ b/ingestion/example/import_products.yaml @@ -11,5 +11,5 @@ schema: fields: - name: id - name: timestamp - - featureId: product.day.completed_orders + - featureId: product.completed_orders diff --git a/ingestion/example/import_users.yaml b/ingestion/example/import_users.yaml index bd35e87700..5e2d6e553a 100644 --- a/ingestion/example/import_users.yaml +++ b/ingestion/example/import_users.yaml @@ -11,6 +11,6 @@ schema: fields: - name: id - name: name - - featureId: user.none.age - - featureId: user.none.completed_orders + - featureId: user.age + - featureId: user.completed_orders diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index e05362c15c..a77d07b4cf 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -38,7 +38,6 @@ import feast.ingestion.transform.WarehouseStoreTransform; import feast.ingestion.transform.fn.ConvertTypesDoFn; import feast.ingestion.transform.fn.LoggerDoFn; -import feast.ingestion.transform.fn.RoundEventTimestampsDoFn; import feast.ingestion.values.PFeatureRows; import feast.options.OptionsParser; import feast.specs.ImportJobSpecsProto.ImportJobSpecs; @@ -168,7 +167,7 @@ public void expand() { "A sample of size 1 of incoming rows from MAIN and ERRORS will be logged every 30 seconds for visibility"); logNRows(pFeatureRows, "Output sample", 1, Duration.standardSeconds(30)); - PFeatureRows warehouseRows = roundTimestamps("Round timestamps for warehouse", pFeatureRows); + PFeatureRows warehouseRows = pFeatureRows; PFeatureRows servingRows = pFeatureRows; if
(jobOptions.isCoalesceRowsEnabled()) { // Should we merge and dedupe rows before writing to the serving store? @@ -176,7 +175,6 @@ public void expand() { jobOptions.getCoalesceRowsDelaySeconds(), jobOptions.getCoalesceRowsTimeoutSeconds())); } - servingRows = roundTimestamps("Round timestamps for serving", servingRows); if (!dryRun) { servingRows.apply("Write to Serving Stores", servingStoreTransform); @@ -185,16 +183,6 @@ public void expand() { } } - public PFeatureRows roundTimestamps(String name, PFeatureRows pFeatureRows) { - return - PFeatureRows.of( - pFeatureRows - .getMain() - .apply(name, - ParDo.of(new RoundEventTimestampsDoFn())), - pFeatureRows.getErrors()); - } - public PipelineResult run() { PipelineResult result = pipeline.run(); log.info(String.format("FeastImportJobId:%s", this.retrieveId(result))); diff --git a/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java b/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java index c357454dbd..d42be02fe3 100644 --- a/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java +++ b/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java @@ -17,12 +17,11 @@ package feast.ingestion.metrics; -import feast.ingestion.util.DateUtil; +import com.google.protobuf.util.Timestamps; import feast.types.FeatureProto.Feature; import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto; import feast.types.FeatureRowProto.FeatureRow; -import java.time.Instant; import lombok.AllArgsConstructor; import org.apache.beam.sdk.metrics.Metrics; import org.apache.beam.sdk.transforms.DoFn; @@ -31,8 +30,7 @@ public class FeastMetrics { public static final String FEAST_NAMESPACE = "feast"; - private FeastMetrics() { - } + private FeastMetrics() {} private static void inc(String name) { Metrics.counter(FeastMetrics.FEAST_NAMESPACE, name).inc(); @@ -75,19 +73,13 @@ public void processElement( public static class CalculateLagMetricFunc extends DoFn { @ProcessElement - public void processElement(@Element FeatureRowExtended element, - OutputReceiver out) { + public void processElement( + @Element FeatureRowExtended element, OutputReceiver out) { FeatureRowProto.FeatureRow row = element.getRow(); com.google.protobuf.Timestamp eventTimestamp = row.getEventTimestamp(); - Instant now = Instant.now(); - com.google.protobuf.Timestamp roundedCurrentTimestamp = - DateUtil.roundToGranularity( - com.google.protobuf.Timestamp.newBuilder() - .setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()) - .build(), - row.getGranularity()); - long lagSeconds = roundedCurrentTimestamp.getSeconds() - eventTimestamp.getSeconds(); + + com.google.protobuf.Timestamp now = Timestamps.fromMillis(System.currentTimeMillis()); + long lagSeconds = now.getSeconds() - eventTimestamp.getSeconds(); FeastMetrics.update("row:lag", lagSeconds); out.output(element); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java index a7c53d21c6..aa8a1754c2 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java +++ b/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java @@ -59,16 +59,18 @@ * Takes FeatureRow, and merges them if they have the same FeatureRowKey, so that the latest values * will be emitted. It emits only once for batch. 
* - * For streaming we emits after a delay of 10 seconds (event time) by default we keep the previous - * state around for merging with future events. These timeout after 30 minutes by default. + * <p>
For streaming we emit after a delay of 10 seconds (event time) by default; we keep the + * previous state around for merging with future events. These time out after 30 minutes by default. */ -public class CoalesceFeatureRows extends - PTransform, PCollection> { +public class CoalesceFeatureRows + extends PTransform, PCollection> { - private static final SerializableFunction KEY_FUNCTION = (row) -> - CoalesceKey.newBuilder() - .setEntityName(row.getEntityName()) - .setEntityKey(row.getEntityKey()).build(); + private static final SerializableFunction KEY_FUNCTION = + (row) -> + CoalesceKey.newBuilder() + .setEntityName(row.getEntityName()) + .setEntityKey(row.getEntityKey()) + .build(); private static final Duration DEFAULT_DELAY = Duration.standardSeconds(10); private static final Duration DEFAULT_TIMEOUT = Duration.ZERO; @@ -89,16 +91,15 @@ public CoalesceFeatureRows(Duration delay, Duration timeout) { this.timeout = (timeout.isEqual(Duration.ZERO)) ? DEFAULT_TIMEOUT : timeout; } - /** - * Return a FeatureRow of the new features accumulated since the given timestamp - */ + /** Return a FeatureRow of the new features accumulated since the given timestamp */ public static FeatureRow toFeatureRow(CoalesceAccum accum, long counter) { - Preconditions.checkArgument(counter <= - accum.getCounter(), "Accumulator has no features at or newer than the provided counter"); - FeatureRow.Builder builder = FeatureRow.newBuilder() - .setEntityName(accum.getEntityName()) - .setGranularity(accum.getGranularity()) - .setEntityKey(accum.getEntityKey()); + Preconditions.checkArgument( + counter <= accum.getCounter(), + "Accumulator has no features at or newer than the provided counter"); + FeatureRow.Builder builder = + FeatureRow.newBuilder() + .setEntityName(accum.getEntityName()) + .setEntityKey(accum.getEntityKey()); // This will be the latest timestamp if (accum.hasEventTimestamp()) { builder.setEventTimestamp(accum.getEventTimestamp()); @@ -108,12 +109,15 @@ public static FeatureRow toFeatureRow(CoalesceAccum accum, long counter) { if (counter <= 0) { builder.addAllFeatures(features.values()); } else { - List featureList = accum.getFeatureMarksMap().entrySet().stream() - .filter((e) -> e.getValue() > counter) - .map((e) -> features.get(e.getKey())) - .collect(Collectors.toList()); + List featureList = + accum + .getFeatureMarksMap() + .entrySet() + .stream() + .filter((e) -> e.getValue() > counter) + .map((e) -> features.get(e.getKey())) + .collect(Collectors.toList()); builder.addAllFeatures(featureList); - } return builder.build(); } @@ -129,8 +133,7 @@ public static CoalesceAccum combineFeatureRows(CoalesceAccum seed, Iterable expand(PCollection input) { - PCollection> kvs = input - .apply(WithKeys.of(KEY_FUNCTION).withKeyType(TypeDescriptor.of(CoalesceKey.class))) - .setCoder(KvCoder.of(ProtoCoder.of(CoalesceKey.class), ProtoCoder.of(FeatureRow.class))); + PCollection> kvs = + input + .apply(WithKeys.of(KEY_FUNCTION).withKeyType(TypeDescriptor.of(CoalesceKey.class))) + .setCoder( + KvCoder.of(ProtoCoder.of(CoalesceKey.class), ProtoCoder.of(FeatureRow.class))); if (kvs.isBounded().equals(IsBounded.UNBOUNDED)) { - return kvs.apply("Configure window", Window.>configure() - .withAllowedLateness(Duration.ZERO) - .discardingFiredPanes() - .triggering(AfterProcessingTime.pastFirstElementInPane())) + return kvs.apply( + "Configure window", + Window.>configure() + .withAllowedLateness(Duration.ZERO) + .discardingFiredPanes() + .triggering(AfterProcessingTime.pastFirstElementInPane())) .apply(ParDo.of(new
CombineStateDoFn(delay, timeout))) .apply(Values.create()); } else { @@ -185,22 +191,25 @@ public PCollection expand(PCollection input) { } } - @Slf4j @AllArgsConstructor - public static class CombineStateDoFn extends - DoFn, KV> { + public static class CombineStateDoFn + extends DoFn, KV> { @StateId("lastKnownAccumValue") private final StateSpec> lastKnownAccumValueSpecs = StateSpecs.value(ProtoCoder.of(CoalesceAccum.class)); + @StateId("newElementsBag") private final StateSpec> newElementsBag = StateSpecs.bag(ProtoCoder.of(FeatureRow.class)); + @StateId("lastTimerTimestamp") private final StateSpec> lastTimerTimestamp = StateSpecs.value(); + @TimerId("bufferTimer") private final TimerSpec bufferTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME); + @TimerId("timeoutTimer") private final TimerSpec timeoutTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME); @@ -208,7 +217,8 @@ public static class CombineStateDoFn extends private Duration timeout; @ProcessElement - public void processElement(ProcessContext context, + public void processElement( + ProcessContext context, @StateId("newElementsBag") BagState newElementsBag, @TimerId("bufferTimer") Timer bufferTimer, @TimerId("timeoutTimer") Timer timeoutTimer, @@ -222,7 +232,8 @@ public void processElement(ProcessContext context, // We never timeout the state if a timeout is not set. timeoutTimer.offset(timeout).setRelative(); } - if (lastTimerTimestamp == null || lastTimerTimestamp.isBefore(contextTimestamp) + if (lastTimerTimestamp == null + || lastTimerTimestamp.isBefore(contextTimestamp) || lastTimerTimestamp.equals(contextTimestamp)) { lastTimerTimestamp = context.timestamp().plus(delay); log.debug("Setting timer for key {} to {}", context.element().getKey(), lastTimerTimestamp); @@ -233,7 +244,8 @@ public void processElement(ProcessContext context, @OnTimer("bufferTimer") public void bufferOnTimer( - OnTimerContext context, OutputReceiver> out, + OnTimerContext context, + OutputReceiver> out, @StateId("newElementsBag") BagState newElementsBag, @StateId("lastKnownAccumValue") ValueState lastKnownAccumValue) { log.debug("bufferOnTimer triggered {}", context.timestamp()); @@ -242,7 +254,8 @@ public void bufferOnTimer( @OnTimer("timeoutTimer") public void timeoutOnTimer( - OnTimerContext context, OutputReceiver> out, + OnTimerContext context, + OutputReceiver> out, @StateId("newElementsBag") BagState newElementsBag, @StateId("lastKnownAccumValue") ValueState lastKnownAccumValue) { log.debug("timeoutOnTimer triggered {}", context.timestamp()); diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java index cd19a913ff..c5ed560eea 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java @@ -41,12 +41,10 @@ public void processElementImpl(ProcessContext context) { rowBuilder.setEventTimestamp(row.getEventTimestamp()); } - for (Feature feature : row.getFeaturesList()) { String featureId = feature.getId(); FeatureSpec featureSpec = specs.getFeatureSpec(featureId); - rowBuilder.setGranularity(featureSpec.getGranularity()); rowBuilder.addFeatures( Feature.newBuilder() .setId(featureId) diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/RoundEventTimestampsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/RoundEventTimestampsDoFn.java deleted file mode 100644 index 7170e65915..0000000000 --- 
a/ingestion/src/main/java/feast/ingestion/transform/fn/RoundEventTimestampsDoFn.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.transform.fn; - -import org.apache.beam.sdk.transforms.DoFn; -import feast.ingestion.util.DateUtil; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; - -public class RoundEventTimestampsDoFn extends DoFn { - @ProcessElement - public void processElement(ProcessContext context) { - FeatureRowExtended rowExtended = context.element(); - FeatureRow row = rowExtended.getRow(); - com.google.protobuf.Timestamp timestamp = rowExtended.getRow().getEventTimestamp(); - row = - row.toBuilder() - .setEventTimestamp(DateUtil.roundToGranularity(timestamp, row.getGranularity())) - .build(); - context.output(rowExtended.toBuilder().setRow(row).build()); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java index 8fdb226029..5c46a5dd5d 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java @@ -50,7 +50,6 @@ public void processElement(ProcessContext context) { if (builder == null) { builder = FeatureRow.newBuilder() - .setGranularity(row.getGranularity()) .setEventTimestamp(row.getEventTimestamp()) .setEntityName(row.getEntityName()) .setEntityKey(row.getEntityKey()); diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java index 8336c8dd32..06332eb8bf 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java @@ -37,7 +37,6 @@ import feast.store.warehouse.FeatureWarehouseFactoryService; import feast.types.FeatureProto.Feature; import feast.types.FeatureRowProto.FeatureRow; -import feast.types.GranularityProto.Granularity.Enum; import feast.types.ValueProto.ValueType; import java.util.ArrayList; import java.util.HashSet; @@ -79,7 +78,6 @@ public void processElementImpl(ProcessContext context) { FeatureRow row = context.element().getRow(); EntitySpec entitySpec = specs.getEntitySpec(row.getEntityName()); Preconditions.checkNotNull(entitySpec, "Entity spec not found for " + row.getEntityName()); - ImportSpec importSpec = specs.getImportSpec(); try { checkArgument(!row.getEntityKey().isEmpty(), "Entity key must not be empty"); @@ -90,10 +88,6 @@ public void processElementImpl(ProcessContext context) { String.format( "Row entity not found in import spec entities. 
entity=%s", row.getEntityName())); - checkArgument( - !row.getGranularity().equals(Enum.UNRECOGNIZED), - String.format("Unrecognised granularity %s", row.getGranularity())); - checkArgument(row.hasEventTimestamp(), "Must have eventTimestamp set"); checkArgument(row.getFeaturesCount() > 0, "Must have at least one feature set"); @@ -124,11 +118,6 @@ public void processElementImpl(ProcessContext context) { "Feature must have same entity as row. featureId=%s FeatureRow.entityName=%s FeatureSpec.entity=%s", feature.getId(), row.getEntityName(), featureSpec.getEntity())); - checkArgument( - featureSpec.getGranularity().equals(row.getGranularity()), - String.format( - "Feature must have same granularity as entity, featureId=%s", feature.getId())); - ValueType.Enum expectedType = featureSpec.getValueType(); ValueType.Enum actualType = Values.toValueType(feature.getValue()); checkArgument( diff --git a/ingestion/src/main/java/feast/ingestion/util/DateUtil.java b/ingestion/src/main/java/feast/ingestion/util/DateUtil.java index 15c4cc4d46..ad779dbea3 100644 --- a/ingestion/src/main/java/feast/ingestion/util/DateUtil.java +++ b/ingestion/src/main/java/feast/ingestion/util/DateUtil.java @@ -20,11 +20,12 @@ import com.google.protobuf.Timestamp; import java.time.Instant; import org.joda.time.DateTime; -import org.joda.time.DateTimeField; import org.joda.time.DateTimeZone; -import org.joda.time.MutableDateTime; -import org.joda.time.format.*; -import feast.types.GranularityProto.Granularity; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.DateTimeFormatterBuilder; +import org.joda.time.format.DateTimeParser; +import org.joda.time.format.ISODateTimeFormat; public class DateUtil { @@ -34,12 +35,12 @@ public class DateUtil { DateTimeFormatterBuilder formatterBuilder = new DateTimeFormatterBuilder(); DateTimeFormatter base = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss"); DateTimeFormatter zone = DateTimeFormat.forPattern(" ZZZ"); - DateTimeParser fractionSecondParser = new DateTimeFormatterBuilder() - .appendLiteral(".") - .appendFractionOfSecond(1,6) - .toParser(); + DateTimeParser fractionSecondParser = + new DateTimeFormatterBuilder().appendLiteral(".").appendFractionOfSecond(1, 6).toParser(); - FALLBACK_TIMESTAMP_FORMAT = formatterBuilder.append(base) + FALLBACK_TIMESTAMP_FORMAT = + formatterBuilder + .append(base) .appendOptional(fractionSecondParser) .append(zone) .toFormatter(); @@ -93,30 +94,4 @@ public static Timestamp maxTimestamp(Timestamp a, Timestamp b) { public static long toMillis(Timestamp timestamp) { return toDateTime(timestamp).getMillis(); } - - public static Timestamp roundToGranularity(Timestamp timestamp, Granularity.Enum granularity) { - MutableDateTime dt = new MutableDateTime(DateTimeZone.UTC); - DateTimeField roundingField; - switch (granularity) { - case DAY: - roundingField = dt.getChronology().dayOfMonth(); - break; - case HOUR: - roundingField = dt.getChronology().hourOfDay(); - break; - case MINUTE: - roundingField = dt.getChronology().minuteOfHour(); - break; - case SECOND: - roundingField = dt.getChronology().secondOfMinute(); - break; - case NONE: - return Timestamp.newBuilder().setSeconds(0).setNanos(0).build(); - default: - throw new RuntimeException("Unrecognised time series granularity"); - } - dt.setRounding(roundingField, MutableDateTime.ROUND_FLOOR); - dt.setMillis(toDateTime(timestamp).getMillis()); - return toTimestamp(dt.toDateTime()); - } } diff --git 
a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java index 1d14b1201e..6a9330c7c2 100644 --- a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java @@ -60,8 +60,7 @@ public class FeatureRowToBigTableMutationDoFn this.specs = specs; } - public static BigTableRowKey makeBigTableRowKey( - String entityKey) { + public static BigTableRowKey makeBigTableRowKey(String entityKey) { return BigTableRowKey.newBuilder() .setSha1Prefix(DigestUtils.sha1Hex(entityKey).substring(0, 7)) @@ -89,15 +88,14 @@ private String getTableName(FeatureRow row) { /** * Given a row and a feature info service, build a BigTable Put mutation * - * <p>
bigtable row key = {sha1(row.key), row.key, row.timestamp} family = {feature.group, - * granularity} qualifier = {feature.name} value = {feature.value} + * <p>
bigtable row key = {sha1(row.key), row.key, row.timestamp} family = {feature.group} + * qualifier = {feature.name} value = {feature.value} */ public Put makePut(FeatureRowExtended rowExtended) { FeatureRow row = rowExtended.getRow(); // We always additionally overwrite a None granularity row so that it is trivial to retrieve the // latest across all features. Put latestPut = - new Put(makeBigTableRowKey(row.getEntityKey()).toByteArray()); + Put latestPut = new Put(makeBigTableRowKey(row.getEntityKey()).toByteArray()); for (Feature feature : row.getFeaturesList()) { FeatureSpec featureSpec = specs.getFeatureSpec(feature.getId()); BigTableFeatureOptions options = servingOptionsCache.get(featureSpec); diff --git a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java index dbe57bff9f..01f779bff4 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java @@ -54,13 +54,6 @@ public FeatureRowToRedisMutationDoFn(Specs specs) { this.random = new Random(); } - // TODO when extracting to a storage module, use reasonable defaults per granularity. - // TODO check is greater than 1 - static long getBucketId(com.google.protobuf.Timestamp eventTimestamp, Duration bucketSize) { - checkArgument(bucketSize.getStandardSeconds() > 0, "BucketSize must be greater than zero"); - return eventTimestamp.getSeconds() / bucketSize.getStandardSeconds(); - } - static RedisBucketKey getRedisBucketKey( String entityId, String featureIdSha1Prefix, long bucketId) { return RedisBucketKey.newBuilder() @@ -91,13 +84,10 @@ public void processElement(ProcessContext context) { RedisFeatureOptions options = servingOptionsCache.get(featureSpec); - com.google.protobuf.Timestamp roundedTimestamp = - DateUtil.roundToGranularity(row.getEventTimestamp(), featureSpec.getGranularity()); RedisBucketValue value = RedisBucketValue.newBuilder() .setValue(feature.getValue()) - .setEventTimestamp(roundedTimestamp) + .setEventTimestamp(row.getEventTimestamp()) .build(); RedisBucketKey keyForLatest = getRedisBucketKey(entityKey, featureIdHash, 0L); diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java index c8ec8a7825..06180f364a 100644 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java +++ b/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java @@ -21,13 +21,10 @@ import com.google.common.base.Strings; import com.google.inject.Inject; import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.ingestion.transform.SplitFeatures.SingleOutputSplit; import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; +import feast.store.FeatureStoreWrite; import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto.FeatureRow; -import feast.types.GranularityProto.Granularity; import lombok.extern.slf4j.Slf4j; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; @@ -38,6 +35,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.WriteResult; import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider; import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.ValueInSingleWindow; import org.joda.time.Duration; @@ -60,11 +58,6 @@ public Write(BigQueryStoreOptions bigQueryOptions, Specs specs) { @Override public PDone expand(PCollection input) { - SingleOutputSplit granularitySplitter = - new SingleOutputSplit<>(FeatureSpec::getGranularity, specs); - PCollection features = - input.apply("Split by granularity", granularitySplitter); - FeatureRowToBigQueryTableRowDoFn toTableRowDoFn = new FeatureRowToBigQueryTableRowDoFn(specs); BigQueryIO.Write write = BigQueryIO.write() @@ -75,9 +68,7 @@ public String getDestination(ValueInSingleWindow element) { FeatureRow row = featureRowExtended.getRow(); EntitySpec entityInfo = specs.getEntitySpec(row.getEntityName()); - Granularity.Enum granularity = row.getGranularity(); - String tableName = - entityInfo.getName() + "_" + granularity.name().toLowerCase(); + String tableName = entityInfo.getName(); return bigQueryOptions.project + ":" + bigQueryOptions.dataset @@ -105,16 +96,16 @@ public TableSchema getSchema(String destination) { write = write.withCustomGcsTempLocation(StaticValueProvider.of(bigQueryOptions.tempLocation)); } - switch (input.isBounded()) { - case UNBOUNDED: - write = - write - .withTriggeringFrequency(triggerFrequency) - // this is apparently supposed to be the default according to beam code comments. - .withNumFileShards(100); - } - WriteResult result = features.apply(write); + if (input.isBounded() == IsBounded.UNBOUNDED) { + write = + write + .withTriggeringFrequency(triggerFrequency) + // this is apparently supposed to be the default according to beam code + // comments. + .withNumFileShards(100); + } + WriteResult result = input.apply(write); return PDone.in(input.getPipeline()); } } diff --git a/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto b/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto index f8e82a5952..3e7ce90cbc 100644 --- a/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto +++ b/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto @@ -18,7 +18,6 @@ syntax = "proto3"; import "google/protobuf/timestamp.proto"; import "feast/types/Feature.proto"; -import "feast/types/Granularity.proto"; option java_package = "feast_ingestion.types"; option java_outer_classname = "CoalesceAccumProto"; @@ -28,7 +27,6 @@ message CoalesceAccum { string entityKey = 1; google.protobuf.Timestamp eventTimestamp = 3; string entityName = 4; - feast.types.Granularity.Enum granularity = 5; map features = 6; // map of features to their counter values when they were last added to accumulator diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java index 67e8ed8e86..356e023347 100644 --- a/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java +++ b/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java @@ -27,7 +27,7 @@ import com.google.common.io.Resources; import com.google.inject.Guice; import com.google.inject.Injector; -import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; import feast.ToOrderedFeatureRows; import feast.ingestion.boot.ImportJobModule; import feast.ingestion.boot.TestPipelineModule; @@ -46,11 +46,11 @@ import feast.store.warehouse.FeatureWarehouseFactoryService; import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import 
feast.types.FeatureRowProto.FeatureRow; -import feast.types.GranularityProto.Granularity; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; +import java.text.ParseException; import java.util.List; import lombok.extern.slf4j.Slf4j; import org.apache.beam.sdk.options.PipelineOptionsFactory; @@ -87,7 +87,7 @@ public ImportJobPipelineOptions initOptions() { } @Test - public void testImportCSV() throws IOException { + public void testImportCSV() throws IOException, ParseException { ImportSpec importSpec = ProtoUtil.decodeProtoYaml( "---\n" @@ -101,8 +101,8 @@ public void testImportCSV() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.testInt32\n" - + " - featureId: testEntity.none.testString\n" + + " - featureId: testEntity.testInt32\n" + + " - featureId: testEntity.testString\n" + "\n", ImportSpec.getDefaultInstance()); @@ -142,33 +142,32 @@ public void testImportCSV() throws IOException { Lists.newArrayList( normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(Timestamp.getDefaultInstance()) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .setEntityKey("1") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("a"))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) + .addFeatures(Features.of("testEntity.testString", Values.ofString("a"))) .build()), normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(Timestamp.getDefaultInstance()) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .setEntityKey("2") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(202))) - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("b"))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(202))) + .addFeatures(Features.of("testEntity.testString", Values.ofString("b"))) .build()), normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(Timestamp.getDefaultInstance()) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .setEntityKey("3") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(303))) - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("c"))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(303))) + .addFeatures(Features.of("testEntity.testString", Values.ofString("c"))) .build())); PAssert.that(writtenToErrors).satisfies(hasCount(0)); + PAssert.that(writtenToServing).satisfies(hasCount(3)); + PAssert.that(writtenToWarehouse).satisfies(hasCount(3)); PAssert.that(writtenToServing.apply("serving toFeatureRows", new ToOrderedFeatureRows())) .containsInAnyOrder(expectedRows); @@ -180,7 +179,7 @@ public void testImportCSV() throws IOException { } @Test - public void testImportFileJson() throws IOException { + public void testImportFileJson() throws IOException, ParseException { ImportSpec importSpec = ProtoUtil.decodeProtoYaml( "---\n" @@ -195,7 +194,7 @@ public void testImportFileJson() throws IOException { + " fields:\n" + " - name: id\n" + " - name: x\n" - + " featureId: testEntity.none.testInt32\n" + + " featureId: testEntity.testInt32\n" + "\n", 
ImportSpec.getDefaultInstance()); @@ -234,19 +233,17 @@ public void testImportFileJson() throws IOException { Lists.newArrayList( normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(Timestamp.getDefaultInstance()) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .setEntityKey("1") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(101))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) .build()), normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(Timestamp.getDefaultInstance()) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .setEntityKey("2") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(202))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(202))) .build())); PAssert.that(writtenToErrors).satisfies(hasCount(0)); @@ -277,8 +274,8 @@ public void testImportCSV_withSample1() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.testInt32\n" - + " - featureId: testEntity.none.testString\n" + + " - featureId: testEntity.testInt32\n" + + " - featureId: testEntity.testString\n" + "\n", ImportSpec.getDefaultInstance()); @@ -322,7 +319,7 @@ public void testImportCSV_withSample1() throws IOException { } @Test - public void testImportCSV_withCoalesceRows() throws IOException { + public void testImportCSV_withCoalesceRows() throws IOException, ParseException { ImportSpec importSpec = ProtoUtil.decodeProtoYaml( "---\n" @@ -339,8 +336,8 @@ public void testImportCSV_withCoalesceRows() throws IOException { + " fields:\n" + " - name: id\n" + " - name: timestamp\n" - + " - featureId: testEntity.none.testInt32\n" - + " - featureId: testEntity.none.testString\n" + + " - featureId: testEntity.testInt32\n" + + " - featureId: testEntity.testString\n" + "\n", ImportSpec.getDefaultInstance()); @@ -383,29 +380,29 @@ public void testImportCSV_withCoalesceRows() throws IOException { .containsInAnyOrder( normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) .setEntityKey("1") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("b"))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) + .addFeatures(Features.of("testEntity.testString", Values.ofString("b"))) + .setEventTimestamp(Timestamps.parse("2018-09-26T00:00:00.000Z")) .build())); PAssert.that(writtenToWarehouse.apply("warehouse toFeatureRows", new ToOrderedFeatureRows())) .containsInAnyOrder( normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) .setEntityKey("1") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("a"))) + .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) + .addFeatures(Features.of("testEntity.testString", Values.ofString("a"))) + .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) .build()), normalize( FeatureRow.newBuilder() - .setGranularity(Granularity.Enum.NONE) .setEntityKey("1") .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.none.testString", Values.ofString("b"))) + .addFeatures(Features.of("testEntity.testString", 
Values.ofString("b"))) + .setEventTimestamp(Timestamps.parse("2018-09-26T00:00:00.000Z")) .build())); testPipeline.run(); @@ -429,9 +426,9 @@ public void testImportCSVUnknownServingStoreError() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.unknownInt32\n" + + " - featureId: testEntity.unknownInt32\n" // Unknown store is not available - + " - featureId: testEntity.none.testString\n" + + " - featureId: testEntity.testString\n" + "\n", ImportSpec.getDefaultInstance()); @@ -484,8 +481,8 @@ public void testImportWithErrors() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.testString\n" - + " - featureId: testEntity.none.testInt32\n" + + " - featureId: testEntity.testString\n" + + " - featureId: testEntity.testInt32\n" + "\n", ImportSpec.getDefaultInstance()); @@ -552,8 +549,8 @@ public void testImportWithoutWarehouseStore() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.testInt64NoWarehouse\n" - + " - featureId: testEntity.none.testStringNoWarehouse\n" + + " - featureId: testEntity.testInt64NoWarehouse\n" + + " - featureId: testEntity.testStringNoWarehouse\n" + "\n", ImportSpec.getDefaultInstance()); @@ -617,8 +614,8 @@ public void testImportWithoutWarehouseStoreSetByFeature() throws IOException { + " timestampValue: 2018-09-25T00:00:00.000Z\n" + " fields:\n" + " - name: id\n" - + " - featureId: testEntity.none.testInt64NoWarehouse\n" - + " - featureId: testEntity.none.testStringNoWarehouse\n" + + " - featureId: testEntity.testInt64NoWarehouse\n" + + " - featureId: testEntity.testStringNoWarehouse\n" + "\n", ImportSpec.getDefaultInstance()); diff --git a/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java b/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java index 3494e4c41b..357df7d6a7 100644 --- a/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java +++ b/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java @@ -29,7 +29,6 @@ import feast.specs.ImportSpecProto.ImportSpec; import feast.specs.ImportSpecProto.Schema; import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.GranularityProto.Granularity; import feast.types.ValueProto.ValueType.Enum; import java.io.File; import java.io.IOException; @@ -41,8 +40,7 @@ public class ImportJobSpecsSupplierTest { - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); String importSpecYaml = "---\n" + "servingStorageSpecs:\n" @@ -58,9 +56,8 @@ public class ImportJobSpecsSupplierTest { + " description: This is a test entity\n" + " tags: []\n" + "featureSpecs:\n" - + " - id: testEntity.day.testInt64\n" + + " - id: testEntity.testInt64\n" + " entity: testEntity\n" - + " granularity: DAY\n" + " name: testInt64\n" + " owner: feast@example.com\n" + " description: This is test feature of type integer\n" @@ -81,7 +78,7 @@ public class ImportJobSpecsSupplierTest { + " - name: timestamp\n" + " - name: driver_id\n" + " - name: trips_completed\n" - + " featureId: driver.none.trips_completed\n" + + " featureId: driver.trips_completed\n" + "\n"; @Test @@ -93,48 +90,51 @@ public void testSupplierImportSpecYamlFile() throws IOException { ImportJobSpecs importJobSpecs = new 
ImportJobSpecsSupplier(yamlFile.getParent()).get(); Specs specs = new Specs("", importJobSpecs); - System.out - .println(JsonFormat.printer().omittingInsignificantWhitespace().print(importJobSpecs)); - assertEquals(ImportSpec.newBuilder() - .setType("file.csv") - .putSourceOptions("path", "data.csv") - .addEntities("driver") - .setSchema( - Schema.newBuilder() - .addFields(Field.newBuilder().setName("timestamp")) - .addFields(Field.newBuilder().setName("driver_id")) - .addFields( - Field.newBuilder() - .setName("trips_completed") - .setFeatureId("driver.none.trips_completed")) - .setEntityIdColumn("driver_id") - .setTimestampValue(DateUtil.toTimestamp("2018-09-25T00:00:00.000Z"))) - .build(), importJobSpecs.getImportSpec()); + System.out.println( + JsonFormat.printer().omittingInsignificantWhitespace().print(importJobSpecs)); + assertEquals( + ImportSpec.newBuilder() + .setType("file.csv") + .putSourceOptions("path", "data.csv") + .addEntities("driver") + .setSchema( + Schema.newBuilder() + .addFields(Field.newBuilder().setName("timestamp")) + .addFields(Field.newBuilder().setName("driver_id")) + .addFields( + Field.newBuilder() + .setName("trips_completed") + .setFeatureId("driver.trips_completed")) + .setEntityIdColumn("driver_id") + .setTimestampValue(DateUtil.toTimestamp("2018-09-25T00:00:00.000Z"))) + .build(), + importJobSpecs.getImportSpec()); - assertEquals(StorageSpec.newBuilder() - .setId("TEST_SERVING") - .setType("serving.mock") - .build(), importJobSpecs.getServingStorageSpecs(0)); + assertEquals( + StorageSpec.newBuilder().setId("TEST_SERVING").setType("serving.mock").build(), + importJobSpecs.getServingStorageSpecs(0)); - assertEquals(StorageSpec.newBuilder() - .setId("TEST_WAREHOUSE") - .setType("warehouse.mock") - .build(), importJobSpecs.getWarehouseStorageSpecs(0)); + assertEquals( + StorageSpec.newBuilder().setId("TEST_WAREHOUSE").setType("warehouse.mock").build(), + importJobSpecs.getWarehouseStorageSpecs(0)); - assertEquals(EntitySpec.newBuilder() - .setName("testEntity") - .setDescription("This is a test entity") - .build(), specs.getEntitySpec("testEntity")); + assertEquals( + EntitySpec.newBuilder() + .setName("testEntity") + .setDescription("This is a test entity") + .build(), + specs.getEntitySpec("testEntity")); - assertEquals(FeatureSpec.newBuilder() - .setId("testEntity.day.testInt64") - .setEntity("testEntity") - .setName("testInt64") - .setOwner("feast@example.com") - .setUri("https://example.com/") - .setValueType(Enum.INT64) - .setGranularity(Granularity.Enum.DAY) - .setDescription("This is test feature of type integer") - .build(), specs.getFeatureSpec("testEntity.day.testInt64")); + assertEquals( + FeatureSpec.newBuilder() + .setId("testEntity.testInt64") + .setEntity("testEntity") + .setName("testInt64") + .setOwner("feast@example.com") + .setUri("https://example.com/") + .setValueType(Enum.INT64) + .setDescription("This is test feature of type integer") + .build(), + specs.getFeatureSpec("testEntity.testInt64")); } } diff --git a/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java b/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java index b6f2d5ed98..7bea79405b 100644 --- a/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java +++ b/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java @@ -52,7 +52,7 @@ public void testSingleFeatureAndEntity() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - 
.setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); @@ -65,7 +65,7 @@ public void testSingleFeatureAndEntity() { assertTrue(specs.getEntitySpecs().containsKey("testEntity")); assertEquals(1, specs.getFeatureSpecs().size()); - assertTrue(specs.getFeatureSpecs().containsKey("testEntity.none.testInt32")); + assertTrue(specs.getFeatureSpecs().containsKey("testEntity.testInt32")); assertTrue(specs.getServingStorageSpecs().containsKey("TEST_SERVING")); } @@ -75,7 +75,7 @@ public void testErrorOnUnknownEntity() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); @@ -89,7 +89,7 @@ public void testErrorOnUnknownFeature() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); @@ -103,14 +103,14 @@ public void testGetFeatureSpec() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); specs.validate(); assertEquals( - "testEntity.none.testInt32", specs.getFeatureSpec("testEntity.none.testInt32").getId()); + "testEntity.testInt32", specs.getFeatureSpec("testEntity.testInt32").getId()); } @Test @@ -118,7 +118,7 @@ public void testGetEntitySpec() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); @@ -132,7 +132,7 @@ public void testGetStorageSpec() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); @@ -147,7 +147,7 @@ public void testFeatureSpecReferencesUnknownEntity() { ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() .setImportSpec(ImportSpec.newBuilder() .addEntities("totally_different_entity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.none.testInt32"))) + .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) ).build(); Specs specs = Specs.of("testjob", importJobSpecs); diff --git a/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java b/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java index 0068748969..e8fcc5818a 100644 --- 
a/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java @@ -34,9 +34,9 @@ public class FilterFeatureRowDoFnTest { @Test public void shouldIgnoreUnspecifiedFeatureID() { - String featureId1 = "testentity.none.feature1"; - String featureId2 = "testentity.hour.feature2"; - String featureId3 = "testentity.day.feature3"; + String featureId1 = "testentity.feature1"; + String featureId2 = "testentity.feature2"; + String featureId3 = "testentity.feature3"; List specifiedFeatureIds = Arrays.asList(featureId1, featureId2, featureId3); FilterFeatureRowDoFn doFn = new FilterFeatureRowDoFn(specifiedFeatureIds); @@ -52,7 +52,7 @@ public void shouldIgnoreUnspecifiedFeatureID() { .addFeatures( Feature.newBuilder().setId(featureId3).setValue(Value.newBuilder().setInt64Val(12))) // this feature should be ignored - .addFeatures(Feature.newBuilder().setId("testEntity.none.unknown_feature")) + .addFeatures(Feature.newBuilder().setId("testEntity.unknown_feature")) .build(); PCollection output = testPipeline.apply(Create.of(row)) diff --git a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java index a81b29a802..9a2d57a23c 100644 --- a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java @@ -25,7 +25,6 @@ import com.google.protobuf.Timestamp; import junit.framework.TestCase; import org.joda.time.DateTime; -import feast.types.GranularityProto.Granularity.Enum; public class DateUtilTest extends TestCase { public void testStringToTimestamp() { @@ -63,63 +62,4 @@ public void testTimestampToDateTime() { assertThat(34, is(equalTo(datetime.getSecondOfMinute()))); assertThat(123, is(equalTo(datetime.getMillisOfSecond()))); } - - public void testRoundToSecondGranularity() { - Timestamp timestamp1 = DateUtil.toTimestamp("2018-07-03T15:09:34.123888999Z"); - Timestamp timestamp2 = DateUtil.roundToGranularity(timestamp1, Enum.SECOND); - DateTime actual = DateUtil.toDateTime(timestamp2); - assertThat(2018, is(equalTo(actual.getYear()))); - assertThat(7, is(equalTo(actual.getMonthOfYear()))); - assertThat(3, is(equalTo(actual.getDayOfMonth()))); - assertThat(15, is(equalTo(actual.getHourOfDay()))); - assertThat(9, is(equalTo(actual.getMinuteOfHour()))); - assertThat(34, is(equalTo(actual.getSecondOfMinute()))); - assertThat(0, is(equalTo(actual.getMillisOfSecond()))); - } - - public void testRoundToMinuteGranularity() { - Timestamp timestamp1 = DateUtil.toTimestamp("2018-07-03T15:09:34.123888999Z"); - Timestamp timestamp2 = DateUtil.roundToGranularity(timestamp1, Enum.MINUTE); - DateTime actual = DateUtil.toDateTime(timestamp2); - assertThat(2018, is(equalTo(actual.getYear()))); - assertThat(7, is(equalTo(actual.getMonthOfYear()))); - assertThat(3, is(equalTo(actual.getDayOfMonth()))); - assertThat(15, is(equalTo(actual.getHourOfDay()))); - assertThat(9, is(equalTo(actual.getMinuteOfHour()))); - assertThat(0, is(equalTo(actual.getSecondOfMinute()))); - assertThat(0, is(equalTo(actual.getMillisOfSecond()))); - } - - public void testRoundToHourGranularity() { - Timestamp timestamp1 = DateUtil.toTimestamp("2018-07-03T15:09:34.123888999Z"); - Timestamp timestamp2 = DateUtil.roundToGranularity(timestamp1, Enum.HOUR); - DateTime actual = DateUtil.toDateTime(timestamp2); - assertThat(2018, is(equalTo(actual.getYear()))); - assertThat(7, 
is(equalTo(actual.getMonthOfYear()))); - assertThat(3, is(equalTo(actual.getDayOfMonth()))); - assertThat(15, is(equalTo(actual.getHourOfDay()))); - assertThat(0, is(equalTo(actual.getMinuteOfHour()))); - assertThat(0, is(equalTo(actual.getSecondOfMinute()))); - assertThat(0, is(equalTo(actual.getMillisOfSecond()))); - } - - public void testRoundToDayGranularity() { - Timestamp timestamp1 = DateUtil.toTimestamp("2018-07-03T15:09:34.123888999Z"); - Timestamp timestamp2 = DateUtil.roundToGranularity(timestamp1, Enum.DAY); - DateTime actual = DateUtil.toDateTime(timestamp2); - assertThat(2018, is(equalTo(actual.getYear()))); - assertThat(7, is(equalTo(actual.getMonthOfYear()))); - assertThat(3, is(equalTo(actual.getDayOfMonth()))); - assertThat(0, is(equalTo(actual.getHourOfDay()))); - assertThat(0, is(equalTo(actual.getMinuteOfHour()))); - assertThat(0, is(equalTo(actual.getSecondOfMinute()))); - assertThat(0, is(equalTo(actual.getMillisOfSecond()))); - } - - public void testRoundToNoneGranularity() { - Timestamp timestamp1 = DateUtil.toTimestamp("2018-07-03T15:09:34.123888999Z"); - Timestamp timestamp2 = DateUtil.roundToGranularity(timestamp1, Enum.NONE); - DateTime actual = DateUtil.toDateTime(timestamp2); - assertThat(0L, is(equalTo(actual.getMillis()))); - } } diff --git a/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java b/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java index 4c809fb5cc..e33bcc0249 100644 --- a/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java +++ b/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java @@ -26,7 +26,6 @@ import com.google.cloud.bigquery.LegacySQLTypeName; import com.google.common.collect.Lists; import com.google.protobuf.Timestamp; -import feast.source.bigquery.BigQueryToFeatureRowFn; import org.apache.avro.generic.GenericRecord; import org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord; import org.joda.time.DateTime; @@ -62,7 +61,7 @@ public void testStringEntityKey() { .addFields( Field.newBuilder() .setName("bq_value") - .setFeatureId("testEntity.day.testInt64"))) + .setFeatureId("testEntity.testInt64"))) .build(); GenericRecord record = mock(GenericRecord.class); @@ -86,7 +85,7 @@ public void testStringEntityKey() { row.getFeaturesList(), equalTo( Lists.newArrayList( - Features.of("testEntity.day.testInt64", Values.ofInt64(Long.MAX_VALUE))))); + Features.of("testEntity.testInt64", Values.ofInt64(Long.MAX_VALUE))))); } @Test @@ -105,7 +104,7 @@ public void testInt64EntityKey() { .addFields( Field.newBuilder() .setName("bq_value") - .setFeatureId("testEntity.day.testInt64"))) + .setFeatureId("testEntity.testInt64"))) .build(); GenericRecord record = mock(GenericRecord.class); @@ -129,6 +128,6 @@ public void testInt64EntityKey() { row.getFeaturesList(), equalTo( Lists.newArrayList( - Features.of("testEntity.day.testInt64", Values.ofInt64(Long.MAX_VALUE))))); + Features.of("testEntity.testInt64", Values.ofInt64(Long.MAX_VALUE))))); } } diff --git a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java b/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java index 940778ffdb..39d452ce46 100644 --- a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java +++ b/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java @@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals; import com.google.common.io.Resources; -import 
com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Timestamp; import feast.ingestion.config.ImportJobSpecsSupplier; import feast.ingestion.model.Features; @@ -39,8 +38,6 @@ import feast.store.FeatureStoreWrite; import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto.FeatureRow; -import feast.types.GranularityProto.Granularity; -import feast.types.ValueProto.Value; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; @@ -58,10 +55,10 @@ public class FeatureRowRedisIOWriteTest { - private static final String featureNoneInt32 = "testEntity.none.redisInt32"; - private static final String featureNoneString = "testEntity.none.redisString"; - private static final String featureHourInt32 = "testEntity.hour.redisInt32"; - private static final String featureHourString = "testEntity.hour.redisString"; + private static final String featureNoneInt32 = "testEntity.redisInt32"; + private static final String featureNoneString = "testEntity.redisString"; + private static final String featureHourInt32 = "testEntity.redisInt32"; + private static final String featureHourString = "testEntity.redisString"; private static int REDIS_PORT = 51234; private static Redis redis; @@ -108,7 +105,7 @@ Specs getSpecs() { } @Test - public void testWriteNoneGranularity() throws IOException { + public void testWrite() throws IOException { Specs specs = getSpecs(); specs.validate(); @@ -117,14 +114,15 @@ public void testWriteNoneGranularity() throws IOException { new FeatureRowRedisIO.Write( RedisStoreOptions.builder().host("localhost").port(REDIS_PORT).build(), specs); + Timestamp now = DateUtil.toTimestamp(DateTime.now()); + FeatureRowExtended rowExtended = FeatureRowExtended.newBuilder() .setRow( FeatureRow.newBuilder() .setEntityName("testEntity") .setEntityKey("1") - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(DateUtil.toTimestamp(DateTime.now())) + .setEventTimestamp(now) .addFeatures(Features.of(featureNoneInt32, Values.ofInt32(1))) .addFeatures(Features.of(featureNoneString, Values.ofString("a")))) .build(); @@ -146,27 +144,25 @@ public void testWriteNoneGranularity() throws IOException { RedisBucketValue.parseFrom(jedis.get(featureStringKey.toByteArray())); assertEquals(Values.ofInt32(1), featureInt32Value.getValue()); - // Timestamp is 0 for NONE granularity - assertEquals(Timestamp.getDefaultInstance(), featureInt32Value.getEventTimestamp()); + assertEquals(now, featureInt32Value.getEventTimestamp()); assertEquals(Values.ofString("a"), featureStringValue.getValue()); - // Timestamp is 0 for NONE granularity - assertEquals(Timestamp.getDefaultInstance(), featureStringValue.getEventTimestamp()); + assertEquals(now, featureStringValue.getEventTimestamp()); } @Test - public void testWriteNoneGranularityFromOptions() throws IOException { + public void testWriteFromOptions() throws IOException { Specs specs = getSpecs(); FeatureStoreWrite write = new RedisServingFactory() .create(specs.getServingStorageSpecs().get("REDIS1"), specs); + Timestamp now = DateUtil.toTimestamp(DateTime.now()); FeatureRowExtended rowExtended = FeatureRowExtended.newBuilder() .setRow( FeatureRow.newBuilder() .setEntityName("testEntity") .setEntityKey("1") - .setGranularity(Granularity.Enum.NONE) - .setEventTimestamp(DateUtil.toTimestamp(DateTime.now())) + .setEventTimestamp(now) .addFeatures(Features.of(featureNoneInt32, Values.ofInt32(1))) .addFeatures(Features.of(featureNoneString, Values.ofString("a")))) .build(); @@ -188,53 
+184,8 @@ public void testWriteNoneGranularityFromOptions() throws IOException { RedisBucketValue.parseFrom(jedis.get(featureStringKey.toByteArray())); assertEquals(Values.ofInt32(1), featureInt32Value.getValue()); - // Timestamp is 0 for NONE granularity - assertEquals(Timestamp.getDefaultInstance(), featureInt32Value.getEventTimestamp()); + assertEquals(now, featureInt32Value.getEventTimestamp()); assertEquals(Values.ofString("a"), featureStringValue.getValue()); - // Timestamp is 0 for NONE granularity - assertEquals(Timestamp.getDefaultInstance(), featureStringValue.getEventTimestamp()); - } - - @Test - public void testWriteHourGranularity() throws IOException { - Specs specs = getSpecs(); - FeatureStoreWrite write = new RedisServingFactory() - .create(specs.getServingStorageSpecs().get("REDIS1"), specs); - - FeatureRowExtended rowExtended = - FeatureRowExtended.newBuilder() - .setRow( - FeatureRow.newBuilder() - .setEntityName("testEntity") - .setEntityKey("1") - .setGranularity(Granularity.Enum.HOUR) - .setEventTimestamp(DateUtil.toTimestamp(DateTime.now())) - .addFeatures(Features.of(featureHourInt32, Values.ofInt32(1))) - .addFeatures(Features.of(featureHourString, Values.ofString("a")))) - .build(); - - PCollection input = testPipeline.apply(Create.of(rowExtended)); - - input.apply("write to embedded redis", write); - - testPipeline.run(); - - Timestamp rowTimestamp = rowExtended.getRow().getEventTimestamp(); - Timestamp roundedTimestamp = DateUtil.roundToGranularity(rowTimestamp, Granularity.Enum.HOUR); - - RedisBucketKey featureInt32LatestKey = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureHourInt32), 0L); - RedisBucketKey featureStringLatestKey = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureHourString), 0L); - - checkRedisValue(featureInt32LatestKey, Values.ofInt32(1), roundedTimestamp); - checkRedisValue(featureStringLatestKey, Values.ofString("a"), roundedTimestamp); - } - - void checkRedisValue(RedisBucketKey key, Value expectedValue, Timestamp expectedTimestamp) - throws InvalidProtocolBufferException { - RedisBucketValue featureInt32Value = RedisBucketValue.parseFrom(jedis.get(key.toByteArray())); - assertEquals(expectedValue, featureInt32Value.getValue()); - assertEquals(expectedTimestamp, featureInt32Value.getEventTimestamp()); + assertEquals(now, featureStringValue.getEventTimestamp()); } } diff --git a/ingestion/src/test/resources/import-specs/csv_to_store1.yaml b/ingestion/src/test/resources/import-specs/csv_to_store1.yaml index c17e5f80d6..57c8d4be5f 100644 --- a/ingestion/src/test/resources/import-specs/csv_to_store1.yaml +++ b/ingestion/src/test/resources/import-specs/csv_to_store1.yaml @@ -11,6 +11,6 @@ schema: fields: - name: timestamp - name: id - - featureId: testEntity.none.testInt32 - - featureId: testEntity.none.testString + - featureId: testEntity.testInt32 + - featureId: testEntity.testString diff --git a/ingestion/src/test/resources/specs/importJobSpecs.yaml b/ingestion/src/test/resources/specs/importJobSpecs.yaml index cbc48b9558..a6b01b2d7f 100644 --- a/ingestion/src/test/resources/specs/importJobSpecs.yaml +++ b/ingestion/src/test/resources/specs/importJobSpecs.yaml @@ -16,9 +16,8 @@ entitySpecs: description: This is a test entity tags: [] featureSpecs: - - id: testEntity.day.testInt64 + - id: testEntity.testInt64 entity: testEntity - granularity: DAY name: testInt64 owner: feast@example.com description: This is test feature of type integer @@ -31,38 +30,8 @@ featureSpecs: id: TEST_SERVING warehouse: id: TEST_WAREHOUSE - - id: 
testEntity.hour.redisInt32 + - id: testEntity.redisInt32 entity: testEntity - granularity: HOUR - name: redisInt32 - owner: feast@example.com - description: This is test feature of type integer that goes to redis - uri: https://example.com/ - valueType: INT32 - tags: [] - options: {} - dataStores: - serving: - id: REDIS1 - options: - expiry: PT1H - bucketSize: PT10H - - id: testEntity.hour.redisString - entity: testEntity - granularity: HOUR - name: redisString - owner: feast@example.com - description: This is test feature of type integer that goes to redis - uri: https://example.com/ - valueType: STRING - tags: [] - options: {} - dataStores: - serving: - id: REDIS1 - - id: testEntity.none.redisInt32 - entity: testEntity - granularity: NONE name: redisInt32 owner: feast@example.com description: This is test feature of type integer the goes to redis @@ -75,9 +44,8 @@ featureSpecs: id: REDIS1 options: expiry: PT1H - - id: testEntity.none.redisString + - id: testEntity.redisString entity: testEntity - granularity: NONE name: redisString owner: feast@example.com description: This is test feature of type integer that goes to redis @@ -88,9 +56,8 @@ featureSpecs: dataStores: serving: id: REDIS1 - - id: testEntity.none.testInt32 + - id: testEntity.testInt32 entity: testEntity - granularity: NONE name: testInt32 owner: feast@example.com description: This is test feature of type integer @@ -103,9 +70,8 @@ featureSpecs: id: TEST_SERVING warehouse: id: TEST_WAREHOUSE - - id: testEntity.none.testInt64 + - id: testEntity.testInt64 entity: testEntity - granularity: NONE name: testInt64 owner: feast@example.com description: This is test feature of type integer @@ -118,9 +84,8 @@ featureSpecs: id: TEST_SERVING warehouse: id: TEST_WAREHOUSE - - id: testEntity.none.testInt64NoWarehouse + - id: testEntity.testInt64NoWarehouse entity: testEntity - granularity: NONE name: testInt64NoWarehouse owner: feast@example.com description: This is test feature of type integer @@ -131,9 +96,8 @@ featureSpecs: dataStores: serving: id: TEST_SERVING - - id: testEntity.none.testString + - id: testEntity.testString entity: testEntity - granularity: NONE name: testString owner: feast@example.com description: This is test feature of type integer @@ -144,9 +108,8 @@ featureSpecs: id: TEST_SERVING warehouse: id: TEST_WAREHOUSE - - id: testEntity.none.testStringNoWarehouse + - id: testEntity.testStringNoWarehouse entity: testEntity - granularity: NONE name: testStringNoWarehouse owner: feast@example.com description: This is test feature of type integer @@ -155,9 +118,8 @@ featureSpecs: dataStores: serving: id: TEST_SERVING - - id: testEntity.none.unknownInt32 + - id: testEntity.unknownInt32 entity: testEntity - granularity: NONE name: unknownInt32 owner: feast@example.com description: This is test feature of type integer that goes to an unknown serving diff --git a/protos/feast/specs/FeatureSpec.proto b/protos/feast/specs/FeatureSpec.proto index 2024667081..97681d463d 100644 --- a/protos/feast/specs/FeatureSpec.proto +++ b/protos/feast/specs/FeatureSpec.proto @@ -18,7 +18,6 @@ syntax = "proto3"; import "feast/specs/EntitySpec.proto"; import "feast/specs/StorageSpec.proto"; -import "feast/types/Granularity.proto"; import "feast/types/Value.proto"; package feast.specs; @@ -33,7 +32,6 @@ message FeatureSpec { string owner = 3; string description = 4; string uri = 5; - feast.types.Granularity.Enum granularity = 6; feast.types.ValueType.Enum valueType = 7; string entity = 8; string group = 9; diff --git 
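The hunks above complete the removal of granularity from the spec surface: FeatureSpec.proto drops the feast/types/Granularity.proto import and the "feast.types.Granularity.Enum granularity = 6;" field, and every fixture ID collapses from the three-part entity.granularity.name form to entity.name. As a minimal sketch of the new ID convention — validateFeatureId is a hypothetical illustration for this annotation, not a helper the patch adds:

package parse

import (
	"fmt"
	"strings"
)

// validateFeatureId checks the post-granularity feature ID convention
// <entity>.<name>; the pre-change form was <entity>.<granularity>.<name>.
// Hypothetical helper, shown only to illustrate the new format.
func validateFeatureId(id string) error {
	parts := strings.Split(id, ".")
	if len(parts) != 2 || parts[0] == "" || parts[1] == "" {
		return fmt.Errorf("invalid feature id %q: want <entity>.<name>", id)
	}
	return nil
}

Under this sketch, validateFeatureId("testEntity.testInt32") passes, while the pre-change "testEntity.none.testInt32" would be rejected.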
a/protos/feast/storage/BigTable.proto b/protos/feast/storage/BigTable.proto index 1a6f994d8d..61cbed798b 100644 --- a/protos/feast/storage/BigTable.proto +++ b/protos/feast/storage/BigTable.proto @@ -26,6 +26,5 @@ message BigTableRowKey { // This should be the first 7 characters of a sha1 of the entityKey proto encoded string sha1Prefix = 1; string entityKey = 2; - // This should be String.valueOf(int64.MAX_VALUE - roundToEntityGranularity(eventTimestamp)) string reversedMillis = 3; } diff --git a/protos/feast/types/FeatureRow.proto b/protos/feast/types/FeatureRow.proto index 55e9a5acca..b17ad7bbc9 100644 --- a/protos/feast/types/FeatureRow.proto +++ b/protos/feast/types/FeatureRow.proto @@ -18,7 +18,6 @@ syntax = "proto3"; import "google/protobuf/timestamp.proto"; import "feast/types/Feature.proto"; -import "feast/types/Granularity.proto"; package feast.types; @@ -30,7 +29,6 @@ message FeatureRowKey { string entityKey = 1; google.protobuf.Timestamp eventTimestamp = 3; string entityName = 4; - Granularity.Enum granularity = 5; } message FeatureRow { @@ -38,5 +36,4 @@ message FeatureRow { repeated Feature features = 2; google.protobuf.Timestamp eventTimestamp = 3; string entityName = 4; - Granularity.Enum granularity = 5; } diff --git a/protos/feast/types/Granularity.proto b/protos/feast/types/Granularity.proto deleted file mode 100644 index c45dfc23fb..0000000000 --- a/protos/feast/types/Granularity.proto +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -syntax = "proto3"; - -package feast.types; - -option java_package = "feast.types"; -option java_outer_classname = "GranularityProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/types"; - -message Granularity { - enum Enum { - NONE = 0; - DAY = 1; - HOUR = 2; - MINUTE = 3; - SECOND = 4; - } -} diff --git a/protos/generated/go/feast/core/CoreService.pb.go b/protos/generated/go/feast/core/CoreService.pb.go index 76f3897433..4037da58af 100644 --- a/protos/generated/go/feast/core/CoreService.pb.go +++ b/protos/generated/go/feast/core/CoreService.pb.go @@ -35,7 +35,7 @@ func (m *CoreServiceTypes) Reset() { *m = CoreServiceTypes{} } func (m *CoreServiceTypes) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes) ProtoMessage() {} func (*CoreServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0} } func (m *CoreServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes.Unmarshal(m, b) @@ -66,7 +66,7 @@ func (m *CoreServiceTypes_GetEntitiesRequest) Reset() { *m = CoreService func (m *CoreServiceTypes_GetEntitiesRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetEntitiesRequest) ProtoMessage() {} func (*CoreServiceTypes_GetEntitiesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 0} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 0} } func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Unmarshal(m, b) @@ -104,7 +104,7 @@ func (m *CoreServiceTypes_GetEntitiesResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_GetEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetEntitiesResponse) ProtoMessage() {} func (*CoreServiceTypes_GetEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 1} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 1} } func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Unmarshal(m, b) @@ -142,7 +142,7 @@ func (m *CoreServiceTypes_ListEntitiesResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListEntitiesResponse) ProtoMessage() {} func (*CoreServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 2} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 2} } func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Unmarshal(m, b) @@ -181,7 +181,7 @@ func (m *CoreServiceTypes_GetFeaturesRequest) Reset() { *m = CoreService func (m *CoreServiceTypes_GetFeaturesRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetFeaturesRequest) ProtoMessage() {} func (*CoreServiceTypes_GetFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 3} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 3} } func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Unmarshal(m, b) @@ -219,7 +219,7 @@ func (m *CoreServiceTypes_GetFeaturesResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_GetFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetFeaturesResponse) ProtoMessage() {} func (*CoreServiceTypes_GetFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 4} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 4} } func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Unmarshal(m, b) @@ -257,7 +257,7 @@ func (m *CoreServiceTypes_ListFeaturesResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListFeaturesResponse) ProtoMessage() {} func (*CoreServiceTypes_ListFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 5} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 5} } func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Unmarshal(m, b) @@ -296,7 +296,7 @@ func (m *CoreServiceTypes_GetStorageRequest) Reset() { *m = CoreServiceT func (m *CoreServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetStorageRequest) ProtoMessage() {} func (*CoreServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 6} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 6} } func (m *CoreServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Unmarshal(m, b) @@ -334,7 +334,7 @@ func (m *CoreServiceTypes_GetStorageResponse) Reset() { *m = CoreService func (m *CoreServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 7} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 7} } func (m *CoreServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Unmarshal(m, b) @@ -372,7 +372,7 @@ func (m *CoreServiceTypes_ListStorageResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 8} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 8} } func (m *CoreServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Unmarshal(m, b) @@ -411,7 +411,7 @@ func (m *CoreServiceTypes_ApplyEntityResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_ApplyEntityResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyEntityResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, 
[]int{0, 9} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 9} } func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Unmarshal(m, b) @@ -450,7 +450,7 @@ func (m *CoreServiceTypes_ApplyFeatureResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ApplyFeatureResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyFeatureResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 10} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 10} } func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Unmarshal(m, b) @@ -493,7 +493,7 @@ func (m *CoreServiceTypes_ApplyFeatureGroupResponse) String() string { } func (*CoreServiceTypes_ApplyFeatureGroupResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 11} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 11} } func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Unmarshal(m, b) @@ -532,7 +532,7 @@ func (m *CoreServiceTypes_ApplyStorageResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ApplyStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_f716411f9ac886b5, []int{0, 12} + return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 12} } func (m *CoreServiceTypes_ApplyStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Unmarshal(m, b) @@ -1006,10 +1006,10 @@ var _CoreService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_CoreService_f716411f9ac886b5) + proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_CoreService_a0e9a1504f969203) } -var fileDescriptor_CoreService_f716411f9ac886b5 = []byte{ +var fileDescriptor_CoreService_a0e9a1504f969203 = []byte{ // 602 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5b, 0x6f, 0x12, 0x41, 0x14, 0x86, 0x34, 0x69, 0xe0, 0x40, 0x4c, 0x3b, 0x34, 0x8a, 0x23, 0x35, 0xcd, 0x26, 0x36, 0x7d, diff --git a/protos/generated/go/feast/core/DatasetService.pb.go b/protos/generated/go/feast/core/DatasetService.pb.go index 61b0a09975..d2d3b01c93 100644 --- a/protos/generated/go/feast/core/DatasetService.pb.go +++ b/protos/generated/go/feast/core/DatasetService.pb.go @@ -34,7 +34,7 @@ func (m *DatasetServiceTypes) Reset() { *m = DatasetServiceTypes{} } func (m *DatasetServiceTypes) String() string { return proto.CompactTextString(m) } func (*DatasetServiceTypes) ProtoMessage() {} func (*DatasetServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_9ba96186e7cec93b, []int{0} + return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0} } func (m *DatasetServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes.Unmarshal(m, b) @@ -77,7 +77,7 @@ func (m *DatasetServiceTypes_CreateDatasetRequest) 
Reset() { func (m *DatasetServiceTypes_CreateDatasetRequest) String() string { return proto.CompactTextString(m) } func (*DatasetServiceTypes_CreateDatasetRequest) ProtoMessage() {} func (*DatasetServiceTypes_CreateDatasetRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_9ba96186e7cec93b, []int{0, 0} + return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 0} } func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Unmarshal(m, b) @@ -146,7 +146,7 @@ func (m *DatasetServiceTypes_CreateDatasetResponse) Reset() { func (m *DatasetServiceTypes_CreateDatasetResponse) String() string { return proto.CompactTextString(m) } func (*DatasetServiceTypes_CreateDatasetResponse) ProtoMessage() {} func (*DatasetServiceTypes_CreateDatasetResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_9ba96186e7cec93b, []int{0, 1} + return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 1} } func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Unmarshal(m, b) @@ -188,7 +188,7 @@ func (m *FeatureSet) Reset() { *m = FeatureSet{} } func (m *FeatureSet) String() string { return proto.CompactTextString(m) } func (*FeatureSet) ProtoMessage() {} func (*FeatureSet) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_9ba96186e7cec93b, []int{1} + return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{1} } func (m *FeatureSet) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureSet.Unmarshal(m, b) @@ -237,7 +237,7 @@ func (m *DatasetInfo) Reset() { *m = DatasetInfo{} } func (m *DatasetInfo) String() string { return proto.CompactTextString(m) } func (*DatasetInfo) ProtoMessage() {} func (*DatasetInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_9ba96186e7cec93b, []int{2} + return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{2} } func (m *DatasetInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetInfo.Unmarshal(m, b) @@ -354,10 +354,10 @@ var _DatasetService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("feast/core/DatasetService.proto", fileDescriptor_DatasetService_9ba96186e7cec93b) + proto.RegisterFile("feast/core/DatasetService.proto", fileDescriptor_DatasetService_37ae639a8c7b5dd5) } -var fileDescriptor_DatasetService_9ba96186e7cec93b = []byte{ +var fileDescriptor_DatasetService_37ae639a8c7b5dd5 = []byte{ // 414 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x6e, 0xd4, 0x30, 0x10, 0x25, 0xbb, 0x2d, 0x90, 0x59, 0xc1, 0xc1, 0x14, 0x88, 0x72, 0xa0, 0x51, 0x4e, 0x7b, 0xb2, diff --git a/protos/generated/go/feast/core/JobService.pb.go b/protos/generated/go/feast/core/JobService.pb.go index e830edfe5f..6a3a4e6dc0 100644 --- a/protos/generated/go/feast/core/JobService.pb.go +++ b/protos/generated/go/feast/core/JobService.pb.go @@ -36,7 +36,7 @@ func (m *JobServiceTypes) Reset() { *m = JobServiceTypes{} } func (m *JobServiceTypes) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes) ProtoMessage() {} func (*JobServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0} } func (m *JobServiceTypes) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_JobServiceTypes.Unmarshal(m, b) @@ -70,7 +70,7 @@ func (m *JobServiceTypes_SubmitImportJobRequest) Reset() { func (m *JobServiceTypes_SubmitImportJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_SubmitImportJobRequest) ProtoMessage() {} func (*JobServiceTypes_SubmitImportJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 0} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 0} } func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Unmarshal(m, b) @@ -117,7 +117,7 @@ func (m *JobServiceTypes_SubmitImportJobResponse) Reset() { func (m *JobServiceTypes_SubmitImportJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_SubmitImportJobResponse) ProtoMessage() {} func (*JobServiceTypes_SubmitImportJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 1} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 1} } func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Unmarshal(m, b) @@ -155,7 +155,7 @@ func (m *JobServiceTypes_ListJobsResponse) Reset() { *m = JobServiceType func (m *JobServiceTypes_ListJobsResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_ListJobsResponse) ProtoMessage() {} func (*JobServiceTypes_ListJobsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 2} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 2} } func (m *JobServiceTypes_ListJobsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Unmarshal(m, b) @@ -193,7 +193,7 @@ func (m *JobServiceTypes_GetJobRequest) Reset() { *m = JobServiceTypes_G func (m *JobServiceTypes_GetJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_GetJobRequest) ProtoMessage() {} func (*JobServiceTypes_GetJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 3} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 3} } func (m *JobServiceTypes_GetJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_GetJobRequest.Unmarshal(m, b) @@ -231,7 +231,7 @@ func (m *JobServiceTypes_GetJobResponse) Reset() { *m = JobServiceTypes_ func (m *JobServiceTypes_GetJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_GetJobResponse) ProtoMessage() {} func (*JobServiceTypes_GetJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 4} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 4} } func (m *JobServiceTypes_GetJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_GetJobResponse.Unmarshal(m, b) @@ -269,7 +269,7 @@ func (m *JobServiceTypes_AbortJobRequest) Reset() { *m = JobServiceTypes func (m *JobServiceTypes_AbortJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_AbortJobRequest) ProtoMessage() {} func (*JobServiceTypes_AbortJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 5} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 5} } func (m *JobServiceTypes_AbortJobRequest) 
XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Unmarshal(m, b) @@ -307,7 +307,7 @@ func (m *JobServiceTypes_AbortJobResponse) Reset() { *m = JobServiceType func (m *JobServiceTypes_AbortJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_AbortJobResponse) ProtoMessage() {} func (*JobServiceTypes_AbortJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 6} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 6} } func (m *JobServiceTypes_AbortJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Unmarshal(m, b) @@ -356,7 +356,7 @@ func (m *JobServiceTypes_JobDetail) Reset() { *m = JobServiceTypes_JobDe func (m *JobServiceTypes_JobDetail) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_JobDetail) ProtoMessage() {} func (*JobServiceTypes_JobDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_c970f49698845549, []int{0, 7} + return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 7} } func (m *JobServiceTypes_JobDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_JobDetail.Unmarshal(m, b) @@ -639,10 +639,10 @@ var _JobService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("feast/core/JobService.proto", fileDescriptor_JobService_c970f49698845549) + proto.RegisterFile("feast/core/JobService.proto", fileDescriptor_JobService_edcd183b773c9f62) } -var fileDescriptor_JobService_c970f49698845549 = []byte{ +var fileDescriptor_JobService_edcd183b773c9f62 = []byte{ // 621 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x4e, 0xdb, 0x4c, 0x10, 0x55, 0x62, 0x08, 0xf1, 0xf0, 0x7d, 0x80, 0x56, 0x15, 0x58, 0x4b, 0x25, 0x52, 0xa4, 0x4a, diff --git a/protos/generated/go/feast/core/UIService.pb.go b/protos/generated/go/feast/core/UIService.pb.go index d3cacf7afd..772c548c87 100644 --- a/protos/generated/go/feast/core/UIService.pb.go +++ b/protos/generated/go/feast/core/UIService.pb.go @@ -36,7 +36,7 @@ func (m *UIServiceTypes) Reset() { *m = UIServiceTypes{} } func (m *UIServiceTypes) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes) ProtoMessage() {} func (*UIServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0} + return fileDescriptor_UIService_04866529701c634c, []int{0} } func (m *UIServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes.Unmarshal(m, b) @@ -70,7 +70,7 @@ func (m *UIServiceTypes_EntityDetail) Reset() { *m = UIServiceTypes_Enti func (m *UIServiceTypes_EntityDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_EntityDetail) ProtoMessage() {} func (*UIServiceTypes_EntityDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 0} + return fileDescriptor_UIService_04866529701c634c, []int{0, 0} } func (m *UIServiceTypes_EntityDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_EntityDetail.Unmarshal(m, b) @@ -122,7 +122,7 @@ func (m *UIServiceTypes_GetEntityRequest) Reset() { *m = UIServiceTypes_ func (m *UIServiceTypes_GetEntityRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetEntityRequest) ProtoMessage() {} func (*UIServiceTypes_GetEntityRequest) Descriptor() ([]byte, []int) { - return 
fileDescriptor_UIService_07f94aecf154b17a, []int{0, 1} + return fileDescriptor_UIService_04866529701c634c, []int{0, 1} } func (m *UIServiceTypes_GetEntityRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Unmarshal(m, b) @@ -160,7 +160,7 @@ func (m *UIServiceTypes_GetEntityResponse) Reset() { *m = UIServiceTypes func (m *UIServiceTypes_GetEntityResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetEntityResponse) ProtoMessage() {} func (*UIServiceTypes_GetEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 2} + return fileDescriptor_UIService_04866529701c634c, []int{0, 2} } func (m *UIServiceTypes_GetEntityResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Unmarshal(m, b) @@ -198,7 +198,7 @@ func (m *UIServiceTypes_ListEntitiesResponse) Reset() { *m = UIServiceTy func (m *UIServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListEntitiesResponse) ProtoMessage() {} func (*UIServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 3} + return fileDescriptor_UIService_04866529701c634c, []int{0, 3} } func (m *UIServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Unmarshal(m, b) @@ -242,7 +242,7 @@ func (m *UIServiceTypes_FeatureDetail) Reset() { *m = UIServiceTypes_Fea func (m *UIServiceTypes_FeatureDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_FeatureDetail) ProtoMessage() {} func (*UIServiceTypes_FeatureDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 4} + return fileDescriptor_UIService_04866529701c634c, []int{0, 4} } func (m *UIServiceTypes_FeatureDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_FeatureDetail.Unmarshal(m, b) @@ -315,7 +315,7 @@ func (m *UIServiceTypes_GetFeatureRequest) Reset() { *m = UIServiceTypes func (m *UIServiceTypes_GetFeatureRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureRequest) ProtoMessage() {} func (*UIServiceTypes_GetFeatureRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 5} + return fileDescriptor_UIService_04866529701c634c, []int{0, 5} } func (m *UIServiceTypes_GetFeatureRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Unmarshal(m, b) @@ -354,7 +354,7 @@ func (m *UIServiceTypes_GetFeatureResponse) Reset() { *m = UIServiceType func (m *UIServiceTypes_GetFeatureResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureResponse) ProtoMessage() {} func (*UIServiceTypes_GetFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 6} + return fileDescriptor_UIService_04866529701c634c, []int{0, 6} } func (m *UIServiceTypes_GetFeatureResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Unmarshal(m, b) @@ -399,7 +399,7 @@ func (m *UIServiceTypes_ListFeaturesResponse) Reset() { *m = UIServiceTy func (m *UIServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListFeaturesResponse) ProtoMessage() {} func (*UIServiceTypes_ListFeaturesResponse) 
Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 7} + return fileDescriptor_UIService_04866529701c634c, []int{0, 7} } func (m *UIServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Unmarshal(m, b) @@ -439,7 +439,7 @@ func (m *UIServiceTypes_FeatureGroupDetail) Reset() { *m = UIServiceType func (m *UIServiceTypes_FeatureGroupDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_FeatureGroupDetail) ProtoMessage() {} func (*UIServiceTypes_FeatureGroupDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 8} + return fileDescriptor_UIService_04866529701c634c, []int{0, 8} } func (m *UIServiceTypes_FeatureGroupDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Unmarshal(m, b) @@ -484,7 +484,7 @@ func (m *UIServiceTypes_GetFeatureGroupRequest) Reset() { *m = UIService func (m *UIServiceTypes_GetFeatureGroupRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureGroupRequest) ProtoMessage() {} func (*UIServiceTypes_GetFeatureGroupRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 9} + return fileDescriptor_UIService_04866529701c634c, []int{0, 9} } func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Unmarshal(m, b) @@ -524,7 +524,7 @@ func (m *UIServiceTypes_GetFeatureGroupResponse) Reset() { func (m *UIServiceTypes_GetFeatureGroupResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureGroupResponse) ProtoMessage() {} func (*UIServiceTypes_GetFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 10} + return fileDescriptor_UIService_04866529701c634c, []int{0, 10} } func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Unmarshal(m, b) @@ -564,7 +564,7 @@ func (m *UIServiceTypes_ListFeatureGroupsResponse) Reset() { func (m *UIServiceTypes_ListFeatureGroupsResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListFeatureGroupsResponse) ProtoMessage() {} func (*UIServiceTypes_ListFeatureGroupsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 11} + return fileDescriptor_UIService_04866529701c634c, []int{0, 11} } func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Unmarshal(m, b) @@ -604,7 +604,7 @@ func (m *UIServiceTypes_StorageDetail) Reset() { *m = UIServiceTypes_Sto func (m *UIServiceTypes_StorageDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_StorageDetail) ProtoMessage() {} func (*UIServiceTypes_StorageDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 12} + return fileDescriptor_UIService_04866529701c634c, []int{0, 12} } func (m *UIServiceTypes_StorageDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_StorageDetail.Unmarshal(m, b) @@ -649,7 +649,7 @@ func (m *UIServiceTypes_GetStorageRequest) Reset() { *m = UIServiceTypes func (m *UIServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } 
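// NOTE (annotation, not part of the generated file): these *.pb.go hunks —
// CoreService, DatasetService, JobService, UIService, Serving, EntitySpec,
// FeatureGroupSpec, FeatureSpec — appear to be pure protoc-gen-go
// regeneration fallout. The fileDescriptor_* variable carries a
// content-derived hash suffix (here fileDescriptor_UIService_07f94aecf154b17a
// becoming fileDescriptor_UIService_04866529701c634c), so regenerating after
// the Granularity removal renames the variable, every Descriptor() reference
// to it, and the embedded gzipped descriptor bytes. The only structural Go
// change is in FeatureSpec.pb.go, where the struct loses its Granularity
// field and GetGranularity() getter.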
func (*UIServiceTypes_GetStorageRequest) ProtoMessage() {} func (*UIServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 13} + return fileDescriptor_UIService_04866529701c634c, []int{0, 13} } func (m *UIServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Unmarshal(m, b) @@ -687,7 +687,7 @@ func (m *UIServiceTypes_GetStorageResponse) Reset() { *m = UIServiceType func (m *UIServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetStorageResponse) ProtoMessage() {} func (*UIServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 14} + return fileDescriptor_UIService_04866529701c634c, []int{0, 14} } func (m *UIServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Unmarshal(m, b) @@ -725,7 +725,7 @@ func (m *UIServiceTypes_ListStorageResponse) Reset() { *m = UIServiceTyp func (m *UIServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListStorageResponse) ProtoMessage() {} func (*UIServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_07f94aecf154b17a, []int{0, 15} + return fileDescriptor_UIService_04866529701c634c, []int{0, 15} } func (m *UIServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Unmarshal(m, b) @@ -1120,10 +1120,10 @@ var _UIService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("feast/core/UIService.proto", fileDescriptor_UIService_07f94aecf154b17a) + proto.RegisterFile("feast/core/UIService.proto", fileDescriptor_UIService_04866529701c634c) } -var fileDescriptor_UIService_07f94aecf154b17a = []byte{ +var fileDescriptor_UIService_04866529701c634c = []byte{ // 784 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6f, 0xd3, 0x40, 0x10, 0xcd, 0x47, 0x49, 0x9a, 0x69, 0x5a, 0xe8, 0x82, 0xda, 0xb0, 0x50, 0x51, 0x99, 0x03, 0x91, diff --git a/protos/generated/go/feast/serving/Serving.pb.go b/protos/generated/go/feast/serving/Serving.pb.go index 7b00a6cd41..6f61517bee 100644 --- a/protos/generated/go/feast/serving/Serving.pb.go +++ b/protos/generated/go/feast/serving/Serving.pb.go @@ -44,7 +44,7 @@ func (m *QueryFeaturesRequest) Reset() { *m = QueryFeaturesRequest{} } func (m *QueryFeaturesRequest) String() string { return proto.CompactTextString(m) } func (*QueryFeaturesRequest) ProtoMessage() {} func (*QueryFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_fa8a820cedf8e8f2, []int{0} + return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{0} } func (m *QueryFeaturesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_QueryFeaturesRequest.Unmarshal(m, b) @@ -99,7 +99,7 @@ func (m *QueryFeaturesResponse) Reset() { *m = QueryFeaturesResponse{} } func (m *QueryFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*QueryFeaturesResponse) ProtoMessage() {} func (*QueryFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_fa8a820cedf8e8f2, []int{1} + return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{1} } func (m *QueryFeaturesResponse) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_QueryFeaturesResponse.Unmarshal(m, b) @@ -145,7 +145,7 @@ func (m *Entity) Reset() { *m = Entity{} } func (m *Entity) String() string { return proto.CompactTextString(m) } func (*Entity) ProtoMessage() {} func (*Entity) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_fa8a820cedf8e8f2, []int{2} + return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{2} } func (m *Entity) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Entity.Unmarshal(m, b) @@ -186,7 +186,7 @@ func (m *FeatureValue) Reset() { *m = FeatureValue{} } func (m *FeatureValue) String() string { return proto.CompactTextString(m) } func (*FeatureValue) ProtoMessage() {} func (*FeatureValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_fa8a820cedf8e8f2, []int{3} + return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{3} } func (m *FeatureValue) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureValue.Unmarshal(m, b) @@ -304,10 +304,10 @@ var _ServingAPI_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("feast/serving/Serving.proto", fileDescriptor_Serving_fa8a820cedf8e8f2) + proto.RegisterFile("feast/serving/Serving.proto", fileDescriptor_Serving_f91320f9a3f0c4cf) } -var fileDescriptor_Serving_fa8a820cedf8e8f2 = []byte{ +var fileDescriptor_Serving_f91320f9a3f0c4cf = []byte{ // 429 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd3, 0x40, 0x14, 0x75, 0x5a, 0x5c, 0x36, 0x77, 0x0d, 0xca, 0xe0, 0x62, 0xc8, 0x8a, 0x96, 0xac, 0x0f, 0x01, diff --git a/protos/generated/go/feast/specs/EntitySpec.pb.go b/protos/generated/go/feast/specs/EntitySpec.pb.go index 8543c6c268..0f4374c679 100644 --- a/protos/generated/go/feast/specs/EntitySpec.pb.go +++ b/protos/generated/go/feast/specs/EntitySpec.pb.go @@ -31,7 +31,7 @@ func (m *EntitySpec) Reset() { *m = EntitySpec{} } func (m *EntitySpec) String() string { return proto.CompactTextString(m) } func (*EntitySpec) ProtoMessage() {} func (*EntitySpec) Descriptor() ([]byte, []int) { - return fileDescriptor_EntitySpec_df39fb3786bb4912, []int{0} + return fileDescriptor_EntitySpec_b8950ded39b854cb, []int{0} } func (m *EntitySpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EntitySpec.Unmarshal(m, b) @@ -77,10 +77,10 @@ func init() { } func init() { - proto.RegisterFile("feast/specs/EntitySpec.proto", fileDescriptor_EntitySpec_df39fb3786bb4912) + proto.RegisterFile("feast/specs/EntitySpec.proto", fileDescriptor_EntitySpec_b8950ded39b854cb) } -var fileDescriptor_EntitySpec_df39fb3786bb4912 = []byte{ +var fileDescriptor_EntitySpec_b8950ded39b854cb = []byte{ // 177 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0xcd, 0x2b, 0xc9, 0x2c, 0xa9, 0x0c, 0x2e, diff --git a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go b/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go index a8f4c6c133..8349e5a53a 100644 --- a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go +++ b/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go @@ -31,7 +31,7 @@ func (m *FeatureGroupSpec) Reset() { *m = FeatureGroupSpec{} } func (m *FeatureGroupSpec) String() string { return proto.CompactTextString(m) } func (*FeatureGroupSpec) ProtoMessage() {} func (*FeatureGroupSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureGroupSpec_1fd10deeef20baa7, []int{0} + return 
fileDescriptor_FeatureGroupSpec_21c67ee01edd412c, []int{0} } func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureGroupSpec.Unmarshal(m, b) @@ -77,10 +77,10 @@ func init() { } func init() { - proto.RegisterFile("feast/specs/FeatureGroupSpec.proto", fileDescriptor_FeatureGroupSpec_1fd10deeef20baa7) + proto.RegisterFile("feast/specs/FeatureGroupSpec.proto", fileDescriptor_FeatureGroupSpec_21c67ee01edd412c) } -var fileDescriptor_FeatureGroupSpec_1fd10deeef20baa7 = []byte{ +var fileDescriptor_FeatureGroupSpec_21c67ee01edd412c = []byte{ // 203 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0x75, diff --git a/protos/generated/go/feast/specs/FeatureSpec.pb.go b/protos/generated/go/feast/specs/FeatureSpec.pb.go index 293a5303b0..34ca43cc6b 100644 --- a/protos/generated/go/feast/specs/FeatureSpec.pb.go +++ b/protos/generated/go/feast/specs/FeatureSpec.pb.go @@ -20,28 +20,27 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type FeatureSpec struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Owner string `protobuf:"bytes,3,opt,name=owner,proto3" json:"owner,omitempty"` - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` - Uri string `protobuf:"bytes,5,opt,name=uri,proto3" json:"uri,omitempty"` - Granularity types.Granularity_Enum `protobuf:"varint,6,opt,name=granularity,proto3,enum=feast.types.Granularity_Enum" json:"granularity,omitempty"` - ValueType types.ValueType_Enum `protobuf:"varint,7,opt,name=valueType,proto3,enum=feast.types.ValueType_Enum" json:"valueType,omitempty"` - Entity string `protobuf:"bytes,8,opt,name=entity,proto3" json:"entity,omitempty"` - Group string `protobuf:"bytes,9,opt,name=group,proto3" json:"group,omitempty"` - Tags []string `protobuf:"bytes,10,rep,name=tags,proto3" json:"tags,omitempty"` - Options map[string]string `protobuf:"bytes,11,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - DataStores *DataStores `protobuf:"bytes,12,opt,name=dataStores,proto3" json:"dataStores,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Owner string `protobuf:"bytes,3,opt,name=owner,proto3" json:"owner,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + Uri string `protobuf:"bytes,5,opt,name=uri,proto3" json:"uri,omitempty"` + ValueType types.ValueType_Enum `protobuf:"varint,7,opt,name=valueType,proto3,enum=feast.types.ValueType_Enum" json:"valueType,omitempty"` + Entity string `protobuf:"bytes,8,opt,name=entity,proto3" json:"entity,omitempty"` + Group string `protobuf:"bytes,9,opt,name=group,proto3" json:"group,omitempty"` + Tags []string `protobuf:"bytes,10,rep,name=tags,proto3" json:"tags,omitempty"` + Options map[string]string `protobuf:"bytes,11,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + DataStores 
*DataStores `protobuf:"bytes,12,opt,name=dataStores,proto3" json:"dataStores,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *FeatureSpec) Reset() { *m = FeatureSpec{} } func (m *FeatureSpec) String() string { return proto.CompactTextString(m) } func (*FeatureSpec) ProtoMessage() {} func (*FeatureSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_97d610673462505c, []int{0} + return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{0} } func (m *FeatureSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureSpec.Unmarshal(m, b) @@ -96,13 +95,6 @@ func (m *FeatureSpec) GetUri() string { return "" } -func (m *FeatureSpec) GetGranularity() types.Granularity_Enum { - if m != nil { - return m.Granularity - } - return types.Granularity_NONE -} - func (m *FeatureSpec) GetValueType() types.ValueType_Enum { if m != nil { return m.ValueType @@ -157,7 +149,7 @@ func (m *DataStores) Reset() { *m = DataStores{} } func (m *DataStores) String() string { return proto.CompactTextString(m) } func (*DataStores) ProtoMessage() {} func (*DataStores) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_97d610673462505c, []int{1} + return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{1} } func (m *DataStores) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DataStores.Unmarshal(m, b) @@ -203,7 +195,7 @@ func (m *DataStore) Reset() { *m = DataStore{} } func (m *DataStore) String() string { return proto.CompactTextString(m) } func (*DataStore) ProtoMessage() {} func (*DataStore) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_97d610673462505c, []int{2} + return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{2} } func (m *DataStore) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DataStore.Unmarshal(m, b) @@ -246,39 +238,38 @@ func init() { } func init() { - proto.RegisterFile("feast/specs/FeatureSpec.proto", fileDescriptor_FeatureSpec_97d610673462505c) -} - -var fileDescriptor_FeatureSpec_97d610673462505c = []byte{ - // 479 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0x4d, 0x6b, 0xdb, 0x40, - 0x10, 0x45, 0x56, 0x62, 0x57, 0xa3, 0x10, 0xc2, 0x52, 0x92, 0xc5, 0x6d, 0x40, 0xb8, 0x14, 0x7c, - 0x92, 0x8a, 0x5b, 0xfa, 0x11, 0x28, 0x81, 0x50, 0xb7, 0xc7, 0x16, 0xa5, 0xa4, 0xd0, 0xdb, 0xc6, - 0x9a, 0x2a, 0x6a, 0x62, 0xad, 0xd8, 0x5d, 0x25, 0xe8, 0x8f, 0xf4, 0x6f, 0xf6, 0x2f, 0x94, 0x9d, - 0xb5, 0xa3, 0x35, 0x71, 0x4f, 0xb9, 0xcd, 0xec, 0x7b, 0x33, 0x9a, 0xf7, 0x46, 0x03, 0xc7, 0xbf, - 0x50, 0x68, 0x93, 0xe9, 0x06, 0x17, 0x3a, 0xfb, 0x8c, 0xc2, 0xb4, 0x0a, 0xcf, 0x1b, 0x5c, 0xa4, - 0x8d, 0x92, 0x46, 0xb2, 0x98, 0xe0, 0x94, 0xe0, 0xf1, 0x73, 0x9f, 0x3b, 0xaf, 0x4d, 0x65, 0xba, - 0x9e, 0x3a, 0xde, 0xe8, 0x74, 0x6e, 0xa4, 0x12, 0x25, 0x3e, 0x84, 0x4d, 0xd7, 0xa0, 0xce, 0xbe, - 0x28, 0x51, 0xb7, 0x37, 0x42, 0x55, 0xa6, 0x5b, 0xc1, 0x47, 0x3e, 0x7c, 0x21, 0x6e, 0x5a, 0x74, - 0xc0, 0xe4, 0x6f, 0x08, 0xb1, 0x37, 0x17, 0xdb, 0x87, 0x41, 0x55, 0xf0, 0x20, 0x09, 0xa6, 0x51, - 0x3e, 0xa8, 0x0a, 0xc6, 0x60, 0xa7, 0x16, 0x4b, 0xe4, 0x03, 0x7a, 0xa1, 0x98, 0x3d, 0x85, 0x5d, - 0x79, 0x57, 0xa3, 0xe2, 0x21, 0x3d, 0xba, 0x84, 0x25, 0x10, 0x17, 0xa8, 0x17, 0xaa, 0x6a, 0x4c, - 0x25, 0x6b, 0xbe, 0x43, 0x98, 0xff, 0xc4, 0x0e, 0x20, 0x6c, 0x55, 0xc5, 0x77, 0x09, 0xb1, 0x21, - 0x3b, 0x85, 0xb8, 0xec, 0x67, 0xe5, 0xc3, 0x24, 0x98, 0xee, 0xcf, 0x8e, 0x53, 0xe7, 0x0a, 0x0d, - 0x9b, 0xfa, 0x5a, 0xe6, 0x75, 
0xbb, 0xcc, 0xfd, 0x0a, 0xf6, 0x01, 0xa2, 0x5b, 0xab, 0xe6, 0x7b, - 0xd7, 0x20, 0x1f, 0x51, 0xf9, 0xb3, 0x8d, 0xf2, 0x8b, 0x35, 0xea, 0x8a, 0x7b, 0x36, 0x3b, 0x84, - 0x21, 0x92, 0xc9, 0xfc, 0x09, 0x0d, 0xb4, 0xca, 0xac, 0xba, 0x52, 0xc9, 0xb6, 0xe1, 0x91, 0x53, - 0x47, 0x89, 0xf5, 0xc1, 0x88, 0x52, 0x73, 0x48, 0x42, 0xeb, 0x83, 0x8d, 0xd9, 0x29, 0x8c, 0x24, - 0x29, 0xd3, 0x3c, 0x4e, 0xc2, 0x69, 0x3c, 0x7b, 0x99, 0x7a, 0xfb, 0x4c, 0xfd, 0x75, 0x7f, 0x75, - 0xbc, 0x79, 0x6d, 0x54, 0x97, 0xaf, 0xab, 0xd8, 0x3b, 0x80, 0x42, 0x18, 0x61, 0xb7, 0x89, 0x9a, - 0xef, 0x25, 0xc1, 0x34, 0x9e, 0x1d, 0x6d, 0xf4, 0xf8, 0x74, 0x0f, 0xe7, 0x1e, 0x75, 0x7c, 0x02, - 0x7b, 0x7e, 0x47, 0xeb, 0xec, 0x35, 0x76, 0xab, 0xb5, 0xd9, 0xd0, 0xaa, 0x20, 0xa9, 0xab, 0xc5, - 0xb9, 0xe4, 0x64, 0xf0, 0x3e, 0x98, 0x18, 0x80, 0xbe, 0x2b, 0x7b, 0x05, 0x23, 0x8d, 0xea, 0xb6, - 0xaa, 0x4b, 0xaa, 0x8e, 0x67, 0x87, 0xdb, 0xbf, 0x9f, 0xaf, 0x69, 0xec, 0x0d, 0x44, 0x77, 0x42, - 0xe1, 0x95, 0x6c, 0xb5, 0xeb, 0xfe, 0xff, 0x9a, 0x9e, 0x38, 0xf9, 0x13, 0x40, 0x74, 0x0f, 0x3c, - 0xf8, 0xcb, 0x3e, 0xf6, 0x4e, 0x0e, 0xc8, 0xc9, 0x17, 0xdb, 0x3b, 0x6e, 0xf7, 0xf1, 0x31, 0x76, - 0x9c, 0xfd, 0x00, 0xff, 0x08, 0xcf, 0x0e, 0xbc, 0xad, 0x7d, 0xb3, 0x17, 0xf2, 0xf3, 0x6d, 0x59, - 0x99, 0xab, 0xf6, 0x32, 0x5d, 0xc8, 0x65, 0x56, 0xca, 0xdf, 0x78, 0x9d, 0xb9, 0x5b, 0xa2, 0xfb, - 0xd1, 0x59, 0x89, 0x35, 0x2a, 0x61, 0xb0, 0xc8, 0x4a, 0x99, 0x79, 0x37, 0x7a, 0x39, 0x24, 0xc2, - 0xeb, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x13, 0x00, 0x0c, 0x12, 0x03, 0x04, 0x00, 0x00, + proto.RegisterFile("feast/specs/FeatureSpec.proto", fileDescriptor_FeatureSpec_de24bbc3c8f2bed6) +} + +var fileDescriptor_FeatureSpec_de24bbc3c8f2bed6 = []byte{ + // 451 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0xdf, 0x6b, 0xd4, 0x40, + 0x10, 0x26, 0x49, 0xdb, 0x33, 0x93, 0x52, 0xca, 0x22, 0xed, 0x72, 0x2a, 0x84, 0x13, 0xe1, 0x9e, + 0x12, 0x39, 0xc5, 0x1f, 0x05, 0x11, 0x8a, 0xe7, 0xab, 0x92, 0x4a, 0x05, 0xdf, 0xb6, 0xc9, 0x98, + 0xc6, 0x7a, 0xd9, 0xb0, 0xbb, 0x69, 0xc9, 0x3f, 0xe2, 0x9f, 0xe1, 0xdf, 0x28, 0x3b, 0x7b, 0xd7, + 0xec, 0xe1, 0xf9, 0xe4, 0xdb, 0xcc, 0x7e, 0xdf, 0x7e, 0x3b, 0xf3, 0xed, 0x0c, 0x3c, 0xf9, 0x8e, + 0x42, 0x9b, 0x5c, 0x77, 0x58, 0xea, 0xfc, 0x23, 0x0a, 0xd3, 0x2b, 0xbc, 0xe8, 0xb0, 0xcc, 0x3a, + 0x25, 0x8d, 0x64, 0x09, 0xc1, 0x19, 0xc1, 0xd3, 0xc7, 0x3e, 0x77, 0xd9, 0x9a, 0xc6, 0x0c, 0x23, + 0x75, 0xba, 0xa5, 0x74, 0x61, 0xa4, 0x12, 0xb5, 0xa7, 0x34, 0x3d, 0x75, 0xb0, 0x19, 0x3a, 0xd4, + 0xf9, 0xa5, 0xf8, 0xd9, 0xa3, 0x03, 0x66, 0xbf, 0x23, 0x48, 0xbc, 0x87, 0xd9, 0x11, 0x84, 0x4d, + 0xc5, 0x83, 0x34, 0x98, 0xc7, 0x45, 0xd8, 0x54, 0x8c, 0xc1, 0x5e, 0x2b, 0x56, 0xc8, 0x43, 0x3a, + 0xa1, 0x98, 0x3d, 0x84, 0x7d, 0x79, 0xd7, 0xa2, 0xe2, 0x11, 0x1d, 0xba, 0x84, 0xa5, 0x90, 0x54, + 0xa8, 0x4b, 0xd5, 0x74, 0xa6, 0x91, 0x2d, 0xdf, 0x23, 0xcc, 0x3f, 0x62, 0xc7, 0x10, 0xf5, 0xaa, + 0xe1, 0xfb, 0x84, 0xd8, 0x90, 0xbd, 0x85, 0xf8, 0xd6, 0x16, 0xf3, 0x65, 0xe8, 0x90, 0x4f, 0xd2, + 0x60, 0x7e, 0xb4, 0x78, 0x94, 0xb9, 0xa6, 0xa9, 0xd4, 0xec, 0x72, 0x83, 0x66, 0xcb, 0xb6, 0x5f, + 0x15, 0x23, 0x9b, 0x9d, 0xc0, 0x01, 0x92, 0x09, 0xfc, 0x01, 0xe9, 0xad, 0x33, 0x5b, 0x5c, 0xad, + 0x64, 0xdf, 0xf1, 0xd8, 0x15, 0x47, 0x89, 0x6d, 0xc3, 0x88, 0x5a, 0x73, 0x48, 0x23, 0xdb, 0x86, + 0x8d, 0xd9, 0x7b, 0x98, 0x48, 0x2a, 0x4c, 0xf3, 0x24, 0x8d, 0xe6, 0xc9, 0xe2, 0x59, 0xe6, 0xf9, + 0x9d, 0xf9, 0xdf, 0xf1, 0xc9, 0xf1, 0x96, 0xad, 0x51, 0x43, 0xb1, 0xb9, 0xc5, 0x5e, 0x03, 0x54, + 0xc2, 0x08, 0xeb, 0x36, 0x6a, 0x7e, 0x98, 0x06, 
0xf3, 0x64, 0x71, 0xba, 0xa5, 0xf1, 0xe1, 0x1e, + 0x2e, 0x3c, 0xea, 0xf4, 0x0c, 0x0e, 0x7d, 0x45, 0x6b, 0xcc, 0x0d, 0x0e, 0x6b, 0xd7, 0x6d, 0x68, + 0xbb, 0xa0, 0x56, 0xd7, 0xbe, 0xbb, 0xe4, 0x2c, 0x7c, 0x13, 0xcc, 0x0c, 0xc0, 0xa8, 0xca, 0x9e, + 0xc3, 0x44, 0xa3, 0xba, 0x6d, 0xda, 0x9a, 0x6e, 0x27, 0x8b, 0x93, 0xdd, 0xef, 0x17, 0x1b, 0x1a, + 0x7b, 0x09, 0xf1, 0x9d, 0x50, 0x78, 0x2d, 0x7b, 0xed, 0xd4, 0xff, 0x7d, 0x67, 0x24, 0xce, 0x7e, + 0x05, 0x10, 0xdf, 0x03, 0x7f, 0x0d, 0xc9, 0xbb, 0xd1, 0xc9, 0x90, 0x9c, 0x7c, 0xba, 0x5b, 0x71, + 0xb7, 0x8f, 0xff, 0x63, 0xc7, 0xf9, 0x57, 0xf0, 0x97, 0xe4, 0xfc, 0xd8, 0xfb, 0xb5, 0xcf, 0x76, + 0xc0, 0xbf, 0xbd, 0xaa, 0x1b, 0x73, 0xdd, 0x5f, 0x65, 0xa5, 0x5c, 0xe5, 0xb5, 0xfc, 0x81, 0x37, + 0xb9, 0x5b, 0x05, 0x1a, 0x7f, 0x9d, 0xd7, 0xd8, 0xa2, 0x12, 0x06, 0xab, 0xbc, 0x96, 0xb9, 0xb7, + 0x43, 0x57, 0x07, 0x44, 0x78, 0xf1, 0x27, 0x00, 0x00, 0xff, 0xff, 0xca, 0xfb, 0xc4, 0x86, 0xa3, + 0x03, 0x00, 0x00, } diff --git a/protos/generated/go/feast/specs/ImportJobSpecs.pb.go b/protos/generated/go/feast/specs/ImportJobSpecs.pb.go new file mode 100644 index 0000000000..d4c940e947 --- /dev/null +++ b/protos/generated/go/feast/specs/ImportJobSpecs.pb.go @@ -0,0 +1,137 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/specs/ImportJobSpecs.proto + +package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ImportJobSpecs struct { + JobId string `protobuf:"bytes,1,opt,name=jobId,proto3" json:"jobId,omitempty"` + ImportSpec *ImportSpec `protobuf:"bytes,2,opt,name=importSpec,proto3" json:"importSpec,omitempty"` + EntitySpecs []*EntitySpec `protobuf:"bytes,3,rep,name=entitySpecs,proto3" json:"entitySpecs,omitempty"` + FeatureSpecs []*FeatureSpec `protobuf:"bytes,4,rep,name=featureSpecs,proto3" json:"featureSpecs,omitempty"` + ServingStorageSpecs []*StorageSpec `protobuf:"bytes,5,rep,name=servingStorageSpecs,proto3" json:"servingStorageSpecs,omitempty"` + WarehouseStorageSpecs []*StorageSpec `protobuf:"bytes,6,rep,name=warehouseStorageSpecs,proto3" json:"warehouseStorageSpecs,omitempty"` + ErrorsStorageSpec *StorageSpec `protobuf:"bytes,7,opt,name=errorsStorageSpec,proto3" json:"errorsStorageSpec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportJobSpecs) Reset() { *m = ImportJobSpecs{} } +func (m *ImportJobSpecs) String() string { return proto.CompactTextString(m) } +func (*ImportJobSpecs) ProtoMessage() {} +func (*ImportJobSpecs) Descriptor() ([]byte, []int) { + return fileDescriptor_ImportJobSpecs_c867f2e818ce78e5, []int{0} +} +func (m *ImportJobSpecs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportJobSpecs.Unmarshal(m, b) +} +func (m *ImportJobSpecs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportJobSpecs.Marshal(b, m, deterministic) +} +func (dst *ImportJobSpecs) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportJobSpecs.Merge(dst, src) +} +func (m *ImportJobSpecs) XXX_Size() int { + return xxx_messageInfo_ImportJobSpecs.Size(m) +} +func (m *ImportJobSpecs) XXX_DiscardUnknown() { + xxx_messageInfo_ImportJobSpecs.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportJobSpecs proto.InternalMessageInfo + +func (m *ImportJobSpecs) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func (m *ImportJobSpecs) GetImportSpec() *ImportSpec { + if m != nil { + return m.ImportSpec + } + return nil +} + +func (m *ImportJobSpecs) GetEntitySpecs() []*EntitySpec { + if m != nil { + return m.EntitySpecs + } + return nil +} + +func (m *ImportJobSpecs) GetFeatureSpecs() []*FeatureSpec { + if m != nil { + return m.FeatureSpecs + } + return nil +} + +func (m *ImportJobSpecs) GetServingStorageSpecs() []*StorageSpec { + if m != nil { + return m.ServingStorageSpecs + } + return nil +} + +func (m *ImportJobSpecs) GetWarehouseStorageSpecs() []*StorageSpec { + if m != nil { + return m.WarehouseStorageSpecs + } + return nil +} + +func (m *ImportJobSpecs) GetErrorsStorageSpec() *StorageSpec { + if m != nil { + return m.ErrorsStorageSpec + } + return nil +} + +func init() { + proto.RegisterType((*ImportJobSpecs)(nil), "feast.specs.ImportJobSpecs") +} + +func init() { + proto.RegisterFile("feast/specs/ImportJobSpecs.proto", fileDescriptor_ImportJobSpecs_c867f2e818ce78e5) +} + +var fileDescriptor_ImportJobSpecs_c867f2e818ce78e5 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x41, 0x4b, 0xc3, 0x40, + 0x10, 0x85, 0xa9, 0xb5, 0x15, 0xb7, 0x22, 0xb8, 0x55, 0x5c, 0x44, 0x21, 0x78, 0xea, 0x29, 0x0b, + 0x0a, 0x8a, 0xe0, 0xa9, 0x60, 0xa1, 0x3d, 0x88, 0xa4, 0xb7, 0xde, 0x92, 0x76, 0xba, 0x4d, 0xa5, + 0x99, 0xb0, 0xbb, 0x51, 0xfc, 0x6f, 0xfe, 0x38, 0xc9, 0x06, 0x9a, 0x59, 0x92, 0xe2, 0x71, 
0xf3, + 0xbe, 0x37, 0xcc, 0xcb, 0x3c, 0x16, 0xac, 0x21, 0x36, 0x56, 0x9a, 0x1c, 0x96, 0x46, 0x4e, 0x77, + 0x39, 0x6a, 0x3b, 0xc3, 0x64, 0x5e, 0x3e, 0xc3, 0x5c, 0xa3, 0x45, 0x3e, 0x70, 0x44, 0xe8, 0x88, + 0x9b, 0xdb, 0x26, 0x5e, 0xb2, 0x15, 0xea, 0xab, 0x6f, 0x99, 0x4d, 0xed, 0x0f, 0x51, 0xef, 0xa8, + 0x3a, 0x81, 0xd8, 0x16, 0x1a, 0x0e, 0xc9, 0x73, 0x8b, 0x3a, 0x56, 0x44, 0xbe, 0xff, 0xed, 0xb2, + 0x73, 0x7f, 0x3f, 0x7e, 0xc9, 0x7a, 0x5b, 0x4c, 0xa6, 0x2b, 0xd1, 0x09, 0x3a, 0xa3, 0xd3, 0xa8, + 0x7a, 0xf0, 0x67, 0xc6, 0xd2, 0xfd, 0x62, 0xe2, 0x28, 0xe8, 0x8c, 0x06, 0x0f, 0xd7, 0x21, 0x09, + 0x11, 0xd6, 0x7b, 0x47, 0x04, 0xe5, 0x2f, 0x6c, 0x00, 0xfb, 0x9d, 0x8d, 0xe8, 0x06, 0xdd, 0x86, + 0xb3, 0xce, 0x14, 0x51, 0x96, 0xbf, 0xb2, 0xb3, 0x75, 0x1d, 0xc8, 0x88, 0x63, 0xe7, 0x15, 0x9e, + 0x97, 0x24, 0x8e, 0x3c, 0x9a, 0xcf, 0xd8, 0xd0, 0x80, 0xfe, 0x4a, 0x33, 0x45, 0x62, 0x1b, 0xd1, + 0x6b, 0x19, 0x42, 0x80, 0xa8, 0xcd, 0xc4, 0xdf, 0xd9, 0xd5, 0x77, 0xac, 0x61, 0x83, 0x85, 0x01, + 0x6f, 0x5a, 0xff, 0x9f, 0x69, 0xed, 0x36, 0x3e, 0x61, 0x17, 0xa0, 0x35, 0x6a, 0x43, 0xbe, 0x8a, + 0x13, 0xf7, 0x53, 0x0f, 0xcf, 0x6a, 0x5a, 0xc6, 0x0b, 0x46, 0x7b, 0x34, 0x1e, 0xfa, 0xa7, 0xfc, + 0x28, 0x4f, 0xbc, 0x78, 0x52, 0xa9, 0xdd, 0x14, 0x49, 0xb8, 0xc4, 0x9d, 0x54, 0xb8, 0x85, 0x4f, + 0x59, 0x55, 0xc2, 0x15, 0xc0, 0x48, 0x05, 0x19, 0xe8, 0xd8, 0xc2, 0x4a, 0x2a, 0x94, 0xa4, 0x2c, + 0x49, 0xdf, 0x01, 0x8f, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdd, 0xf2, 0xf3, 0xa2, 0xcc, 0x02, + 0x00, 0x00, +} diff --git a/protos/generated/go/feast/specs/ImportSpec.pb.go b/protos/generated/go/feast/specs/ImportSpec.pb.go index 9bcb9b3ae7..d16f9fa07b 100644 --- a/protos/generated/go/feast/specs/ImportSpec.pb.go +++ b/protos/generated/go/feast/specs/ImportSpec.pb.go @@ -34,7 +34,7 @@ func (m *ImportSpec) Reset() { *m = ImportSpec{} } func (m *ImportSpec) String() string { return proto.CompactTextString(m) } func (*ImportSpec) ProtoMessage() {} func (*ImportSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_a027fec3e71fec8d, []int{0} + return fileDescriptor_ImportSpec_673bc4f248a91137, []int{0} } func (m *ImportSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ImportSpec.Unmarshal(m, b) @@ -107,7 +107,7 @@ func (m *Schema) Reset() { *m = Schema{} } func (m *Schema) String() string { return proto.CompactTextString(m) } func (*Schema) ProtoMessage() {} func (*Schema) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_a027fec3e71fec8d, []int{1} + return fileDescriptor_ImportSpec_673bc4f248a91137, []int{1} } func (m *Schema) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Schema.Unmarshal(m, b) @@ -260,7 +260,7 @@ func (m *Field) Reset() { *m = Field{} } func (m *Field) String() string { return proto.CompactTextString(m) } func (*Field) ProtoMessage() {} func (*Field) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_a027fec3e71fec8d, []int{2} + return fileDescriptor_ImportSpec_673bc4f248a91137, []int{2} } func (m *Field) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Field.Unmarshal(m, b) @@ -303,10 +303,10 @@ func init() { } func init() { - proto.RegisterFile("feast/specs/ImportSpec.proto", fileDescriptor_ImportSpec_a027fec3e71fec8d) + proto.RegisterFile("feast/specs/ImportSpec.proto", fileDescriptor_ImportSpec_673bc4f248a91137) } -var fileDescriptor_ImportSpec_a027fec3e71fec8d = []byte{ +var fileDescriptor_ImportSpec_673bc4f248a91137 = []byte{ // 440 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 
0x5d, 0x8f, 0x93, 0x40, 0x14, 0x5d, 0xca, 0x16, 0xed, 0x6d, 0xdc, 0x9a, 0xab, 0x0f, 0x84, 0x6c, 0x62, 0xd3, 0x07, 0x6d, diff --git a/protos/generated/go/feast/specs/StorageSpec.pb.go b/protos/generated/go/feast/specs/StorageSpec.pb.go index 8c76b75884..fefe56a1be 100644 --- a/protos/generated/go/feast/specs/StorageSpec.pb.go +++ b/protos/generated/go/feast/specs/StorageSpec.pb.go @@ -36,7 +36,7 @@ func (m *StorageSpec) Reset() { *m = StorageSpec{} } func (m *StorageSpec) String() string { return proto.CompactTextString(m) } func (*StorageSpec) ProtoMessage() {} func (*StorageSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_StorageSpec_ceee6ec6c0eb3849, []int{0} + return fileDescriptor_StorageSpec_bfb8a5e5cf34de95, []int{0} } func (m *StorageSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StorageSpec.Unmarshal(m, b) @@ -83,10 +83,10 @@ func init() { } func init() { - proto.RegisterFile("feast/specs/StorageSpec.proto", fileDescriptor_StorageSpec_ceee6ec6c0eb3849) + proto.RegisterFile("feast/specs/StorageSpec.proto", fileDescriptor_StorageSpec_bfb8a5e5cf34de95) } -var fileDescriptor_StorageSpec_ceee6ec6c0eb3849 = []byte{ +var fileDescriptor_StorageSpec_bfb8a5e5cf34de95 = []byte{ // 227 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x0f, 0x2e, 0xc9, 0x2f, 0x4a, 0x4c, 0x4f, 0x0d, diff --git a/protos/generated/go/feast/storage/BigTable.pb.go b/protos/generated/go/feast/storage/BigTable.pb.go index 6dc2c35c80..e65b5e35fc 100644 --- a/protos/generated/go/feast/storage/BigTable.pb.go +++ b/protos/generated/go/feast/storage/BigTable.pb.go @@ -33,7 +33,7 @@ func (m *BigTableRowKey) Reset() { *m = BigTableRowKey{} } func (m *BigTableRowKey) String() string { return proto.CompactTextString(m) } func (*BigTableRowKey) ProtoMessage() {} func (*BigTableRowKey) Descriptor() ([]byte, []int) { - return fileDescriptor_BigTable_967f8b41f64b081b, []int{0} + return fileDescriptor_BigTable_e25bdeffe0669ae7, []int{0} } func (m *BigTableRowKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BigTableRowKey.Unmarshal(m, b) @@ -79,10 +79,10 @@ func init() { } func init() { - proto.RegisterFile("feast/storage/BigTable.proto", fileDescriptor_BigTable_967f8b41f64b081b) + proto.RegisterFile("feast/storage/BigTable.proto", fileDescriptor_BigTable_e25bdeffe0669ae7) } -var fileDescriptor_BigTable_967f8b41f64b081b = []byte{ +var fileDescriptor_BigTable_e25bdeffe0669ae7 = []byte{ // 193 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0xb1, 0x0b, 0x82, 0x40, 0x18, 0x47, 0xb1, 0x20, 0xf0, 0x40, 0x87, 0x9b, 0x1c, 0x24, 0xa2, 0x21, 0x9a, 0x3c, 0xa2, 0xa5, diff --git a/protos/generated/go/feast/storage/Redis.pb.go b/protos/generated/go/feast/storage/Redis.pb.go index c277a8f03e..49f80a061e 100644 --- a/protos/generated/go/feast/storage/Redis.pb.go +++ b/protos/generated/go/feast/storage/Redis.pb.go @@ -43,7 +43,7 @@ func (m *RedisBucketKey) Reset() { *m = RedisBucketKey{} } func (m *RedisBucketKey) String() string { return proto.CompactTextString(m) } func (*RedisBucketKey) ProtoMessage() {} func (*RedisBucketKey) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_749687aada0bf97b, []int{0} + return fileDescriptor_Redis_cef62c817c1622ce, []int{0} } func (m *RedisBucketKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketKey.Unmarshal(m, b) @@ -99,7 +99,7 @@ func (m 
*RedisBucketValue) Reset() { *m = RedisBucketValue{} } func (m *RedisBucketValue) String() string { return proto.CompactTextString(m) } func (*RedisBucketValue) ProtoMessage() {} func (*RedisBucketValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_749687aada0bf97b, []int{1} + return fileDescriptor_Redis_cef62c817c1622ce, []int{1} } func (m *RedisBucketValue) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketValue.Unmarshal(m, b) @@ -147,7 +147,7 @@ func (m *RedisBucketValueList) Reset() { *m = RedisBucketValueList{} } func (m *RedisBucketValueList) String() string { return proto.CompactTextString(m) } func (*RedisBucketValueList) ProtoMessage() {} func (*RedisBucketValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_749687aada0bf97b, []int{2} + return fileDescriptor_Redis_cef62c817c1622ce, []int{2} } func (m *RedisBucketValueList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketValueList.Unmarshal(m, b) @@ -180,9 +180,9 @@ func init() { proto.RegisterType((*RedisBucketValueList)(nil), "feast.storage.RedisBucketValueList") } -func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_Redis_749687aada0bf97b) } +func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_Redis_cef62c817c1622ce) } -var fileDescriptor_Redis_749687aada0bf97b = []byte{ +var fileDescriptor_Redis_cef62c817c1622ce = []byte{ // 325 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xcd, 0x4f, 0xf2, 0x40, 0x10, 0xc6, 0xd3, 0x97, 0x57, 0x22, 0x4b, 0x24, 0x66, 0x35, 0xb1, 0x36, 0x26, 0x34, 0x9c, 0x7a, diff --git a/protos/generated/go/feast/types/Feature.pb.go b/protos/generated/go/feast/types/Feature.pb.go index 55c70f46b2..51ed41fb54 100644 --- a/protos/generated/go/feast/types/Feature.pb.go +++ b/protos/generated/go/feast/types/Feature.pb.go @@ -30,7 +30,7 @@ func (m *Feature) Reset() { *m = Feature{} } func (m *Feature) String() string { return proto.CompactTextString(m) } func (*Feature) ProtoMessage() {} func (*Feature) Descriptor() ([]byte, []int) { - return fileDescriptor_Feature_9650e908dedbbf49, []int{0} + return fileDescriptor_Feature_c2a5d99d9bf3ca9c, []int{0} } func (m *Feature) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Feature.Unmarshal(m, b) @@ -68,9 +68,9 @@ func init() { proto.RegisterType((*Feature)(nil), "feast.types.Feature") } -func init() { proto.RegisterFile("feast/types/Feature.proto", fileDescriptor_Feature_9650e908dedbbf49) } +func init() { proto.RegisterFile("feast/types/Feature.proto", fileDescriptor_Feature_c2a5d99d9bf3ca9c) } -var fileDescriptor_Feature_9650e908dedbbf49 = []byte{ +var fileDescriptor_Feature_c2a5d99d9bf3ca9c = []byte{ // 173 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0xd5, diff --git a/protos/generated/go/feast/types/FeatureRow.pb.go b/protos/generated/go/feast/types/FeatureRow.pb.go index 5e9d6b0e74..dc6099ba75 100644 --- a/protos/generated/go/feast/types/FeatureRow.pb.go +++ b/protos/generated/go/feast/types/FeatureRow.pb.go @@ -23,7 +23,6 @@ type FeatureRowKey struct { EntityKey string `protobuf:"bytes,1,opt,name=entityKey,proto3" json:"entityKey,omitempty"` EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` EntityName string 
`protobuf:"bytes,4,opt,name=entityName,proto3" json:"entityName,omitempty"` - Granularity Granularity_Enum `protobuf:"varint,5,opt,name=granularity,proto3,enum=feast.types.Granularity_Enum" json:"granularity,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -33,7 +32,7 @@ func (m *FeatureRowKey) Reset() { *m = FeatureRowKey{} } func (m *FeatureRowKey) String() string { return proto.CompactTextString(m) } func (*FeatureRowKey) ProtoMessage() {} func (*FeatureRowKey) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRow_44b5ccd93feeaf74, []int{0} + return fileDescriptor_FeatureRow_3072498459678ede, []int{0} } func (m *FeatureRowKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRowKey.Unmarshal(m, b) @@ -74,19 +73,11 @@ func (m *FeatureRowKey) GetEntityName() string { return "" } -func (m *FeatureRowKey) GetGranularity() Granularity_Enum { - if m != nil { - return m.Granularity - } - return Granularity_NONE -} - type FeatureRow struct { EntityKey string `protobuf:"bytes,1,opt,name=entityKey,proto3" json:"entityKey,omitempty"` Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` EntityName string `protobuf:"bytes,4,opt,name=entityName,proto3" json:"entityName,omitempty"` - Granularity Granularity_Enum `protobuf:"varint,5,opt,name=granularity,proto3,enum=feast.types.Granularity_Enum" json:"granularity,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -96,7 +87,7 @@ func (m *FeatureRow) Reset() { *m = FeatureRow{} } func (m *FeatureRow) String() string { return proto.CompactTextString(m) } func (*FeatureRow) ProtoMessage() {} func (*FeatureRow) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRow_44b5ccd93feeaf74, []int{1} + return fileDescriptor_FeatureRow_3072498459678ede, []int{1} } func (m *FeatureRow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRow.Unmarshal(m, b) @@ -144,42 +135,32 @@ func (m *FeatureRow) GetEntityName() string { return "" } -func (m *FeatureRow) GetGranularity() Granularity_Enum { - if m != nil { - return m.Granularity - } - return Granularity_NONE -} - func init() { proto.RegisterType((*FeatureRowKey)(nil), "feast.types.FeatureRowKey") proto.RegisterType((*FeatureRow)(nil), "feast.types.FeatureRow") } func init() { - proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_FeatureRow_44b5ccd93feeaf74) -} - -var fileDescriptor_FeatureRow_44b5ccd93feeaf74 = []byte{ - // 305 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x92, 0x41, 0x4b, 0xfb, 0x40, - 0x10, 0xc5, 0x49, 0xfb, 0xff, 0x8b, 0x9d, 0x60, 0x85, 0xe0, 0x21, 0x96, 0x56, 0x43, 0x4f, 0x39, - 0xed, 0x4a, 0x04, 0xaf, 0x42, 0x40, 0x3d, 0x14, 0x44, 0x82, 0x78, 0xf0, 0xb6, 0xd1, 0xc9, 0x1a, - 0x6d, 0xb2, 0x21, 0x99, 0x28, 0x39, 0xfa, 0x15, 0xfd, 0x44, 0xe2, 0xae, 0x69, 0x56, 0x11, 0x3c, - 0x7b, 0x9d, 0xf7, 0x66, 0xe6, 0x37, 0xc3, 0x83, 0x79, 0x86, 0xa2, 0x21, 0x4e, 0x5d, 0x85, 0x0d, - 0x3f, 0x47, 0x41, 0x6d, 0x8d, 0x89, 0x7a, 0x61, 0x55, 0xad, 0x48, 0x79, 0xae, 0x56, 0x99, 0x56, - 0x67, 0x87, 0x52, 0x29, 0xb9, 0x46, 0xae, 0xa5, 0xb4, 0xcd, 0x38, 0xe5, 0x05, 0x36, 0x24, 0x8a, - 0xca, 0xb8, 0x67, 0xfb, 0x3f, 0xcc, 0xfa, 0x94, 0x16, 0xb6, 0x74, 0x51, 0x8b, 0xb2, 0x5d, 0x8b, - 0x3a, 0xa7, 0xce, 0xc8, 
0xcb, 0x37, 0x07, 0x76, 0x86, 0xe5, 0x2b, 0xec, 0xbc, 0x39, 0x4c, 0xb0, - 0xa4, 0x9c, 0xba, 0x15, 0x76, 0xbe, 0x13, 0x38, 0xe1, 0x24, 0x19, 0x0a, 0x5e, 0x0c, 0x53, 0x7c, - 0xc6, 0x92, 0xae, 0x7b, 0x02, 0x7f, 0x1c, 0x38, 0xa1, 0x1b, 0xcd, 0x98, 0x61, 0x64, 0x3d, 0x23, - 0xdb, 0x38, 0x92, 0x6f, 0x1d, 0xde, 0x01, 0x80, 0x19, 0x78, 0x29, 0x0a, 0xf4, 0xff, 0xe9, 0x15, - 0x56, 0xc5, 0x3b, 0x05, 0x57, 0x0e, 0xa0, 0xfe, 0xff, 0xc0, 0x09, 0xa7, 0xd1, 0x82, 0x59, 0x1f, - 0x61, 0xf6, 0x21, 0x67, 0x65, 0x5b, 0x24, 0x76, 0xc7, 0xf2, 0x75, 0x04, 0x30, 0x1c, 0xf5, 0xcb, - 0x45, 0x47, 0xb0, 0x9d, 0x19, 0x6f, 0xe3, 0x8f, 0x82, 0x71, 0xe8, 0x46, 0x7b, 0x5f, 0x56, 0xf5, - 0x83, 0x36, 0xae, 0x3f, 0xf1, 0x83, 0xf8, 0x06, 0xec, 0x08, 0xc5, 0xbb, 0xc3, 0x3f, 0xae, 0x3e, - 0xe8, 0x6e, 0x4f, 0x64, 0x4e, 0x0f, 0x6d, 0xca, 0xee, 0x54, 0xc1, 0xa5, 0x7a, 0xc4, 0x27, 0x6e, - 0x82, 0xa2, 0xd9, 0x1b, 0x2e, 0xb1, 0xc4, 0x5a, 0x10, 0xde, 0x73, 0xa9, 0xb8, 0x15, 0xa1, 0x74, - 0x4b, 0x1b, 0x8e, 0xdf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xb5, 0xfe, 0x14, 0xc9, 0xbf, 0x02, 0x00, - 0x00, + proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_FeatureRow_3072498459678ede) +} + +var fileDescriptor_FeatureRow_3072498459678ede = []byte{ + // 266 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x91, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0x89, 0x2b, 0xe2, 0xce, 0xa2, 0x42, 0xf1, 0x50, 0xcb, 0xa2, 0x65, 0x4f, 0x3d, 0x65, + 0x64, 0x05, 0x1f, 0xa0, 0x07, 0x2f, 0x0b, 0x22, 0x45, 0x3c, 0x78, 0x4b, 0x75, 0x1a, 0xab, 0xb6, + 0x29, 0xcd, 0x54, 0xe9, 0x63, 0xf8, 0x46, 0x3e, 0x9a, 0x98, 0xd0, 0x6d, 0x11, 0xc1, 0x9b, 0xd7, + 0xf9, 0xff, 0x7c, 0xf3, 0x4f, 0x7e, 0x58, 0x16, 0xa4, 0x2c, 0x23, 0xf7, 0x0d, 0x59, 0xbc, 0x22, + 0xc5, 0x5d, 0x4b, 0x99, 0x79, 0x97, 0x4d, 0x6b, 0xd8, 0x04, 0x0b, 0xa7, 0x4a, 0xa7, 0x46, 0x67, + 0xda, 0x18, 0xfd, 0x4a, 0xe8, 0xa4, 0xbc, 0x2b, 0x90, 0xcb, 0x8a, 0x2c, 0xab, 0xaa, 0xf1, 0xee, + 0xe8, 0xe4, 0x17, 0x96, 0x97, 0x56, 0x1f, 0x02, 0x0e, 0x46, 0xfa, 0x86, 0xfa, 0x60, 0x09, 0x73, + 0xaa, 0xb9, 0xe4, 0x7e, 0x43, 0x7d, 0x28, 0x62, 0x91, 0xcc, 0xb3, 0x71, 0x10, 0xa4, 0x70, 0x48, + 0x6f, 0x54, 0xf3, 0xed, 0xb0, 0x22, 0x9c, 0xc5, 0x22, 0x59, 0xac, 0x23, 0xe9, 0x43, 0xc8, 0x21, + 0x84, 0xdc, 0x3a, 0xb2, 0x1f, 0x2f, 0x82, 0x53, 0x00, 0x0f, 0xbc, 0x56, 0x15, 0x85, 0xbb, 0x6e, + 0xc5, 0x64, 0xb2, 0xfa, 0x14, 0x00, 0x63, 0xa6, 0x3f, 0x02, 0x9d, 0xc3, 0x7e, 0xe1, 0xbd, 0x36, + 0xdc, 0x89, 0x67, 0xc9, 0x62, 0x7d, 0x2c, 0x27, 0x9f, 0x23, 0x07, 0xd0, 0xd6, 0xf5, 0x1f, 0x27, + 0xa4, 0x77, 0x30, 0x6d, 0x28, 0x3d, 0x1a, 0xcf, 0xb9, 0xf9, 0x86, 0xdf, 0x5f, 0xea, 0x92, 0x9f, + 0xba, 0x5c, 0x3e, 0x98, 0x0a, 0xb5, 0x79, 0xa6, 0x17, 0xf4, 0x15, 0xb9, 0xd5, 0x16, 0x35, 0xd5, + 0xd4, 0x2a, 0xa6, 0x47, 0xd4, 0x06, 0x27, 0xe5, 0xe5, 0x7b, 0xce, 0x70, 0xf1, 0x15, 0x00, 0x00, + 0xff, 0xff, 0xa8, 0x83, 0xb6, 0x9f, 0x1e, 0x02, 0x00, 0x00, } diff --git a/protos/generated/go/feast/types/FeatureRowExtended.pb.go b/protos/generated/go/feast/types/FeatureRowExtended.pb.go index a01715d8db..ad89f27db2 100644 --- a/protos/generated/go/feast/types/FeatureRowExtended.pb.go +++ b/protos/generated/go/feast/types/FeatureRowExtended.pb.go @@ -33,7 +33,7 @@ func (m *Error) Reset() { *m = Error{} } func (m *Error) String() string { return proto.CompactTextString(m) } func (*Error) ProtoMessage() {} func (*Error) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_f06d01003da6c18e, []int{0} + return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{0} } func (m *Error) 
XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Error.Unmarshal(m, b) @@ -93,7 +93,7 @@ func (m *Attempt) Reset() { *m = Attempt{} } func (m *Attempt) String() string { return proto.CompactTextString(m) } func (*Attempt) ProtoMessage() {} func (*Attempt) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_f06d01003da6c18e, []int{1} + return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{1} } func (m *Attempt) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Attempt.Unmarshal(m, b) @@ -140,7 +140,7 @@ func (m *FeatureRowExtended) Reset() { *m = FeatureRowExtended{} } func (m *FeatureRowExtended) String() string { return proto.CompactTextString(m) } func (*FeatureRowExtended) ProtoMessage() {} func (*FeatureRowExtended) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_f06d01003da6c18e, []int{2} + return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{2} } func (m *FeatureRowExtended) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRowExtended.Unmarshal(m, b) @@ -188,10 +188,10 @@ func init() { } func init() { - proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_FeatureRowExtended_f06d01003da6c18e) + proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_FeatureRowExtended_bfd3c37956d1a040) } -var fileDescriptor_FeatureRowExtended_f06d01003da6c18e = []byte{ +var fileDescriptor_FeatureRowExtended_bfd3c37956d1a040 = []byte{ // 338 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xc1, 0x6b, 0xea, 0x40, 0x10, 0xc6, 0xf1, 0xf9, 0xf2, 0x7c, 0x4e, 0x6e, 0x8b, 0x60, 0x08, 0xd2, 0x16, 0xe9, 0xc1, 0x5e, diff --git a/protos/generated/go/feast/types/Granularity.pb.go b/protos/generated/go/feast/types/Granularity.pb.go index a47e55ec37..5166f7bbc2 100644 --- a/protos/generated/go/feast/types/Granularity.pb.go +++ b/protos/generated/go/feast/types/Granularity.pb.go @@ -47,7 +47,7 @@ func (x Granularity_Enum) String() string { return proto.EnumName(Granularity_Enum_name, int32(x)) } func (Granularity_Enum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_Granularity_1271850b4f4bfac4, []int{0, 0} + return fileDescriptor_Granularity_4b57756f7a751fdb, []int{0, 0} } type Granularity struct { @@ -60,7 +60,7 @@ func (m *Granularity) Reset() { *m = Granularity{} } func (m *Granularity) String() string { return proto.CompactTextString(m) } func (*Granularity) ProtoMessage() {} func (*Granularity) Descriptor() ([]byte, []int) { - return fileDescriptor_Granularity_1271850b4f4bfac4, []int{0} + return fileDescriptor_Granularity_4b57756f7a751fdb, []int{0} } func (m *Granularity) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Granularity.Unmarshal(m, b) @@ -86,10 +86,10 @@ func init() { } func init() { - proto.RegisterFile("feast/types/Granularity.proto", fileDescriptor_Granularity_1271850b4f4bfac4) + proto.RegisterFile("feast/types/Granularity.proto", fileDescriptor_Granularity_4b57756f7a751fdb) } -var fileDescriptor_Granularity_1271850b4f4bfac4 = []byte{ +var fileDescriptor_Granularity_4b57756f7a751fdb = []byte{ // 183 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0x2f, 0x4a, 0xcc, 0x2b, 0xcd, 0x49, 0x2c, diff --git a/protos/generated/go/feast/types/Value.pb.go b/protos/generated/go/feast/types/Value.pb.go index 2b0af9e51c..b9c26f8c4b 100644 --- 
a/protos/generated/go/feast/types/Value.pb.go +++ b/protos/generated/go/feast/types/Value.pb.go @@ -60,7 +60,7 @@ func (x ValueType_Enum) String() string { return proto.EnumName(ValueType_Enum_name, int32(x)) } func (ValueType_Enum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{0, 0} + return fileDescriptor_Value_0680a2f024df1112, []int{0, 0} } type ValueType struct { @@ -73,7 +73,7 @@ func (m *ValueType) Reset() { *m = ValueType{} } func (m *ValueType) String() string { return proto.CompactTextString(m) } func (*ValueType) ProtoMessage() {} func (*ValueType) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{0} + return fileDescriptor_Value_0680a2f024df1112, []int{0} } func (m *ValueType) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValueType.Unmarshal(m, b) @@ -113,7 +113,7 @@ func (m *Value) Reset() { *m = Value{} } func (m *Value) String() string { return proto.CompactTextString(m) } func (*Value) ProtoMessage() {} func (*Value) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{1} + return fileDescriptor_Value_0680a2f024df1112, []int{1} } func (m *Value) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Value.Unmarshal(m, b) @@ -427,7 +427,7 @@ func (m *ValueList) Reset() { *m = ValueList{} } func (m *ValueList) String() string { return proto.CompactTextString(m) } func (*ValueList) ProtoMessage() {} func (*ValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{2} + return fileDescriptor_Value_0680a2f024df1112, []int{2} } func (m *ValueList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValueList.Unmarshal(m, b) @@ -761,7 +761,7 @@ func (m *BytesList) Reset() { *m = BytesList{} } func (m *BytesList) String() string { return proto.CompactTextString(m) } func (*BytesList) ProtoMessage() {} func (*BytesList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{3} + return fileDescriptor_Value_0680a2f024df1112, []int{3} } func (m *BytesList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BytesList.Unmarshal(m, b) @@ -799,7 +799,7 @@ func (m *StringList) Reset() { *m = StringList{} } func (m *StringList) String() string { return proto.CompactTextString(m) } func (*StringList) ProtoMessage() {} func (*StringList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{4} + return fileDescriptor_Value_0680a2f024df1112, []int{4} } func (m *StringList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StringList.Unmarshal(m, b) @@ -837,7 +837,7 @@ func (m *Int32List) Reset() { *m = Int32List{} } func (m *Int32List) String() string { return proto.CompactTextString(m) } func (*Int32List) ProtoMessage() {} func (*Int32List) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{5} + return fileDescriptor_Value_0680a2f024df1112, []int{5} } func (m *Int32List) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Int32List.Unmarshal(m, b) @@ -875,7 +875,7 @@ func (m *Int64List) Reset() { *m = Int64List{} } func (m *Int64List) String() string { return proto.CompactTextString(m) } func (*Int64List) ProtoMessage() {} func (*Int64List) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{6} + return fileDescriptor_Value_0680a2f024df1112, []int{6} } func (m *Int64List) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Int64List.Unmarshal(m, b) @@ -913,7 +913,7 @@ func (m *DoubleList) Reset() 
{ *m = DoubleList{} } func (m *DoubleList) String() string { return proto.CompactTextString(m) } func (*DoubleList) ProtoMessage() {} func (*DoubleList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{7} + return fileDescriptor_Value_0680a2f024df1112, []int{7} } func (m *DoubleList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DoubleList.Unmarshal(m, b) @@ -951,7 +951,7 @@ func (m *FloatList) Reset() { *m = FloatList{} } func (m *FloatList) String() string { return proto.CompactTextString(m) } func (*FloatList) ProtoMessage() {} func (*FloatList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{8} + return fileDescriptor_Value_0680a2f024df1112, []int{8} } func (m *FloatList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FloatList.Unmarshal(m, b) @@ -989,7 +989,7 @@ func (m *BoolList) Reset() { *m = BoolList{} } func (m *BoolList) String() string { return proto.CompactTextString(m) } func (*BoolList) ProtoMessage() {} func (*BoolList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{9} + return fileDescriptor_Value_0680a2f024df1112, []int{9} } func (m *BoolList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BoolList.Unmarshal(m, b) @@ -1027,7 +1027,7 @@ func (m *TimestampList) Reset() { *m = TimestampList{} } func (m *TimestampList) String() string { return proto.CompactTextString(m) } func (*TimestampList) ProtoMessage() {} func (*TimestampList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_8f69b75784c97601, []int{10} + return fileDescriptor_Value_0680a2f024df1112, []int{10} } func (m *TimestampList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TimestampList.Unmarshal(m, b) @@ -1069,9 +1069,9 @@ func init() { proto.RegisterEnum("feast.types.ValueType_Enum", ValueType_Enum_name, ValueType_Enum_value) } -func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_Value_8f69b75784c97601) } +func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_Value_0680a2f024df1112) } -var fileDescriptor_Value_8f69b75784c97601 = []byte{ +var fileDescriptor_Value_0680a2f024df1112 = []byte{ // 626 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xd1, 0x6f, 0x9a, 0x50, 0x14, 0xc6, 0xb9, 0x22, 0x0a, 0xc7, 0x36, 0x21, 0x37, 0xd9, 0xda, 0x34, 0x6d, 0x47, 0x7c, 0xe2, diff --git a/rfcs/0001-python-sdk.md b/rfcs/0001-python-sdk.md index 2f7b7a55cf..328127a7c8 100644 --- a/rfcs/0001-python-sdk.md +++ b/rfcs/0001-python-sdk.md @@ -143,14 +143,13 @@ schema: fields: - name: start_time - name: s2id - - featureId: s2id.minute.surge_factor + - featureId: s2id.surge_factor name: surge_factor timestampColumn: start_time -id: s2id.minute.surge_factor +id: s2id.surge_factor name: surge_factor owner: user@website.com -granularity: MINUTE valueType: DOUBLE entity: s2id dataStores: {} diff --git a/sdk/python/examples/quickstart/Quickstart.ipynb b/sdk/python/examples/quickstart/Quickstart.ipynb index 5b9d6f3e8b..9fd1680ea9 100644 --- a/sdk/python/examples/quickstart/Quickstart.ipynb +++ b/sdk/python/examples/quickstart/Quickstart.ipynb @@ -13,7 +13,7 @@ "\n", "from feast.sdk.resources.entity import Entity\n", "from feast.sdk.resources.storage import Storage\n", - "from feast.sdk.resources.feature import Feature, Datastore, ValueType, Granularity\n", + "from feast.sdk.resources.feature import Feature, Datastore, ValueType\n", "from feast.sdk.resources.feature_set import 
FeatureSet, FileType\n", "import feast.specs.FeatureSpec_pb2 as feature_pb\n", "\n", diff --git a/sdk/python/feast/sdk/client.py b/sdk/python/feast/sdk/client.py index a8d146be89..59b4ddd9f6 100644 --- a/sdk/python/feast/sdk/client.py +++ b/sdk/python/feast/sdk/client.py @@ -1,17 +1,16 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """ Main interface for users to interact with the Core API. """ @@ -77,9 +76,10 @@ def core_url(self): if self._core_url is None: self._core_url = os.getenv(FEAST_CORE_URL_ENV_KEY) if self._core_url is None: - raise ValueError("Core API URL not set. Either set the " + - "environment variable {} or set it explicitly." - .format(FEAST_CORE_URL_ENV_KEY)) + raise ValueError( + "Core API URL not set. Either set the " + + "environment variable {} or set it explicitly.".format( + FEAST_CORE_URL_ENV_KEY)) return self._core_url @core_url.setter @@ -91,9 +91,10 @@ def serving_url(self): if self._serving_url is None: self._serving_url = os.getenv(FEAST_SERVING_URL_ENV_KEY) if self._serving_url is None: - raise ValueError("Serving API URL not set. Either set the " + - "environment variable {} or set it explicitly." - .format(FEAST_SERVING_URL_ENV_KEY)) + raise ValueError( + "Serving API URL not set. Either set the " + + "environment variable {} or set it explicitly.".format( + FEAST_SERVING_URL_ENV_KEY)) return self._serving_url @serving_url.setter @@ -127,8 +128,11 @@ def apply(self, obj): else: return self._apply(obj) - def run(self, importer, name_override=None, - apply_entity=False, apply_features=False): + def run(self, + importer, + name_override=None, + apply_entity=False, + apply_features=False): """ Run an import job Args: @@ -143,8 +147,7 @@ def run(self, importer, name_override=None, (str) job ID of the import job """ request = JobServiceTypes.SubmitImportJobRequest( - importSpec=importer.spec - ) + importSpec=importer.spec) if name_override is not None: request.name = name_override @@ -155,18 +158,22 @@ def run(self, importer, name_override=None, self._apply_feature(importer.features[feature]) if importer.require_staging: - print("Staging file to remote path {}" - .format(importer.remote_path)) + print("Staging file to remote path {}".format( + importer.remote_path)) importer.stage() - print("Submitting job with spec:\n {}" - .format(spec_to_yaml(importer.spec))) + print("Submitting job with spec:\n {}".format( + spec_to_yaml(importer.spec))) self._connect_core() response = self._job_service_stub.SubmitJob(request) print("Submitted job with id: {}".format(response.jobId)) return response.jobId - def create_dataset(self, feature_set, start_date, end_date, - limit=None, name_prefix=None): + def create_dataset(self, + feature_set, + start_date, + end_date, + limit=None, + name_prefix=None): """ Create training dataset for a feature set. The training dataset will be bounded by event timestamp between start_date and end_date. 
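The client.py hunks above only reflow the lazy URL resolution; the behavior is unchanged: an explicitly assigned URL wins, otherwise the environment variable is consulted, and a ValueError names the missing key. A minimal standalone sketch of that logic, assuming only what the hunks show (the Client class definition itself sits outside this diff; the helper name here is ours):

import os

FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL"  # mirrors feast/sdk/env.py

def resolve_core_url(explicit_url=None):
    # An explicitly assigned URL wins; otherwise fall back to the
    # environment, exactly as the core_url property above does.
    url = explicit_url if explicit_url is not None else os.getenv(
        FEAST_CORE_URL_ENV_KEY)
    if url is None:
        raise ValueError(
            "Core API URL not set. Either set the "
            "environment variable {} or set it explicitly.".format(
                FEAST_CORE_URL_ENV_KEY))
    return url

print(resolve_core_url("localhost:6565"))  # hypothetical address
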
@@ -187,16 +194,15 @@ def create_dataset(self, feature_set, start_date, end_date, feast.resources.feature_set.DatasetInfo: DatasetInfo containing the information of training dataset """ - self._check_create_dataset_args(feature_set, start_date, - end_date, limit) + self._check_create_dataset_args(feature_set, start_date, end_date, + limit) req = DatasetServiceTypes.CreateDatasetRequest( featureSet=feature_set.proto, startDate=_timestamp_from_datetime(_parse_date(start_date)), endDate=_timestamp_from_datetime(_parse_date(end_date)), limit=limit, - namePrefix=name_prefix - ) + namePrefix=name_prefix) if self.verbose: print("creating training dataset for features: " + str(feature_set.features)) @@ -238,10 +244,14 @@ def get_serving_data(self, feature_set, entity_keys, ts_range=None): request = self._build_serving_request(feature_set, entity_keys) self._connect_serving() - return self._response_to_df(feature_set, self._serving_service_stub - .QueryFeatures(request), start, end) - - def download_dataset(self, dataset_info, dest, staging_location, + return self._response_to_df( + feature_set, self._serving_service_stub.QueryFeatures(request), + start, end) + + def download_dataset(self, + dataset_info, + dest, + staging_location, file_type=FileType.CSV): """ Download training dataset as file @@ -257,10 +267,7 @@ def download_dataset(self, dataset_info, dest, staging_location, str: path to the downloaded file """ return self._table_downloader.download_table_as_file( - dataset_info.table_id, - dest, - staging_location, - file_type) + dataset_info.table_id, dest, staging_location, file_type) def download_dataset_to_df(self, dataset_info, staging_location): """ @@ -275,8 +282,7 @@ def download_dataset_to_df(self, dataset_info, staging_location): """ return self._table_downloader.download_table_as_df( - dataset_info.table_id, - staging_location) + dataset_info.table_id, staging_location) def close(self): """ @@ -304,9 +310,10 @@ def _connect_serving(self): def _build_serving_request(self, feature_set, entity_keys): """Helper function to build serving service request.""" - return QueryFeaturesRequest(entityName=feature_set.entity, - entityId=entity_keys, - featureId=feature_set.features) + return QueryFeaturesRequest( + entityName=feature_set.entity, + entityId=entity_keys, + featureId=feature_set.features) def _response_to_df(self, feature_set, response, start=None, end=None): is_filter_time = start is not None and end is not None @@ -316,8 +323,7 @@ def _response_to_df(self, feature_set, response, start=None, end=None): row = {response.entityName: entity_id} for feature_id in feature_map: v = feature_map[feature_id].value - if is_filter_time and not _is_granularity_none( - feature_id): + if is_filter_time: ts = feature_map[feature_id].timestamp.ToDatetime() if ts < start or ts > end: continue @@ -353,9 +359,9 @@ def _apply_feature(self, feature): """ self._connect_core() response = self._core_service_stub.ApplyFeature(feature.spec) - if self.verbose: print( - "Successfully applied feature with id: {}\n---\n{}" - .format(response.featureId, feature)) + if self.verbose: + print("Successfully applied feature with id: {}\n---\n{}".format( + response.featureId, feature)) return response.featureId def _apply_entity(self, entity): @@ -367,8 +373,8 @@ def _apply_entity(self, entity): self._connect_core() response = self._core_service_stub.ApplyEntity(entity.spec) if self.verbose: - print("Successfully applied entity with name: {}\n---\n{}" - .format(response.entityName, entity)) + print("Successfully 
applied entity with name: {}\n---\n{}".format( + response.entityName, entity)) return response.entityName def _apply_feature_group(self, feature_group): @@ -381,9 +387,9 @@ def _apply_feature_group(self, feature_group): self._connect_core() response = self._core_service_stub.ApplyFeatureGroup( feature_group.spec) - if self.verbose: print("Successfully applied feature group with id: " + - "{}\n---\n{}".format(response.featureGroupId, - feature_group)) + if self.verbose: + print("Successfully applied feature group with id: " + + "{}\n---\n{}".format(response.featureGroupId, feature_group)) return response.featureGroupId def _apply_storage(self, storage): @@ -394,12 +400,13 @@ def _apply_storage(self, storage): """ self._connect_core() response = self._core_service_stub.ApplyStorage(storage.spec) - if self.verbose: print("Successfully applied storage with id: " + - "{}\n{}".format(response.storageId, storage)) + if self.verbose: + print("Successfully applied storage with id: " + + "{}\n{}".format(response.storageId, storage)) return response.storageId - def _check_create_dataset_args(self, feature_set, start_date, - end_date, limit): + def _check_create_dataset_args(self, feature_set, start_date, end_date, + limit): if len(feature_set.features) < 1: raise ValueError("feature set is empty") @@ -431,7 +438,3 @@ def _timestamp_from_datetime(dt): ts = Timestamp() ts.FromDatetime(dt) return ts - - -def _is_granularity_none(feature_id): - return feature_id.split(".")[1] == "none" diff --git a/sdk/python/feast/sdk/env.py b/sdk/python/feast/sdk/env.py index b9ff4470f7..28c012aba7 100644 --- a/sdk/python/feast/sdk/env.py +++ b/sdk/python/feast/sdk/env.py @@ -1,17 +1,16 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - FEAST_SERVING_URL_ENV_KEY = "FEAST_SERVING_URL" FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL" diff --git a/sdk/python/feast/sdk/importer.py b/sdk/python/feast/sdk/importer.py index f9a97aad31..d8444be3ab 100644 --- a/sdk/python/feast/sdk/importer.py +++ b/sdk/python/feast/sdk/importer.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
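Throughout this change the three-segment feature id <entity>.<granularity>.<name> collapses to <entity>.<name>, which is why client.py can delete _is_granularity_none above: the helper keyed off the middle segment, and _response_to_df now applies its time filter unconditionally. A short illustrative sketch, reusing the ids from the RFC diff earlier in this change:

old_id = "s2id.minute.surge_factor"  # old scheme: <entity>.<granularity>.<name>
new_id = "s2id.surge_factor"         # new scheme: <entity>.<name>

def is_granularity_none(feature_id):
    # Behavior of the deleted client.py helper; it only made sense
    # while a granularity name occupied the middle segment.
    return feature_id.split(".")[1] == "none"

entity, name = new_id.split(".")
assert (entity, name) == ("s2id", "surge_factor")
assert not is_granularity_none(old_id)  # the granularity here is "minute"
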
@@ -73,9 +73,17 @@ def entity(self): return self._specs.get("entity") @classmethod - def from_csv(cls, path, entity, granularity, owner, staging_location=None, - id_column=None, feature_columns=None, timestamp_column=None, - timestamp_value=None, serving_store=None, warehouse_store=None, + def from_csv(cls, + path, + entity, + owner, + staging_location=None, + id_column=None, + feature_columns=None, + timestamp_column=None, + timestamp_value=None, + serving_store=None, + warehouse_store=None, job_options={}): """Creates an importer from a given csv dataset. This file can be either local or remote (in gcs). If it's a local file @@ -84,7 +92,6 @@ def from_csv(cls, path, entity, granularity, owner, staging_location=None, Args: path (str): path to csv file entity (str): entity id - granularity (Granularity): granularity of data owner (str): owner staging_location (str, optional): Defaults to None. Staging location for ingesting a local csv file. @@ -115,7 +122,7 @@ def from_csv(cls, path, entity, granularity, owner, staging_location=None, else: df = pd.read_csv(path) schema, features = \ - _detect_schema_and_feature(entity, granularity, owner, id_column, + _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, timestamp_value, serving_store, warehouse_store, df) @@ -129,9 +136,17 @@ def from_csv(cls, path, entity, granularity, owner, staging_location=None, return cls(specs, df, props) @classmethod - def from_bq(cls, bq_path, entity, granularity, owner, limit=10, - id_column=None, feature_columns=None, timestamp_column=None, - timestamp_value=None, serving_store=None, warehouse_store=None, + def from_bq(cls, + bq_path, + entity, + owner, + limit=10, + id_column=None, + feature_columns=None, + timestamp_column=None, + timestamp_value=None, + serving_store=None, + warehouse_store=None, job_options={}): """Creates an importer from a given bigquery table. @@ -139,7 +154,6 @@ def from_bq(cls, bq_path, entity, granularity, owner, limit=10, bq_path (str): path to bigquery table, in the format project.dataset.table entity (str): entity id - granularity (Granularity): granularity of data owner (str): owner limit (int, optional): Defaults to 10. The maximum number of rows to read into the importer df. @@ -175,21 +189,29 @@ def from_bq(cls, bq_path, entity, granularity, owner, limit=10, } df = head(cli, table, limit) schema, features = \ - _detect_schema_and_feature(entity, granularity, owner, id_column, + _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, timestamp_value, serving_store, warehouse_store, df) - iport_spec = _create_import("bigquery", source_options, - job_options, entity, schema) + iport_spec = _create_import("bigquery", source_options, job_options, + entity, schema) props = _properties("bigquery", table.num_rows, False, None) specs = _specs(iport_spec, Entity(name=entity), features) return cls(specs, df, props) @classmethod - def from_df(cls, df, entity, granularity, owner, staging_location, - id_column=None, feature_columns=None, timestamp_column=None, - timestamp_value=None, serving_store=None, warehouse_store=None, + def from_df(cls, + df, + entity, + owner, + staging_location, + id_column=None, + feature_columns=None, + timestamp_column=None, + timestamp_value=None, + serving_store=None, + warehouse_store=None, job_options={}): """Creates an importer from a given pandas dataframe. To import a file from a dataframe, the data will have to be staged. 
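For reference, a hedged usage sketch of the reshaped from_csv classmethod above, now that the granularity parameter is gone. The Importer class name comes from the return-type docstrings in this file; the CSV path and staging bucket are hypothetical, and the column names are borrowed from the RFC example in this change:

from feast.sdk.importer import Importer  # module path per this diff

importer = Importer.from_csv(
    path="surge_factors.csv",                # hypothetical local CSV
    entity="s2id",
    owner="user@website.com",
    staging_location="gs://bucket/staging",  # hypothetical staging bucket
    id_column="s2id",
    timestamp_column="start_time")
# Note: no granularity argument any more; it was removed across the SDK.
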
@@ -197,7 +219,6 @@ def from_df(cls, df, entity, granularity, owner, staging_location, Args: path (str): path to csv file entity (str): entity id - granularity (Granularity): granularity of data owner (str): owner staging_location (str): Defaults to None. Staging location for ingesting a local csv file. @@ -219,20 +240,20 @@ def from_df(cls, df, entity, granularity, owner, staging_location, Returns: Importer: the importer for the dataset provided. """ - tmp_file_name = ("tmp_{}_{}.csv" - .format(entity, int(round(time.time() * 1000)))) - source_options = { - "format": "csv" - } - source_options["path"], require_staging = ( - _get_remote_location(tmp_file_name, staging_location)) + tmp_file_name = ("tmp_{}_{}.csv".format(entity, + int(round( + time.time() * 1000)))) + src_type = "file.csv" + source_options = {} + source_options["path"], require_staging = (_get_remote_location( + tmp_file_name, staging_location)) schema, features = \ - _detect_schema_and_feature(entity, granularity, owner, id_column, + _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, timestamp_value, serving_store, warehouse_store, df) - iport_spec = _create_import("file", source_options, - job_options, entity, schema) + iport_spec = _create_import(src_type, source_options, job_options, + entity, schema) props = _properties("dataframe", len(df.index), require_staging, source_options["path"]) @@ -302,11 +323,7 @@ def _specs(iport, entity, features): [type] -- [description] """ - return { - "import": iport, - "features": features, - "entity": entity - } + return {"import": iport, "features": features, "entity": entity} def _get_remote_location(path, staging_location): @@ -330,16 +347,13 @@ def _get_remote_location(path, staging_location): return staging_location + "/" + filename, True -def _detect_schema_and_feature(entity, granularity, owner, id_column, - feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df): +def _detect_schema_and_feature(entity, owner, id_column, feature_columns, + timestamp_column, timestamp_value, + serving_store, warehouse_store, df): """Create schema object for import spec. 
Args: entity (str): entity name - granularity (feast.types.Granularity_pb2.Granularity): granularity of - the feature id_column (str): column name of entity id timestamp_column (str): column name of timestamp timestamp_value (datetime.datetime): timestamp to apply to all @@ -374,19 +388,19 @@ def _detect_schema_and_feature(entity, granularity, owner, id_column, ts = Timestamp() ts.GetCurrentTime() else: - ts = Timestamp(seconds= - int((timestamp_value - datetime.datetime(1970, 1, 1)) - .total_seconds())) + ts = Timestamp( + seconds=int((timestamp_value - + datetime.datetime(1970, 1, 1)).total_seconds())) schema.timestampValue.CopyFrom(ts) features = {} if feature_columns is not None: - # check if all column exist and create feature accordingly + # check if all column exist and create feature accordingly for column in feature_columns: if column not in df.columns: - raise ValueError("Column with name {} is not found".format(column)) - features[column] = _create_feature(df[column], entity, - granularity, owner, + raise ValueError( + "Column with name {} is not found".format(column)) + features[column] = _create_feature(df[column], entity, owner, serving_store, warehouse_store) else: # get all column except entity id and timestampColumn @@ -394,8 +408,7 @@ def _detect_schema_and_feature(entity, granularity, owner, id_column, _remove_safely(feature_columns, schema.entityIdColumn) _remove_safely(feature_columns, schema.timestampColumn) for column in feature_columns: - features[column] = _create_feature(df[column], entity, - granularity, owner, + features[column] = _create_feature(df[column], entity, owner, serving_store, warehouse_store) for col in df.columns: @@ -411,15 +424,12 @@ def _detect_schema_and_feature(entity, granularity, owner, id_column, return schema, features_dict -def _create_feature(column, entity, granularity, owner, - serving_store, warehouse_store): +def _create_feature(column, entity, owner, serving_store, warehouse_store): """Create Feature object. Args: column (pandas.Series): data column entity (str): entity name - granularity (feast.types.Granularity_pb2.Granularity): granularity of - the feature owner (str): owner of the feature serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. Serving store to write the features in this instance to. @@ -433,7 +443,6 @@ def _create_feature(column, entity, granularity, owner, feature = Feature( name=column.name, entity=entity, - granularity=granularity, owner=owner, value_type=dtype_to_value_type(column.dtype)) if serving_store is not None: diff --git a/sdk/python/feast/sdk/resources/entity.py b/sdk/python/feast/sdk/resources/entity.py index 59560840bf..ad9b566c8e 100644 --- a/sdk/python/feast/sdk/resources/entity.py +++ b/sdk/python/feast/sdk/resources/entity.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -34,8 +34,7 @@ def __init__(self, name="", description="", tags=[]): tags (list[str], optional): defaults to []. 
list of tags for this entity """ - self.__spec = EntitySpec(name=name, description=description, - tags=tags) + self.__spec = EntitySpec(name=name, description=description, tags=tags) @property def spec(self): @@ -77,20 +76,15 @@ def from_yaml(cls, path): content = yaml.safe_load(file.read()) entity = cls() entity.__spec = Parse( - json.dumps(content), - EntitySpec(), - ignore_unknown_fields=False) + json.dumps(content), EntitySpec(), ignore_unknown_fields=False) return entity - def create_feature(self, name, granularity, value_type, owner, - description): + def create_feature(self, name, value_type, owner, description): """Create a feature related to this entity Args: name (str): feature name - granularity (feast.types.Granularity_pb2.Granularity): granularity - of the feature. e.g.: Granularity.NONE, Granularity.SECOND, etc - value_type (feast.types.ValueType_pb2.ValueType): value type of + value_type (feast.types.ValueType_pb2.ValueType): value type of the feature owner (str): owner of the feature description (str): feature's description @@ -114,4 +108,4 @@ def dump(self, path): """ with open(path, 'w') as file: file.write(str(self)) - print("Saved spec to {}".format(path)) \ No newline at end of file + print("Saved spec to {}".format(path)) diff --git a/sdk/python/feast/sdk/resources/feature.py b/sdk/python/feast/sdk/resources/feature.py index 5a88ae5369..714a140451 100644 --- a/sdk/python/feast/sdk/resources/feature.py +++ b/sdk/python/feast/sdk/resources/feature.py @@ -1,35 +1,25 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import yaml -import json import enum -from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore -from feast.sdk.utils.print_utils import spec_to_yaml -from feast.types.Granularity_pb2 import Granularity as Granularity_pb2 -from google.protobuf.json_format import Parse +import json +import yaml +from google.protobuf.json_format import Parse -class Granularity(enum.Enum): - """ - Feature's Granularity - """ - NONE = 0 - DAY = 1 - HOUR = 2 - MINUTE = 3 - SECOND = 4 +from feast.sdk.utils.print_utils import spec_to_yaml +from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore class ValueType(enum.Enum): @@ -51,16 +41,24 @@ class Feature: """ Wrapper class for feast feature """ - def __init__(self, name='', entity='', granularity=Granularity.NONE, - owner='', value_type=ValueType.DOUBLE, description='', uri='', - warehouse_store=None, serving_store=None, group='', tags=[], options={}): + + def __init__(self, + name='', + entity='', + owner='', + value_type=ValueType.DOUBLE, + description='', + uri='', + warehouse_store=None, + serving_store=None, + group='', + tags=[], + options={}): """Create feast feature instance. 
Args: name (str): name of feature, in lower snake case entity (str): entity the feature belongs to, in lower case - granularity (feast.sdk.resources.feature.Granularity): - granularity of the feature owner (str): owner of the feature value_type (feast.sdk.resources.feature.ValueType): defaults to ValueType.DOUBLE. value type of the feature @@ -75,21 +73,27 @@ def __init__(self, name='', entity='', granularity=Granularity.NONE, tags (list[str], optional): tags assigned to the feature options (dict, optional): additional options for the feature """ - id = '.'.join([entity, - Granularity_pb2.Enum.Name(granularity.value), name]).lower() - + id = '{}.{}'.format(entity, name).lower() warehouse_store_spec = None serving_store_spec = None - if (serving_store is not None): + if serving_store is not None: serving_store_spec = serving_store.spec - if (warehouse_store is not None): + if warehouse_store is not None: warehouse_store_spec = warehouse_store.spec - data_stores = DataStores(serving = serving_store_spec, - warehouse = warehouse_store_spec) - self.__spec = FeatureSpec(id=id, granularity=granularity.value, - name=name, entity=entity, owner=owner, dataStores=data_stores, - description=description, uri=uri, valueType=value_type.value, - group=group, tags=tags, options=options) + data_stores = DataStores( + serving=serving_store_spec, warehouse=warehouse_store_spec) + self.__spec = FeatureSpec( + id=id, + name=name, + entity=entity, + owner=owner, + dataStores=data_stores, + description=description, + uri=uri, + valueType=value_type.value, + group=group, + tags=tags, + options=options) @property def spec(self): @@ -107,18 +111,7 @@ def name(self): def name(self, value): self.__spec.name = value id_split = self.id.split('.') - id_split[2] = value - self.__spec.id = '.'.join(id_split) - - @property - def granularity(self): - return Granularity(self.__spec.granularity) - - @granularity.setter - def granularity(self, value): - self.__spec.granularity = value.value - id_split = self.id.split('.') - id_split[1] = Granularity_pb2.Enum.Name(self.__spec.granularity).lower() + id_split[1] = value self.__spec.id = '.'.join(id_split) @property @@ -226,7 +219,7 @@ def from_yaml(cls, path): FeatureSpec(), ignore_unknown_fields=False) return feature - + def __str__(self): """Print the feature in yaml format @@ -249,7 +242,7 @@ def dump(self, path): class Datastore: def __init__(self, id, options={}): - self.__spec = DataStore(id = id, options = options) + self.__spec = DataStore(id=id, options=options) def __str__(self): """Print the datastore in yaml format diff --git a/sdk/python/feast/sdk/resources/feature_group.py b/sdk/python/feast/sdk/resources/feature_group.py index 39d2c2b38a..3f73c571ca 100644 --- a/sdk/python/feast/sdk/resources/feature_group.py +++ b/sdk/python/feast/sdk/resources/feature_group.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,6 +25,7 @@ class FeatureGroup(): """ Wrapper class for feast feature group """ + def __init__(self, id, tags=[], warehouse_store=None, serving_store=None): """Create FeatureGroup instance.
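
The feature.py hunks above replace the three-part entity.granularity.name id with a two-part entity.name id; a short sketch of the resulting behaviour (names are illustrative):

from feast.sdk.resources.feature import Feature, ValueType

feature = Feature(name="trips_today", entity="driver",
                  value_type=ValueType.DOUBLE, owner="bob@example.com")
assert feature.id == "driver.trips_today"

# the name setter now rewrites id_split[1] instead of id_split[2]
feature.name = "trips_this_week"
assert feature.id == "driver.trips_this_week"
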
@@ -43,10 +44,10 @@ def __init__(self, id, tags=[], warehouse_store=None, serving_store=None): serving_store_spec = serving_store.spec if (warehouse_store is not None): warehouse_store_spec = warehouse_store.spec - data_stores = DataStores(serving=serving_store_spec, - warehouse = warehouse_store_spec) - self.__spec = FeatureGroupSpec(id=id, tags=tags, - dataStores=data_stores) + data_stores = DataStores( + serving=serving_store_spec, warehouse=warehouse_store_spec) + self.__spec = FeatureGroupSpec( + id=id, tags=tags, dataStores=data_stores) @property def spec(self): @@ -55,11 +56,11 @@ def spec(self): @property def id(self): return self.__spec.id - + @id.setter def id(self, value): self.__spec.id = value - + @property def warehouse_store(self): return self.__spec.dataStores.warehouse @@ -75,7 +76,7 @@ def serving_store(self): @serving_store.setter def serving_store(self, value): self.__spec.dataStores.serving.CopyFrom(value) - + @property def tags(self): return self.__spec.tags diff --git a/sdk/python/feast/sdk/resources/feature_set.py b/sdk/python/feast/sdk/resources/feature_set.py index b39979b155..1895f06c48 100644 --- a/sdk/python/feast/sdk/resources/feature_set.py +++ b/sdk/python/feast/sdk/resources/feature_set.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/sdk/python/feast/sdk/resources/storage.py b/sdk/python/feast/sdk/resources/storage.py index 942d1a477e..601aad5206 100644 --- a/sdk/python/feast/sdk/resources/storage.py +++ b/sdk/python/feast/sdk/resources/storage.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,8 @@ class Storage: """ Wrapper class for feast storage """ - def __init__(self, id = "", type = "", options={}): + + def __init__(self, id="", type="", options={}): """Create Storage instance. Args: @@ -32,7 +33,7 @@ type (str): storage type options (dict, optional) : map of storage options """ - self.__spec = StorageSpec(id = id, type = type, options = options) + self.__spec = StorageSpec(id=id, type=type, options=options) @property def spec(self): diff --git a/sdk/python/feast/sdk/utils/bq_util.py b/sdk/python/feast/sdk/utils/bq_util.py index 535d56a14c..8cbc4b090b 100644 --- a/sdk/python/feast/sdk/utils/bq_util.py +++ b/sdk/python/feast/sdk/utils/bq_util.py @@ -1,18 +1,17 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
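
For context, an illustrative FeatureGroup construction matching the constructor above; the ids mirror the sample fixtures asserted later in the tests:

from feast.sdk.resources.feature import Datastore
from feast.sdk.resources.feature_group import FeatureGroup

fg = FeatureGroup(
    id="my_fg",
    tags=["tag1", "tag2"],
    warehouse_store=Datastore(id="BIGQUERY1"),
    serving_store=Datastore(id="REDIS1"))
assert fg.serving_store.id == "REDIS1"
assert fg.warehouse_store.id == "BIGQUERY1"
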
# You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import os import time @@ -106,15 +105,14 @@ def download_table_as_file(self, table_id, dest, staging_location, if not is_gs_path(staging_location): raise ValueError("staging_uri must be a directory in GCS") - temp_file_name = 'temp_{}'.format( - int(round(time.time() * 1000))) + temp_file_name = 'temp_{}'.format(int(round(time.time() * 1000))) staging_file_path = os.path.join(staging_location, temp_file_name) job_config = ExtractJobConfig() job_config.destination_format = file_type src_table = Table.from_string(table_id) - job = self.bq.extract_table(src_table, staging_file_path, - job_config=job_config) + job = self.bq.extract_table( + src_table, staging_file_path, job_config=job_config) # await completion job.result() @@ -139,15 +137,15 @@ def download_table_as_df(self, table_id, staging_location): if not is_gs_path(staging_location): raise ValueError("staging_uri must be a directory in GCS") - temp_file_name = 'temp_{}'.format( - int(round(time.time() * 1000))) + temp_file_name = 'temp_{}'.format(int(round(time.time() * 1000))) staging_file_path = os.path.join(staging_location, temp_file_name) job_config = ExtractJobConfig() job_config.destination_format = DestinationFormat.CSV - job = self.bq.extract_table(Table.from_string(table_id), - staging_file_path, - job_config=job_config) + job = self.bq.extract_table( + Table.from_string(table_id), + staging_file_path, + job_config=job_config) # await completion job.result() diff --git a/sdk/python/feast/sdk/utils/gs_utils.py b/sdk/python/feast/sdk/utils/gs_utils.py index 4a37fb416d..2561a115a8 100644 --- a/sdk/python/feast/sdk/utils/gs_utils.py +++ b/sdk/python/feast/sdk/utils/gs_utils.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/sdk/python/feast/sdk/utils/print_utils.py b/sdk/python/feast/sdk/utils/print_utils.py index 8399f2fb52..926e4eeac5 100644 --- a/sdk/python/feast/sdk/utils/print_utils.py +++ b/sdk/python/feast/sdk/utils/print_utils.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
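
Both bq_util download helpers share the staging-path construction reformatted above; a minimal sketch, with a hypothetical bucket name:

import os
import time

staging_location = "gs://feast-staging/tmp"  # hypothetical GCS directory
temp_file_name = 'temp_{}'.format(int(round(time.time() * 1000)))
staging_file_path = os.path.join(staging_location, temp_file_name)
# e.g. "gs://feast-staging/tmp/temp_1553164701941"
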
@@ -39,4 +39,4 @@ def _dict_representer(dumper, data): def _dict_constructor(loader, node): - return OrderedDict(loader.construct_pairs(node)) \ No newline at end of file + return OrderedDict(loader.construct_pairs(node)) diff --git a/sdk/python/feast/sdk/utils/types.py b/sdk/python/feast/sdk/utils/types.py index 3f441a94fd..cf72425c25 100644 --- a/sdk/python/feast/sdk/utils/types.py +++ b/sdk/python/feast/sdk/utils/types.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/sdk/python/feast/specs/FeatureSpec_pb2.py b/sdk/python/feast/specs/FeatureSpec_pb2.py index 28b10a527c..ce50ca533a 100644 --- a/sdk/python/feast/specs/FeatureSpec_pb2.py +++ b/sdk/python/feast/specs/FeatureSpec_pb2.py @@ -14,7 +14,6 @@ from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 -from feast.types import Granularity_pb2 as feast_dot_types_dot_Granularity__pb2 from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 @@ -23,9 +22,9 @@ package='feast.specs', syntax='proto3', serialized_options=_b('\n\013feast.specsB\020FeatureSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\x1a\x1d\x66\x65\x61st/types/Granularity.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\xfe\x02\n\x0b\x46\x65\x61tureSpec\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05owner\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x0b\n\x03uri\x18\x05 \x01(\t\x12\x32\n\x0bgranularity\x18\x06 \x01(\x0e\x32\x1d.feast.types.Granularity.Enum\x12.\n\tvalueType\x18\x07 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x0e\n\x06\x65ntity\x18\x08 \x01(\t\x12\r\n\x05group\x18\t \x01(\t\x12\x0c\n\x04tags\x18\n \x03(\t\x12\x36\n\x07options\x18\x0b \x03(\x0b\x32%.feast.specs.FeatureSpec.OptionsEntry\x12+\n\ndataStores\x18\x0c \x01(\x0b\x32\x17.feast.specs.DataStores\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"`\n\nDataStores\x12\'\n\x07serving\x18\x01 \x01(\x0b\x32\x16.feast.specs.DataStore\x12)\n\twarehouse\x18\x02 \x01(\x0b\x32\x16.feast.specs.DataStore\"}\n\tDataStore\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07options\x18\x02 \x03(\x0b\x32#.feast.specs.DataStore.OptionsEntry\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42W\n\x0b\x66\x65\x61st.specsB\x10\x46\x65\x61tureSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') + serialized_pb=_b('\n\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\xca\x02\n\x0b\x46\x65\x61tureSpec\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05owner\x18\x03 
\x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x0b\n\x03uri\x18\x05 \x01(\t\x12.\n\tvalueType\x18\x07 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x0e\n\x06\x65ntity\x18\x08 \x01(\t\x12\r\n\x05group\x18\t \x01(\t\x12\x0c\n\x04tags\x18\n \x03(\t\x12\x36\n\x07options\x18\x0b \x03(\x0b\x32%.feast.specs.FeatureSpec.OptionsEntry\x12+\n\ndataStores\x18\x0c \x01(\x0b\x32\x17.feast.specs.DataStores\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"`\n\nDataStores\x12\'\n\x07serving\x18\x01 \x01(\x0b\x32\x16.feast.specs.DataStore\x12)\n\twarehouse\x18\x02 \x01(\x0b\x32\x16.feast.specs.DataStore\"}\n\tDataStore\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07options\x18\x02 \x03(\x0b\x32#.feast.specs.DataStore.OptionsEntry\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42W\n\x0b\x66\x65\x61st.specsB\x10\x46\x65\x61tureSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') , - dependencies=[feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,feast_dot_types_dot_Granularity__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) + dependencies=[feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) @@ -63,8 +62,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=500, - serialized_end=546, + serialized_start=417, + serialized_end=463, ) _FEATURESPEC = _descriptor.Descriptor( @@ -110,49 +109,42 @@ is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='granularity', full_name='feast.specs.FeatureSpec.granularity', index=5, - number=6, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='valueType', full_name='feast.specs.FeatureSpec.valueType', index=6, + name='valueType', full_name='feast.specs.FeatureSpec.valueType', index=5, number=7, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='entity', full_name='feast.specs.FeatureSpec.entity', index=7, + name='entity', full_name='feast.specs.FeatureSpec.entity', index=6, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='group', full_name='feast.specs.FeatureSpec.group', index=8, + name='group', full_name='feast.specs.FeatureSpec.group', index=7, number=9, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='tags', full_name='feast.specs.FeatureSpec.tags', index=9, + name='tags', full_name='feast.specs.FeatureSpec.tags', index=8, number=10, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='options', full_name='feast.specs.FeatureSpec.options', index=10, + name='options', full_name='feast.specs.FeatureSpec.options', index=9, number=11, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='dataStores', full_name='feast.specs.FeatureSpec.dataStores', index=11, + name='dataStores', full_name='feast.specs.FeatureSpec.dataStores', index=10, number=12, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -170,8 +162,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=164, - serialized_end=546, + serialized_start=133, + serialized_end=463, ) @@ -208,8 +200,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=548, - serialized_end=644, + serialized_start=465, + serialized_end=561, ) @@ -246,8 +238,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=500, - serialized_end=546, + serialized_start=417, + serialized_end=463, ) _DATASTORE = _descriptor.Descriptor( @@ -283,12 +275,11 @@ extension_ranges=[], oneofs=[ ], - serialized_start=646, - serialized_end=771, + serialized_start=563, + serialized_end=688, ) _FEATURESPEC_OPTIONSENTRY.containing_type = _FEATURESPEC -_FEATURESPEC.fields_by_name['granularity'].enum_type = feast_dot_types_dot_Granularity__pb2._GRANULARITY_ENUM _FEATURESPEC.fields_by_name['valueType'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM _FEATURESPEC.fields_by_name['options'].message_type = _FEATURESPEC_OPTIONSENTRY _FEATURESPEC.fields_by_name['dataStores'].message_type = _DATASTORES diff --git a/sdk/python/feast/specs/ImportJobSpecs_pb2.py b/sdk/python/feast/specs/ImportJobSpecs_pb2.py new file mode 100644 index 0000000000..da84644da8 --- /dev/null +++ b/sdk/python/feast/specs/ImportJobSpecs_pb2.py @@ -0,0 +1,123 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/specs/ImportJobSpecs.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.specs import ImportSpec_pb2 as feast_dot_specs_dot_ImportSpec__pb2 +from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 +from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 +from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/specs/ImportJobSpecs.proto', + package='feast.specs', + syntax='proto3', + serialized_options=_b('\n\013feast.specsB\023ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), + serialized_pb=_b('\n feast/specs/ImportJobSpecs.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/ImportSpec.proto\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\"\xcf\x02\n\x0eImportJobSpecs\x12\r\n\x05jobId\x18\x01 \x01(\t\x12+\n\nimportSpec\x18\x02 \x01(\x0b\x32\x17.feast.specs.ImportSpec\x12,\n\x0b\x65ntitySpecs\x18\x03 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x12.\n\x0c\x66\x65\x61tureSpecs\x18\x04 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x12\x35\n\x13servingStorageSpecs\x18\x05 \x03(\x0b\x32\x18.feast.specs.StorageSpec\x12\x37\n\x15warehouseStorageSpecs\x18\x06 \x03(\x0b\x32\x18.feast.specs.StorageSpec\x12\x33\n\x11\x65rrorsStorageSpec\x18\x07 \x01(\x0b\x32\x18.feast.specs.StorageSpecBZ\n\x0b\x66\x65\x61st.specsB\x13ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') + , + dependencies=[feast_dot_specs_dot_ImportSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,]) + + + + +_IMPORTJOBSPECS = _descriptor.Descriptor( + name='ImportJobSpecs', + full_name='feast.specs.ImportJobSpecs', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='jobId', full_name='feast.specs.ImportJobSpecs.jobId', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='importSpec', full_name='feast.specs.ImportJobSpecs.importSpec', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entitySpecs', full_name='feast.specs.ImportJobSpecs.entitySpecs', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='featureSpecs', full_name='feast.specs.ImportJobSpecs.featureSpecs', index=3, + number=4, type=11, cpp_type=10, 
label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='servingStorageSpecs', full_name='feast.specs.ImportJobSpecs.servingStorageSpecs', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='warehouseStorageSpecs', full_name='feast.specs.ImportJobSpecs.warehouseStorageSpecs', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='errorsStorageSpec', full_name='feast.specs.ImportJobSpecs.errorsStorageSpec', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=172, + serialized_end=507, +) + +_IMPORTJOBSPECS.fields_by_name['importSpec'].message_type = feast_dot_specs_dot_ImportSpec__pb2._IMPORTSPEC +_IMPORTJOBSPECS.fields_by_name['entitySpecs'].message_type = feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC +_IMPORTJOBSPECS.fields_by_name['featureSpecs'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC +_IMPORTJOBSPECS.fields_by_name['servingStorageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC +_IMPORTJOBSPECS.fields_by_name['warehouseStorageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC +_IMPORTJOBSPECS.fields_by_name['errorsStorageSpec'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC +DESCRIPTOR.message_types_by_name['ImportJobSpecs'] = _IMPORTJOBSPECS +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ImportJobSpecs = _reflection.GeneratedProtocolMessageType('ImportJobSpecs', (_message.Message,), dict( + DESCRIPTOR = _IMPORTJOBSPECS, + __module__ = 'feast.specs.ImportJobSpecs_pb2' + # @@protoc_insertion_point(class_scope:feast.specs.ImportJobSpecs) + )) +_sym_db.RegisterMessage(ImportJobSpecs) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/types/FeatureRow_pb2.py b/sdk/python/feast/types/FeatureRow_pb2.py index c9ed5a29bc..4096955df8 100644 --- a/sdk/python/feast/types/FeatureRow_pb2.py +++ b/sdk/python/feast/types/FeatureRow_pb2.py @@ -14,7 +14,6 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from feast.types import Feature_pb2 as feast_dot_types_dot_Feature__pb2 -from feast.types import Granularity_pb2 as feast_dot_types_dot_Granularity__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -22,9 +21,9 @@ package='feast.types', syntax='proto3', serialized_options=_b('\n\013feast.typesB\017FeatureRowProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - 
serialized_pb=_b('\n\x1c\x66\x65\x61st/types/FeatureRow.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x66\x65\x61st/types/Feature.proto\x1a\x1d\x66\x65\x61st/types/Granularity.proto\"\x9e\x01\n\rFeatureRowKey\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\t\x12\x32\n\x0bgranularity\x18\x05 \x01(\x0e\x32\x1d.feast.types.Granularity.Enum\"\xc3\x01\n\nFeatureRow\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12&\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x14.feast.types.Feature\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\t\x12\x32\n\x0bgranularity\x18\x05 \x01(\x0e\x32\x1d.feast.types.Granularity.EnumBV\n\x0b\x66\x65\x61st.typesB\x0f\x46\x65\x61tureRowProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') + serialized_pb=_b('\n\x1c\x66\x65\x61st/types/FeatureRow.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x66\x65\x61st/types/Feature.proto\"j\n\rFeatureRowKey\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\t\"\x8f\x01\n\nFeatureRow\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12&\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x14.feast.types.Feature\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\tBV\n\x0b\x66\x65\x61st.typesB\x0f\x46\x65\x61tureRowProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Feature__pb2.DESCRIPTOR,feast_dot_types_dot_Granularity__pb2.DESCRIPTOR,]) + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Feature__pb2.DESCRIPTOR,]) @@ -57,13 +56,6 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='granularity', full_name='feast.types.FeatureRowKey.granularity', index=3, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -76,8 +68,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=137, - serialized_end=295, + serialized_start=105, + serialized_end=211, ) @@ -116,13 +108,6 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='granularity', full_name='feast.types.FeatureRow.granularity', index=4, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -135,15 +120,13 @@ extension_ranges=[], oneofs=[ ], - serialized_start=298, - serialized_end=493, + serialized_start=214, + serialized_end=357, ) _FEATUREROWKEY.fields_by_name['eventTimestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_FEATUREROWKEY.fields_by_name['granularity'].enum_type = feast_dot_types_dot_Granularity__pb2._GRANULARITY_ENUM 
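
With granularity dropped from the regenerated FeatureRow protos, a row is keyed only by entity and event time. A hedged construction sketch, assuming the feast.types.Feature message carries an id field:

from google.protobuf.timestamp_pb2 import Timestamp

from feast.types.Feature_pb2 import Feature
from feast.types.FeatureRow_pb2 import FeatureRow

row = FeatureRow(
    entityKey="driver-1",  # illustrative key
    entityName="driver",
    eventTimestamp=Timestamp(seconds=1543622400),
    features=[Feature(id="driver.trips_today")])
# row.granularity would now raise AttributeError
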
_FEATUREROW.fields_by_name['features'].message_type = feast_dot_types_dot_Feature__pb2._FEATURE _FEATUREROW.fields_by_name['eventTimestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_FEATUREROW.fields_by_name['granularity'].enum_type = feast_dot_types_dot_Granularity__pb2._GRANULARITY_ENUM DESCRIPTOR.message_types_by_name['FeatureRowKey'] = _FEATUREROWKEY DESCRIPTOR.message_types_by_name['FeatureRow'] = _FEATUREROW _sym_db.RegisterFileDescriptor(DESCRIPTOR) diff --git a/sdk/python/feast/types/Granularity_pb2.py b/sdk/python/feast/types/Granularity_pb2.py deleted file mode 100644 index 5ccb51cff4..0000000000 --- a/sdk/python/feast/types/Granularity_pb2.py +++ /dev/null @@ -1,99 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: feast/types/Granularity.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/types/Granularity.proto', - package='feast.types', - syntax='proto3', - serialized_options=_b('\n\013feast.typesB\020GranularityProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - serialized_pb=_b('\n\x1d\x66\x65\x61st/types/Granularity.proto\x12\x0b\x66\x65\x61st.types\"J\n\x0bGranularity\";\n\x04\x45num\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03\x44\x41Y\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\n\n\x06MINUTE\x10\x03\x12\n\n\x06SECOND\x10\x04\x42W\n\x0b\x66\x65\x61st.typesB\x10GranularityProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') -) - - - -_GRANULARITY_ENUM = _descriptor.EnumDescriptor( - name='Enum', - full_name='feast.types.Granularity.Enum', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='NONE', index=0, number=0, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DAY', index=1, number=1, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='HOUR', index=2, number=2, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MINUTE', index=3, number=3, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='SECOND', index=4, number=4, - serialized_options=None, - type=None), - ], - containing_type=None, - serialized_options=None, - serialized_start=61, - serialized_end=120, -) -_sym_db.RegisterEnumDescriptor(_GRANULARITY_ENUM) - - -_GRANULARITY = _descriptor.Descriptor( - name='Granularity', - full_name='feast.types.Granularity', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _GRANULARITY_ENUM, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=46, - serialized_end=120, -) - -_GRANULARITY_ENUM.containing_type = _GRANULARITY -DESCRIPTOR.message_types_by_name['Granularity'] = _GRANULARITY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Granularity = _reflection.GeneratedProtocolMessageType('Granularity', (_message.Message,), dict( - DESCRIPTOR = _GRANULARITY, - __module__ = 'feast.types.Granularity_pb2' - # @@protoc_insertion_point(class_scope:feast.types.Granularity) - )) 
-_sym_db.RegisterMessage(Granularity) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/tests/sample/valid_feature.yaml b/sdk/python/tests/sample/valid_feature.yaml index ffde1ee194..a54915641b 100644 --- a/sdk/python/tests/sample/valid_feature.yaml +++ b/sdk/python/tests/sample/valid_feature.yaml @@ -1,10 +1,9 @@ -id: myentity.none.feature_bool_redis1 +id: myentity.feature_bool_redis1 name: feature_bool_redis1 entity: myentity owner: bob@example.com description: test entity. valueType: BOOL -granularity: NONE uri: https://github.com/bob/example dataStores: serving: diff --git a/sdk/python/tests/sdk/resources/test_entity.py b/sdk/python/tests/sdk/resources/test_entity.py index 6bf9d70de4..65a10caa1d 100644 --- a/sdk/python/tests/sdk/resources/test_entity.py +++ b/sdk/python/tests/sdk/resources/test_entity.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,11 +18,12 @@ class TestEntity(object): def test_read_from_yaml(self): - entity_no_tag = Entity.from_yaml("tests/sample/valid_entity_no_tag.yaml") + entity_no_tag = Entity.from_yaml( + "tests/sample/valid_entity_no_tag.yaml") assert entity_no_tag.name == "myentity" assert entity_no_tag.description == "test entity without tag" assert len(entity_no_tag.tags) == 0 - + entity = Entity.from_yaml("tests/sample/valid_entity.yaml") assert entity.name == "myentity" assert entity.description == "test entity with tag" @@ -39,4 +40,4 @@ def test_dump(self): assert t1 == t2 #cleanup - os.remove("myentity.yaml") \ No newline at end of file + os.remove("myentity.yaml") diff --git a/sdk/python/tests/sdk/resources/test_feature.py b/sdk/python/tests/sdk/resources/test_feature.py index a942c805f1..7f296db235 100644 --- a/sdk/python/tests/sdk/resources/test_feature.py +++ b/sdk/python/tests/sdk/resources/test_feature.py @@ -1,28 +1,33 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from feast.sdk.resources.feature import Feature, Datastore, Granularity, \ - ValueType +from feast.sdk.resources.feature import Feature, Datastore, ValueType class TestFeature(object): def dummy_feature(self): - warehouse_data_store = Datastore(id = "BIGQUERY1", options = {}) - serving_data_store = Datastore(id = "REDIS1", options = {}) - my_feature = Feature(name = "my_feature", entity = "my_entity", granularity = Granularity.NONE, value_type = ValueType.BYTES, - owner = "feast@web.com", description = "test feature", uri = "github.com/feature_repo", warehouse_store = warehouse_data_store, - serving_store = serving_data_store) + warehouse_data_store = Datastore(id="BIGQUERY1", options={}) + serving_data_store = Datastore(id="REDIS1", options={}) + my_feature = Feature( + name="my_feature", + entity="my_entity", + value_type=ValueType.BYTES, + owner="feast@web.com", + description="test feature", + uri="github.com/feature_repo", + warehouse_store=warehouse_data_store, + serving_store=serving_data_store) return my_feature def test_set_name(self): @@ -30,30 +35,23 @@ def test_set_name(self): new_name = "my_feature_new" my_feature.name = new_name assert my_feature.name == new_name - assert my_feature.id == "my_entity.none.my_feature_new" - - def test_set_granularity(self): - my_feature = self.dummy_feature() - my_feature.granularity = Granularity.DAY - assert my_feature.granularity == Granularity.DAY - assert my_feature.id == "my_entity.day.my_feature" - + assert my_feature.id == "my_entity.my_feature_new" + def test_set_entity(self): my_feature = self.dummy_feature() new_entity = "new_entity" my_feature.entity = new_entity assert my_feature.entity == new_entity - assert my_feature.id == "new_entity.none.my_feature" - + assert my_feature.id == "new_entity.my_feature" + def test_read_from_yaml(self): feature = Feature.from_yaml("tests/sample/valid_feature.yaml") - assert feature.id == "myentity.none.feature_bool_redis1" + assert feature.id == "myentity.feature_bool_redis1" assert feature.name == "feature_bool_redis1" assert feature.entity == "myentity" assert feature.owner == "bob@example.com" assert feature.description == "test entity." assert feature.value_type == ValueType.BOOL - assert feature.granularity == Granularity.NONE assert feature.uri == "https://github.com/bob/example" assert feature.serving_store.id == "REDIS1" assert feature.warehouse_store.id == "BIGQUERY1" diff --git a/sdk/python/tests/sdk/resources/test_feature_group.py b/sdk/python/tests/sdk/resources/test_feature_group.py index 1e431bf135..244211d192 100644 --- a/sdk/python/tests/sdk/resources/test_feature_group.py +++ b/sdk/python/tests/sdk/resources/test_feature_group.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -14,11 +14,12 @@ from feast.sdk.resources.feature_group import FeatureGroup + class TestFeatureGroupSpec(object): - def test_read_from_yaml(self): + def test_read_from_yaml(self): feature_group = FeatureGroup.from_yaml( "tests/sample/valid_feature_group.yaml") assert feature_group.id == "my_fg" assert feature_group.serving_store.id == "REDIS1" assert feature_group.warehouse_store.id == "BIGQUERY1" - assert feature_group.tags == ["tag1", "tag2"] \ No newline at end of file + assert feature_group.tags == ["tag1", "tag2"] diff --git a/sdk/python/tests/sdk/resources/test_feature_set.py b/sdk/python/tests/sdk/resources/test_feature_set.py index ddb66e38c7..3c6c97eba2 100644 --- a/sdk/python/tests/sdk/resources/test_feature_set.py +++ b/sdk/python/tests/sdk/resources/test_feature_set.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,7 +19,7 @@ class TestFeatureSet(object): def test_features(self): entity_name = "driver" - features = ["driver.hour.feature1", "driver.hour.feature2"] + features = ["driver.feature1", "driver.feature2"] feature_set = FeatureSet(entity_name, features) assert len(feature_set.features) == 2 @@ -28,9 +28,10 @@ def test_features(self): def test_different_entity(self): entity_name = "driver" - features = ["customer.hour.feature1", "driver.day.feature1"] - with pytest.raises(ValueError, - match="feature set has different entity: customer"): + features = ["customer.feature1", "driver.feature1"] + with pytest.raises( + ValueError, + match="feature set has different entity: customer"): FeatureSet(entity_name, features) @@ -41,4 +42,3 @@ def test_creation(self): dataset = DatasetInfo(name, table_id) assert dataset.name == name assert dataset.table_id == table_id - diff --git a/sdk/python/tests/sdk/resources/test_storage.py b/sdk/python/tests/sdk/resources/test_storage.py index c9e6e9358a..75b9d1a8f8 100644 --- a/sdk/python/tests/sdk/resources/test_storage.py +++ b/sdk/python/tests/sdk/resources/test_storage.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
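
The FeatureSet tests above likewise move to two-part feature ids; a brief sketch of the validation they exercise:

import pytest

from feast.sdk.resources.feature_set import FeatureSet

fs = FeatureSet("driver", ["driver.feature1", "driver.feature2"])
assert len(fs.features) == 2

# ids whose entity prefix differs are rejected
with pytest.raises(ValueError,
                   match="feature set has different entity: customer"):
    FeatureSet("driver", ["customer.feature1"])
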
@@ -20,7 +20,7 @@ class TestStorage(object): def test_update_options(self): storage = Storage(id="storage1", type="redis") assert storage.options == {} - myDict = {'key':'value'} + myDict = {'key': 'value'} storage.options = myDict assert storage.options == myDict @@ -28,16 +28,18 @@ def test_from_yaml(self): storage = Storage.from_yaml("tests/sample/valid_storage.yaml") assert storage.id == "BIGQUERY1" assert storage.type == "bigquery" - expDict = {"dataset" : "feast", - "project" : "gcp-project", - "tempLocation" : "gs://feast-storage"} + expDict = { + "dataset": "feast", + "project": "gcp-project", + "tempLocation": "gs://feast-storage" + } assert storage.options == expDict def test_dump(self): - opt = {"option1" : "value1", "option2" : "value2"} + opt = {"option1": "value1", "option2": "value2"} storage = Storage("storage1", "redis", opt) - storage.dump("storage.yaml") + storage.dump("storage.yaml") storage2 = Storage.from_yaml("storage.yaml") assert storage.id == storage2.id @@ -46,4 +48,4 @@ def test_dump(self): assert storage2.options == opt #cleanup - os.remove("storage.yaml") \ No newline at end of file + os.remove("storage.yaml") diff --git a/sdk/python/tests/sdk/test_client.py b/sdk/python/tests/sdk/test_client.py index c13c350ed4..f0d789afd0 100644 --- a/sdk/python/tests/sdk/test_client.py +++ b/sdk/python/tests/sdk/test_client.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
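
A round-trip sketch matching the Storage tests above, using the same file name as the test:

import os

from feast.sdk.resources.storage import Storage

storage = Storage("storage1", "redis", {"option1": "value1"})
storage.dump("storage.yaml")  # writes the spec as YAML
restored = Storage.from_yaml("storage.yaml")
assert restored.id == "storage1"
assert restored.type == "redis"
assert restored.options == {"option1": "value1"}
os.remove("storage.yaml")  # cleanup, as in the test
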
@@ -15,24 +15,24 @@ from datetime import datetime import grpc +import numpy as np import pandas as pd import pytest -import numpy as np from google.protobuf.timestamp_pb2 import Timestamp from pandas.util.testing import assert_frame_equal import feast.core.CoreService_pb2_grpc as core -import feast.core.JobService_pb2_grpc as jobs import feast.core.DatasetService_pb2_grpc as training +import feast.core.JobService_pb2_grpc as jobs import feast.serving.Serving_pb2 as serving_pb from feast.core.CoreService_pb2 import CoreServiceTypes -from feast.core.JobService_pb2 import JobServiceTypes -from feast.core.DatasetService_pb2 import DatasetServiceTypes from feast.core.DatasetService_pb2 import DatasetInfo as DatasetInfo_pb +from feast.core.DatasetService_pb2 import DatasetServiceTypes +from feast.core.JobService_pb2 import JobServiceTypes from feast.sdk.client import Client, _parse_date, _timestamp_from_datetime from feast.sdk.importer import Importer from feast.sdk.resources.entity import Entity -from feast.sdk.resources.feature import Feature, Granularity +from feast.sdk.resources.feature import Feature from feast.sdk.resources.feature_group import FeatureGroup from feast.sdk.resources.feature_set import FeatureSet, DatasetInfo, FileType from feast.sdk.resources.storage import Storage @@ -54,24 +54,27 @@ def client(self, mocker): return cli def test_apply_single_feature(self, client, mocker): - my_feature = Feature(name="test", - entity="test", granularity=Granularity.NONE) + my_feature = Feature(name="test", entity="test") grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - with mocker.patch.object(grpc_stub, 'ApplyFeature', - return_value=CoreServiceTypes.ApplyFeatureResponse( - featureId="test.none.test")): + with mocker.patch.object( + grpc_stub, + 'ApplyFeature', + return_value=CoreServiceTypes.ApplyFeatureResponse( + featureId="test.test")): client._core_service_stub = grpc_stub id = client.apply(my_feature) - assert id == "test.none.test" + assert id == "test.test" def test_apply_single_entity(self, client, mocker): my_entity = Entity(name="test") grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - with mocker.patch.object(grpc_stub, 'ApplyEntity', - return_value=CoreServiceTypes.ApplyEntityResponse( - entityName="test")): + with mocker.patch.object( + grpc_stub, + 'ApplyEntity', + return_value=CoreServiceTypes.ApplyEntityResponse( + entityName="test")): client._core_service_stub = grpc_stub name = client.apply(my_entity) assert name == "test" @@ -80,9 +83,11 @@ def test_apply_single_feature_group(self, client, mocker): my_feature_group = FeatureGroup(id="test") grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - with mocker.patch.object(grpc_stub, 'ApplyFeatureGroup', - return_value=CoreServiceTypes.ApplyFeatureGroupResponse( - featureGroupId="test")): + with mocker.patch.object( + grpc_stub, + 'ApplyFeatureGroup', + return_value=CoreServiceTypes.ApplyFeatureGroupResponse( + featureGroupId="test")): client._core_service_stub = grpc_stub name = client.apply(my_feature_group) assert name == "test" @@ -91,9 +96,11 @@ def test_apply_single_storage(self, client, mocker): my_storage = Storage(id="TEST", type="redis") grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - with mocker.patch.object(grpc_stub, 'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")): + with mocker.patch.object( + grpc_stub, + 'ApplyStorage', + return_value=CoreServiceTypes.ApplyStorageResponse( + storageId="TEST")): client._core_service_stub 
= grpc_stub name = client.apply(my_storage) assert name == "TEST" @@ -111,15 +118,21 @@ def test_apply_multiple(self, client, mocker): grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - mocker.patch.object(grpc_stub, 'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")) - mocker.patch.object(grpc_stub, 'ApplyFeatureGroup', - return_value=CoreServiceTypes.ApplyFeatureGroupResponse( - featureGroupId="test")) - mocker.patch.object(grpc_stub, 'ApplyEntity', - return_value=CoreServiceTypes.ApplyEntityResponse( - entityName="test")) + mocker.patch.object( + grpc_stub, + 'ApplyStorage', + return_value=CoreServiceTypes.ApplyStorageResponse( + storageId="TEST")) + mocker.patch.object( + grpc_stub, + 'ApplyFeatureGroup', + return_value=CoreServiceTypes.ApplyFeatureGroupResponse( + featureGroupId="test")) + mocker.patch.object( + grpc_stub, + 'ApplyEntity', + return_value=CoreServiceTypes.ApplyEntityResponse( + entityName="test")) client._core_service_stub = grpc_stub ids = client.apply([my_storage, my_entity, my_feature_group]) @@ -128,59 +141,58 @@ def test_apply_multiple(self, client, mocker): def test_run_job_no_staging(self, client, mocker): grpc_stub = jobs.JobServiceStub(grpc.insecure_channel("")) - mocker.patch.object(grpc_stub, 'SubmitJob', - return_value=JobServiceTypes.SubmitImportJobResponse( - jobId="myjob12312")) + mocker.patch.object( + grpc_stub, + 'SubmitJob', + return_value=JobServiceTypes.SubmitImportJobResponse( + jobId="myjob12312")) client._job_service_stub = grpc_stub - importer = Importer( - {"import": ImportSpec()}, - None, - {"require_staging": False}) + importer = Importer({"import": ImportSpec()}, None, + {"require_staging": False}) job_id = client.run(importer) assert job_id == "myjob12312" def test_create_dataset_invalid_args(self, client): - feature_set = FeatureSet("entity", ["entity.none.feature1"]) + feature_set = FeatureSet("entity", ["entity.feature1"]) # empty feature set with pytest.raises(ValueError, match="feature set is empty"): inv_feature_set = FeatureSet("entity", []) - client.create_dataset(inv_feature_set, "2018-12-01", - "2018-12-02") + client.create_dataset(inv_feature_set, "2018-12-01", "2018-12-02") # invalid start date - with pytest.raises(ValueError, - match="Incorrect date format, should be YYYY-MM-DD"): - client.create_dataset(feature_set, "20181201", - "2018-12-02") + with pytest.raises( + ValueError, + match="Incorrect date format, should be YYYY-MM-DD"): + client.create_dataset(feature_set, "20181201", "2018-12-02") # invalid end date - with pytest.raises(ValueError, - match="Incorrect date format, should be YYYY-MM-DD"): - client.create_dataset(feature_set, "2018-12-01", - "20181202") + with pytest.raises( + ValueError, + match="Incorrect date format, should be YYYY-MM-DD"): + client.create_dataset(feature_set, "2018-12-01", "20181202") # start date > end date with pytest.raises(ValueError, match="end_date is before start_date"): - client.create_dataset(feature_set, "2018-12-02", - "2018-12-01") + client.create_dataset(feature_set, "2018-12-02", "2018-12-01") # invalid limit - with pytest.raises(ValueError, - match="limit is not a positive integer"): - client.create_dataset(feature_set, "2018-12-01", - "2018-12-02", -1) + with pytest.raises( + ValueError, match="limit is not a positive integer"): + client.create_dataset(feature_set, "2018-12-01", "2018-12-02", -1) def test_create_dataset(self, client, mocker): entity_name = "myentity" - feature_ids = ["myentity.none.feature1", 
"myentity.second.feature2"] + feature_ids = ["myentity.feature1", "myentity.feature2"] fs = FeatureSet(entity_name, feature_ids) start_date = "2018-01-02" end_date = "2018-12-31" - ds_pb = DatasetInfo_pb(name="dataset_name", - tableUrl="project.dataset.table") + ds_pb = DatasetInfo_pb( + name="dataset_name", tableUrl="project.dataset.table") mock_trn_stub = training.DatasetServiceStub(grpc.insecure_channel("")) - mocker.patch.object(mock_trn_stub, "CreateDataset", - return_value=DatasetServiceTypes - .CreateDatasetResponse(datasetInfo=ds_pb)) + mocker.patch.object( + mock_trn_stub, + "CreateDataset", + return_value=DatasetServiceTypes.CreateDatasetResponse( + datasetInfo=ds_pb)) client._dataset_service_stub = mock_trn_stub ds = client.create_dataset(fs, start_date, end_date) @@ -193,29 +205,28 @@ def test_create_dataset(self, client, mocker): startDate=_timestamp_from_datetime(_parse_date(start_date)), endDate=_timestamp_from_datetime(_parse_date(end_date)), limit=None, - namePrefix=None - ) - ) + namePrefix=None)) def test_create_dataset_with_limit(self, client, mocker): entity_name = "myentity" - feature_ids = ["myentity.none.feature1", "myentity.second.feature2"] + feature_ids = ["myentity.feature1", "myentity.feature2"] fs = FeatureSet(entity_name, feature_ids) start_date = "2018-01-02" end_date = "2018-12-31" limit = 100 - ds_pb = DatasetInfo_pb(name="dataset_name", - tableUrl="project.dataset.table") + ds_pb = DatasetInfo_pb( + name="dataset_name", tableUrl="project.dataset.table") mock_trn_stub = training.DatasetServiceStub(grpc.insecure_channel("")) - mocker.patch.object(mock_trn_stub, "CreateDataset", - return_value=DatasetServiceTypes - .CreateDatasetResponse(datasetInfo=ds_pb)) + mocker.patch.object( + mock_trn_stub, + "CreateDataset", + return_value=DatasetServiceTypes.CreateDatasetResponse( + datasetInfo=ds_pb)) client._dataset_service_stub = mock_trn_stub - ds = client.create_dataset(fs, start_date, end_date, - limit=limit) + ds = client.create_dataset(fs, start_date, end_date, limit=limit) assert "dataset_name" == ds.name assert "project.dataset.table" == ds.table_id @@ -225,32 +236,31 @@ def test_create_dataset_with_limit(self, client, mocker): startDate=_timestamp_from_datetime(_parse_date(start_date)), endDate=_timestamp_from_datetime(_parse_date(end_date)), limit=limit, - namePrefix=None - ) - ) + namePrefix=None)) def test_create_dataset_with_name_prefix(self, client, mocker): entity_name = "myentity" - feature_ids = ["myentity.none.feature1", "myentity.second.feature2"] + feature_ids = ["myentity.feature1", "myentity.feature2"] fs = FeatureSet(entity_name, feature_ids) start_date = "2018-01-02" end_date = "2018-12-31" limit = 100 name_prefix = "feast" - ds_pb = DatasetInfo_pb(name="dataset_name", - tableUrl="project.dataset.table") + ds_pb = DatasetInfo_pb( + name="dataset_name", tableUrl="project.dataset.table") mock_dssvc_stub = training.DatasetServiceStub( grpc.insecure_channel("")) - mocker.patch.object(mock_dssvc_stub, "CreateDataset", - return_value=DatasetServiceTypes - .CreateDatasetResponse(datasetInfo=ds_pb)) + mocker.patch.object( + mock_dssvc_stub, + "CreateDataset", + return_value=DatasetServiceTypes.CreateDatasetResponse( + datasetInfo=ds_pb)) client._dataset_service_stub = mock_dssvc_stub - ds = client.create_dataset(fs, start_date, end_date, - limit=limit, - name_prefix=name_prefix) + ds = client.create_dataset( + fs, start_date, end_date, limit=limit, name_prefix=name_prefix) assert "dataset_name" == ds.name assert "project.dataset.table" == 
ds.table_id @@ -260,28 +270,31 @@ def test_create_dataset_with_name_prefix(self, client, mocker): startDate=_timestamp_from_datetime(_parse_date(start_date)), endDate=_timestamp_from_datetime(_parse_date(end_date)), limit=limit, - namePrefix=name_prefix - ) - ) + namePrefix=name_prefix)) def test_build_serving_request(self, client): - feature_set = FeatureSet("entity", - ["entity.none.feat1", "entity.none.feat2"]) + feature_set = FeatureSet("entity", ["entity.feat1", "entity.feat2"]) req = client._build_serving_request(feature_set, ["1", "2", "3"]) - expected = QueryFeaturesRequest(entityName="entity", - entityId=["1", "2", "3"], - featureId=feature_set.features) + expected = QueryFeaturesRequest( + entityName="entity", + entityId=["1", "2", "3"], + featureId=feature_set.features) assert req == expected def test_serving_response_to_df(self, client): response = self._create_query_features_response( entity_name="entity", - entities={"1": {"entity.feat1": (1, Timestamp(seconds=1)), - "entity.feat2": (2, Timestamp(seconds=2))}, - "2": {"entity.feat1": (3, Timestamp(seconds=3)), - "entity.feat2": (4, Timestamp(seconds=4))}} - ) + entities={ + "1": { + "entity.feat1": (1, Timestamp(seconds=1)), + "entity.feat2": (2, Timestamp(seconds=2)) + }, + "2": { + "entity.feat1": (3, Timestamp(seconds=3)), + "entity.feat2": (4, Timestamp(seconds=4)) + } + }) expected_df = pd.DataFrame({'entity': ["1", "2"], 'entity.feat1': [1, 3], 'entity.feat2': [2, 4]}) \ @@ -291,18 +304,25 @@ def test_serving_response_to_df(self, client): response) \ .sort_values(['entity']) \ .reset_index(drop=True)[expected_df.columns] - assert_frame_equal(df, expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) + assert_frame_equal( + df, + expected_df, + check_dtype=False, + check_column_type=False, + check_index_type=False) def test_serving_response_to_df_with_missing_value(self, client): response = self._create_query_features_response( entity_name="entity", - entities={"1": {"entity.feat1": (1, Timestamp(seconds=1))}, - "2": {"entity.feat1": (3, Timestamp(seconds=3)), - "entity.feat2": (4, Timestamp(seconds=4))}} - ) + entities={ + "1": { + "entity.feat1": (1, Timestamp(seconds=1)) + }, + "2": { + "entity.feat1": (3, Timestamp(seconds=3)), + "entity.feat2": (4, Timestamp(seconds=4)) + } + }) expected_df = pd.DataFrame({'entity': ["1", "2"], 'entity.feat1': [1, 3], 'entity.feat2': [np.NaN, 4]}) \ @@ -312,17 +332,24 @@ def test_serving_response_to_df_with_missing_value(self, client): response) \ .sort_values(['entity']) \ .reset_index(drop=True)[expected_df.columns] - assert_frame_equal(df, expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) + assert_frame_equal( + df, + expected_df, + check_dtype=False, + check_column_type=False, + check_index_type=False) def test_serving_response_to_df_with_missing_feature(self, client): response = self._create_query_features_response( entity_name="entity", - entities={"1": {"entity.feat1": (1, Timestamp(seconds=1))}, - "2": {"entity.feat1": (3, Timestamp(seconds=3))}} - ) + entities={ + "1": { + "entity.feat1": (1, Timestamp(seconds=1)) + }, + "2": { + "entity.feat1": (3, Timestamp(seconds=3)) + } + }) expected_df = pd.DataFrame({'entity': ["1", "2"], 'entity.feat1': [1, 3], 'entity.feat2': [np.NaN, np.NaN]}) \ @@ -332,30 +359,37 @@ def test_serving_response_to_df_with_missing_feature(self, client): response) \ .sort_values(['entity']) \ .reset_index(drop=True)[expected_df.columns] - assert_frame_equal(df, expected_df, - 
 
     def test_serving_response_to_df_no_data(self, client):
         response = QueryFeaturesResponse(entityName="entity")
-        expected_df = pd.DataFrame(columns=
-                                   ['entity', 'entity.day.feat1',
-                                    'entity.day.feat2'])
-        df = client._response_to_df(FeatureSet("entity",
-                                               ["entity.day.feat1",
-                                                "entity.day.feat2"]), response)
-        assert_frame_equal(df, expected_df,
-                           check_dtype=False,
-                           check_column_type=False,
-                           check_index_type=False)
+        expected_df = pd.DataFrame(
+            columns=['entity', 'entity.feat1', 'entity.feat2'])
+        df = client._response_to_df(
+            FeatureSet("entity", ["entity.feat1", "entity.feat2"]), response)
+        assert_frame_equal(
+            df,
+            expected_df,
+            check_dtype=False,
+            check_column_type=False,
+            check_index_type=False)
 
     def test_serving_response_to_df_with_time_filter(self, client):
         response = self._create_query_features_response(
             entity_name="entity",
-            entities={"1": {"entity.feat1": (1, Timestamp(seconds=1))},
-                      "2": {"entity.feat1": (3, Timestamp(seconds=3))}}
-        )
+            entities={
+                "1": {
+                    "entity.feat1": (1, Timestamp(seconds=1))
+                },
+                "2": {
+                    "entity.feat1": (3, Timestamp(seconds=3))
+                }
+            })
         expected_df = pd.DataFrame({'entity': ["1", "2"],
                                     'entity.feat1': [np.NaN, 3],
                                     'entity.feat2': [np.NaN, np.NaN]}) \
@@ -367,68 +401,45 @@ def test_serving_response_to_df_with_time_filter(self, client):
                        response, start, end) \
             .sort_values(['entity']) \
             .reset_index(drop=True)[expected_df.columns]
-        print(df)
-        assert_frame_equal(df, expected_df,
-                           check_dtype=False,
-                           check_column_type=False,
-                           check_index_type=False)
-
-    def test_serving_response_to_df_with_time_filter_granularity_none(self,
-                                                                      client):
-        response = self._create_query_features_response(
-            entity_name="entity",
-            entities={"1": {"entity.none.feat1": (1, Timestamp(seconds=1))},
-                      "2": {"entity.none.feat1": (3, Timestamp(seconds=3))}}
-        )
-        expected_df = pd.DataFrame({'entity': ["1", "2"],
-                                    'entity.none.feat1': [1, 3],
-                                    'entity.none.feat2': [np.NaN, np.NaN]}) \
-            .reset_index(drop=True)
-        start = datetime.utcfromtimestamp(2)
-        end = datetime.utcfromtimestamp(5)
-        df = client._response_to_df(FeatureSet("entity", ["entity.none.feat1",
-                                                          "entity.none.feat2"]),
-                                    response, start, end) \
-            .sort_values(['entity']) \
-            .reset_index(drop=True)[expected_df.columns]
-        print(df)
-        assert_frame_equal(df, expected_df,
-                           check_dtype=False,
-                           check_column_type=False,
-                           check_index_type=False)
+        assert_frame_equal(
+            df,
+            expected_df,
+            check_dtype=False,
+            check_column_type=False,
+            check_index_type=False)
 
     def test_serving_invalid_type(self, client):
         start = "2018-01-01T01:01:01"
         end = "2018-01-01T01:01:01"
         ts_range = [start, end]
-        with pytest.raises(TypeError, match="start and end must be datetime "
-                                            "type"):
-            client.get_serving_data(FeatureSet("entity", ["entity.none.feat1",
-                                                          "entity.none.feat2"]),
-                                    ["1234", "5678"],
-                                    ts_range)
+        with pytest.raises(
+                TypeError, match="start and end must be datetime "
+                "type"):
+            client.get_serving_data(
+                FeatureSet("entity", ["entity.feat1", "entity.feat2"]),
+                ["1234", "5678"], ts_range)
 
     def test_download_dataset_as_file(self, client, mocker):
         destination = "/tmp/dest_file"
         table_dlder = TableDownloader()
-        mocker.patch.object(table_dlder, "download_table_as_file",
-                            return_value=destination)
+        mocker.patch.object(
+            table_dlder, "download_table_as_file", return_value=destination)
 
         client._table_downloader = table_dlder
         table_id = "project.dataset.table"
         staging_location = "gs://gcs_bucket/"
         dataset = DatasetInfo("mydataset", table_id)
-        result = client.download_dataset(dataset, destination,
-                                         staging_location=staging_location,
-                                         file_type=FileType.CSV)
+        result = client.download_dataset(
+            dataset,
+            destination,
+            staging_location=staging_location,
+            file_type=FileType.CSV)
 
         assert result == destination
-        table_dlder.download_table_as_file.assert_called_once_with(table_id,
-                                                                   destination,
-                                                                   staging_location,
-                                                                   FileType.CSV)
+        table_dlder.download_table_as_file.assert_called_once_with(
+            table_id, destination, staging_location, FileType.CSV)
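These download tests all lean on the same pytest-mock pattern: patch a method on a concrete instance, inject the instance, then assert on the recorded call. The reduced form, with a hypothetical stand-in class:

    # Downloader is a stand-in class for illustration, not a Feast type.
    class Downloader:
        def fetch(self, table_id):
            raise NotImplementedError

    def test_fetch_is_called_once(mocker):  # mocker comes from pytest-mock
        dl = Downloader()
        mocker.patch.object(dl, "fetch", return_value="/tmp/dest_file")
        assert dl.fetch("project.dataset.table") == "/tmp/dest_file"
        dl.fetch.assert_called_once_with("project.dataset.table")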
table_id = "project.dataset.table" staging_location = "gs://gcs_bucket/" dataset = DatasetInfo("mydataset", table_id) - result = client.download_dataset(dataset, destination, - staging_location=staging_location, - file_type=FileType.CSV) + result = client.download_dataset( + dataset, + destination, + staging_location=staging_location, + file_type=FileType.CSV) assert result == destination - table_dlder.download_table_as_file.assert_called_once_with(table_id, - destination, - staging_location, - FileType.CSV) + table_dlder.download_table_as_file.assert_called_once_with( + table_id, destination, staging_location, FileType.CSV) def _create_query_features_response(self, entity_name, entities): response = QueryFeaturesResponse(entityName=entity_name) @@ -446,9 +457,13 @@ def _create_query_features_response(self, entity_name, entities): def _create_feature_spec(self, feature_id, wh_id): wh_store = DataStore(id=wh_id) datastores = DataStores(warehouse=wh_store) - return FeatureSpec(id=feature_id, - dataStores=datastores) + return FeatureSpec(id=feature_id, dataStores=datastores) def _create_bq_spec(self, id, project, dataset): - return StorageSpec(id=id, type="bigquery", options={ - "project": project, "dataset": dataset}) + return StorageSpec( + id=id, + type="bigquery", + options={ + "project": project, + "dataset": dataset + }) diff --git a/sdk/python/tests/sdk/test_importer.py b/sdk/python/tests/sdk/test_importer.py index 7bf1c05c3f..b1049831e7 100644 --- a/sdk/python/tests/sdk/test_importer.py +++ b/sdk/python/tests/sdk/test_importer.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -15,124 +15,132 @@
 import pandas as pd
 import pytest
 import ntpath
-from feast.sdk.resources.feature import Feature, Granularity, ValueType, \
+from feast.sdk.resources.feature import Feature, ValueType, \
     Datastore
 from feast.sdk.importer import _create_feature, Importer
 from feast.sdk.utils.gs_utils import is_gs_path
-from feast.types.Granularity_pb2 import Granularity as Granularity_pb2
 
 
 class TestImporter(object):
     def test_from_csv(self):
         csv_path = "tests/data/driver_features.csv"
         entity_name = "driver"
-        feature_granularity = Granularity.DAY
         owner = "owner@feast.com"
         staging_location = "gs://test-bucket"
         id_column = "driver_id"
-        feature_columns = ["avg_distance_completed",
-                           "avg_customer_distance_completed"]
+        feature_columns = [
+            "avg_distance_completed", "avg_customer_distance_completed"
+        ]
         timestamp_column = "ts"
 
-        importer = Importer.from_csv(path=csv_path,
-                                     entity=entity_name,
-                                     granularity=feature_granularity,
-                                     owner=owner,
-                                     staging_location=staging_location,
-                                     id_column=id_column,
-                                     feature_columns=feature_columns,
-                                     timestamp_column=timestamp_column)
+        importer = Importer.from_csv(
+            path=csv_path,
+            entity=entity_name,
+            owner=owner,
+            staging_location=staging_location,
+            id_column=id_column,
+            feature_columns=feature_columns,
+            timestamp_column=timestamp_column)
 
         self._validate_csv_importer(importer, csv_path, entity_name,
-                                    feature_granularity, owner,
-                                    staging_location, id_column,
+                                    owner, staging_location, id_column,
                                     feature_columns, timestamp_column)
 
     def test_from_csv_id_column_not_specified(self):
-        with pytest.raises(ValueError,
-                           match="Column with name driver is not found"):
-            feature_columns = ["avg_distance_completed",
-                               "avg_customer_distance_completed"]
+        with pytest.raises(
+                ValueError, match="Column with name driver is not found"):
+            feature_columns = [
+                "avg_distance_completed", "avg_customer_distance_completed"
+            ]
             csv_path = "tests/data/driver_features.csv"
-            Importer.from_csv(path=csv_path,
-                              entity="driver",
-                              granularity=Granularity.DAY,
-                              owner="owner@feast.com",
-                              staging_location="gs://test-bucket",
-                              feature_columns=feature_columns,
-                              timestamp_column="ts")
+            Importer.from_csv(
+                path=csv_path,
+                entity="driver",
+                owner="owner@feast.com",
+                staging_location="gs://test-bucket",
+                feature_columns=feature_columns,
+                timestamp_column="ts")
 
     def test_from_csv_timestamp_column_not_specified(self):
-        feature_columns = ["avg_distance_completed",
-                           "avg_customer_distance_completed",
-                           "avg_distance_cancelled"]
+        feature_columns = [
+            "avg_distance_completed", "avg_customer_distance_completed",
+            "avg_distance_cancelled"
+        ]
         csv_path = "tests/data/driver_features.csv"
         entity_name = "driver"
-        granularity = Granularity.DAY
         owner = "owner@feast.com"
         staging_location = "gs://test-bucket"
         id_column = "driver_id"
-        importer = Importer.from_csv(path=csv_path,
-                                     entity=entity_name,
-                                     granularity=granularity,
-                                     owner=owner,
-                                     staging_location=staging_location,
-                                     id_column=id_column,
-                                     feature_columns=feature_columns)
+        importer = Importer.from_csv(
+            path=csv_path,
+            entity=entity_name,
+            owner=owner,
+            staging_location=staging_location,
+            id_column=id_column,
+            feature_columns=feature_columns)
 
-        self._validate_csv_importer(importer, csv_path, entity_name,
-                                    granularity, owner,
-                                    staging_location=staging_location,
-                                    id_column=id_column,
-                                    feature_columns=feature_columns)
+        self._validate_csv_importer(
+            importer,
+            csv_path,
+            entity_name,
+            owner,
+            staging_location=staging_location,
+            id_column=id_column,
+            feature_columns=feature_columns)
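With granularity gone, Importer.from_csv takes one argument fewer. A usage sketch mirroring the calls above (paths and bucket are placeholders, and the feast.sdk package is assumed installed):

    from feast.sdk.importer import Importer

    importer = Importer.from_csv(
        path="tests/data/driver_features.csv",
        entity="driver",
        owner="owner@feast.com",
        staging_location="gs://test-bucket",
        id_column="driver_id",
        feature_columns=["avg_distance_completed"],
        timestamp_column="ts")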
     def test_from_csv_feature_columns_not_specified(self):
         csv_path = "tests/data/driver_features.csv"
         entity_name = "driver"
-        granularity = Granularity.DAY
         owner = "owner@feast.com"
         staging_location = "gs://test-bucket"
         id_column = "driver_id"
         timestamp_column = "ts"
-        importer = Importer.from_csv(path=csv_path,
-                                     entity=entity_name,
-                                     granularity=granularity,
-                                     owner=owner,
-                                     staging_location=staging_location,
-                                     id_column=id_column,
-                                     timestamp_column=timestamp_column)
+        importer = Importer.from_csv(
+            path=csv_path,
+            entity=entity_name,
+            owner=owner,
+            staging_location=staging_location,
+            id_column=id_column,
+            timestamp_column=timestamp_column)
 
-        self._validate_csv_importer(importer, csv_path, entity_name,
-                                    granularity, owner,
-                                    staging_location=staging_location,
-                                    id_column=id_column,
-                                    timestamp_column=timestamp_column)
+        self._validate_csv_importer(
+            importer,
+            csv_path,
+            entity_name,
+            owner,
+            staging_location=staging_location,
+            id_column=id_column,
+            timestamp_column=timestamp_column)
 
     def test_from_csv_staging_location_not_specified(self):
-        with pytest.raises(ValueError,
-                           match="Specify staging_location for importing local file/dataframe"):
-            feature_columns = ["avg_distance_completed",
-                               "avg_customer_distance_completed"]
+        with pytest.raises(
+                ValueError,
+                match=
+                "Specify staging_location for importing local file/dataframe"):
+            feature_columns = [
+                "avg_distance_completed", "avg_customer_distance_completed"
+            ]
             csv_path = "tests/data/driver_features.csv"
-            Importer.from_csv(path=csv_path,
-                              entity="driver",
-                              granularity=Granularity.DAY,
-                              owner="owner@feast.com",
-                              feature_columns=feature_columns,
-                              timestamp_column="ts")
-
-        with pytest.raises(ValueError,
-                           match="Staging location must be in GCS") as e_info:
-            feature_columns = ["avg_distance_completed",
-                               "avg_customer_distance_completed"]
+            Importer.from_csv(
+                path=csv_path,
+                entity="driver",
+                owner="owner@feast.com",
+                feature_columns=feature_columns,
+                timestamp_column="ts")
+
+        with pytest.raises(
+                ValueError, match="Staging location must be in GCS") as e_info:
+            feature_columns = [
+                "avg_distance_completed", "avg_customer_distance_completed"
+            ]
             csv_path = "tests/data/driver_features.csv"
-            Importer.from_csv(path=csv_path,
-                              entity="driver",
-                              granularity=Granularity.DAY,
-                              owner="owner@feast.com",
-                              staging_location="/home",
-                              feature_columns=feature_columns,
-                              timestamp_column="ts")
+            Importer.from_csv(
+                path=csv_path,
+                entity="driver",
+                owner="owner@feast.com",
+                staging_location="/home",
+                feature_columns=feature_columns,
+                timestamp_column="ts")
 
     def test_from_df(self):
         csv_path = "tests/data/driver_features.csv"
@@ -140,58 +148,61 @@ def test_from_df(self):
         staging_location = "gs://test-bucket"
         entity = "driver"
 
-        importer = Importer.from_df(df=df,
-                                    entity=entity,
-                                    granularity=Granularity.DAY,
-                                    owner="owner@feast.com",
-                                    staging_location=staging_location,
-                                    id_column="driver_id",
-                                    timestamp_column="ts")
+        importer = Importer.from_df(
+            df=df,
+            entity=entity,
+            owner="owner@feast.com",
+            staging_location=staging_location,
+            id_column="driver_id",
+            timestamp_column="ts")
 
         assert importer.require_staging == True
-        assert ("{}/tmp_{}".format(staging_location, entity)
-                in importer.remote_path)
+        assert ("{}/tmp_{}".format(staging_location,
                                   entity) in importer.remote_path)
         for feature in importer.features.values():
             assert feature.name in df.columns
-            assert feature.id == "driver.day." + feature.name
+            assert feature.id == "driver." + feature.name
 
         import_spec = importer.spec
         assert import_spec.type == "file"
-        assert import_spec.sourceOptions == {"format": "csv",
-                                             "path": importer.remote_path}
+        assert import_spec.sourceOptions == {
+            "format": "csv",
+            "path": importer.remote_path
+        }
         assert import_spec.entities == ["driver"]
 
         schema = import_spec.schema
         assert schema.entityIdColumn == "driver_id"
         assert schema.timestampValue is not None
-        feature_columns = ["completed", "avg_distance_completed",
-                           "avg_customer_distance_completed",
-                           "avg_distance_cancelled"]
+        feature_columns = [
+            "completed", "avg_distance_completed",
+            "avg_customer_distance_completed", "avg_distance_cancelled"
+        ]
         for col, field in zip(df.columns.values, schema.fields):
             assert col == field.name
             if col in feature_columns:
-                assert field.featureId == "driver.day." + col
+                assert field.featureId == "driver." + col
 
     def _validate_csv_importer(self,
-                               importer, csv_path, entity_name,
-                               feature_granularity, owner,
-                               staging_location=None, id_column=None,
+                               importer,
+                               csv_path,
+                               entity_name,
+                               owner,
+                               staging_location=None,
+                               id_column=None,
                                feature_columns=None,
-                               timestamp_column=None, timestamp_value=None):
+                               timestamp_column=None,
+                               timestamp_value=None):
         df = pd.read_csv(csv_path)
 
         assert not importer.require_staging == is_gs_path(csv_path)
         if importer.require_staging:
-            assert importer.remote_path == "{}/{}".format(staging_location,
-                                                          ntpath.basename(
-                                                              csv_path))
+            assert importer.remote_path == "{}/{}".format(
+                staging_location, ntpath.basename(csv_path))
 
         # check features created
         for feature in importer.features.values():
             assert feature.name in df.columns
-            assert feature.id == "{}.{}.{}".format(entity_name,
-                                                   Granularity_pb2.Enum.Name(
-                                                       feature_granularity.value).lower(),
-                                                   feature.name)
+            assert feature.id == "{}.{}".format(entity_name, feature.name)
 
         import_spec = importer.spec
         assert import_spec.type == "file.csv"
@@ -215,37 +226,33 @@ def _validate_csv_importer(self,
         for col, field in zip(df.columns.values, schema.fields):
             assert col == field.name
             if col in feature_columns:
-                assert field.featureId == \
-                    "{}.{}.{}".format(entity_name,
-                                      Granularity_pb2.Enum.Name(
-                                          feature_granularity.value).lower(),
-                                      col)
+                assert field.featureId == '{}.{}'.format(entity_name,
+                                                         col).lower()
 
 
 class TestHelpers:
     def test_create_feature(self):
         col = pd.Series([1] * 3, dtype='int32', name="test")
-        expected = Feature(name="test",
-                           entity="test",
-                           granularity=Granularity.NONE,
-                           owner="person",
-                           value_type=ValueType.INT32)
-        actual = _create_feature(col, "test", Granularity.NONE, "person", None,
-                                 None)
+        expected = Feature(
+            name="test",
+            entity="test",
+            owner="person",
+            value_type=ValueType.INT32)
+        actual = _create_feature(col, "test", "person", None, None)
         assert actual.id == expected.id
         assert actual.value_type == expected.value_type
         assert actual.owner == expected.owner
 
     def test_create_feature_with_stores(self):
         col = pd.Series([1] * 3, dtype='int32', name="test")
-        expected = Feature(name="test",
-                           entity="test",
-                           granularity=Granularity.NONE,
-                           owner="person",
-                           value_type=ValueType.INT32,
-                           serving_store=Datastore(id="SERVING"),
-                           warehouse_store=Datastore(id="WAREHOUSE"))
-        actual = _create_feature(col, "test", Granularity.NONE, "person",
+        expected = Feature(
+            name="test",
+            entity="test",
+            owner="person",
+            value_type=ValueType.INT32,
+            serving_store=Datastore(id="SERVING"),
+            warehouse_store=Datastore(id="WAREHOUSE"))
+        actual = _create_feature(col, "test", "person",
                                  Datastore(id="SERVING"),
                                  Datastore(id="WAREHOUSE"))
         assert actual.id == expected.id
diff --git a/sdk/python/tests/sdk/utils/test_bq_utils.py b/sdk/python/tests/sdk/utils/test_bq_utils.py
index f45202ee78..206180197c 100644
--- a/sdk/python/tests/sdk/utils/test_bq_utils.py
+++ b/sdk/python/tests/sdk/utils/test_bq_utils.py
@@ -1,11 +1,11 @@
 # Copyright 2018 The Feast Authors
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 # https://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,19 +25,23 @@
 def test_get_table_name():
     project_name = "my_project"
     dataset_name = "my_dataset"
-    feature_id = "myentity.none.feature1"
-    storage_spec = StorageSpec(id="BIGQUERY1", type="bigquery",
-                               options={"project": project_name,
-                                        "dataset": dataset_name})
+    feature_id = "myentity.feature1"
+    storage_spec = StorageSpec(
+        id="BIGQUERY1",
+        type="bigquery",
+        options={
+            "project": project_name,
+            "dataset": dataset_name
+        })
     assert get_table_name(feature_id, storage_spec) == \
-        "my_project.my_dataset.myentity_none"
+        "my_project.my_dataset.myentity"
 
 
 def test_get_table_name_not_bq():
-    feature_id = "myentity.none.feature1"
+    feature_id = "myentity.feature1"
     storage_spec = StorageSpec(id="REDIS1", type="redis")
-    with pytest.raises(ValueError,
-                       match="storage spec is not BigQuery storage spec"):
+    with pytest.raises(
+            ValueError, match="storage spec is not BigQuery storage spec"):
         get_table_name(feature_id, storage_spec)
 
@@ -45,8 +49,7 @@ class TestTableDownloader(object):
     def test_download_table_as_df(self, mocker):
         self._stop_time(mocker)
         mocked_gcs_to_df = mocker.patch(
-            "feast.sdk.utils.bq_util.gcs_to_df",
-            return_value=None)
+            "feast.sdk.utils.bq_util.gcs_to_df", return_value=None)
 
         staging_path = "gs://temp/"
         staging_file_name = "temp_0"
@@ -56,14 +59,13 @@ def test_download_table_as_df(self, mocker):
         exp_staging_path = os.path.join(staging_path, staging_file_name)
 
         table_dldr._bq = _Mock_BQ_Client()
-        mocker.patch.object(table_dldr._bq, "extract_table",
-                            return_value=_Job())
+        mocker.patch.object(
+            table_dldr._bq, "extract_table", return_value=_Job())
 
-        table_dldr.download_table_as_df(table_id,
-                                        staging_location=staging_path)
+        table_dldr.download_table_as_df(
+            table_id, staging_location=staging_path)
 
-        assert len(
-            table_dldr._bq.extract_table.call_args_list) == 1
+        assert len(table_dldr._bq.extract_table.call_args_list) == 1
         args, kwargs = \
             table_dldr._bq.extract_table.call_args_list[0]
         assert args[0].full_table_id == Table.from_string(
@@ -87,19 +89,16 @@ def test_download_json(self, mocker):
     def test_download_invalid_staging_url(self):
         table_id = "project_id.dataset_id.table_id"
         table_dldr = TableDownloader()
-        with pytest.raises(ValueError,
-                           match="staging_uri must be a directory in "
-                                 "GCS"):
-            table_dldr.download_table_as_file(table_id,
-                                              "/tmp/dst",
-                                              "/local/directory",
-                                              FileType.CSV)
-
-        with pytest.raises(ValueError,
-                           match="staging_uri must be a directory in "
-                                 "GCS"):
-            table_dldr.download_table_as_df(table_id,
-                                            "/local/directory")
+        with pytest.raises(
+                ValueError, match="staging_uri must be a directory in "
+                "GCS"):
+            table_dldr.download_table_as_file(table_id, "/tmp/dst",
+                                              "/local/directory", FileType.CSV)
+
+        with pytest.raises(
+                ValueError, match="staging_uri must be a directory in "
+                "GCS"):
+            table_dldr.download_table_as_df(table_id, "/local/directory")
 
     def _test_download_file(self, mocker, type):
         staging_path = "gs://temp/"
@@ -111,23 +110,21 @@ def _test_download_file(self, mocker, type):
         mock_blob = _Blob()
         mocker.patch.object(mock_blob, "download_to_filename")
         table_dldr._bq = _Mock_BQ_Client()
-        mocker.patch.object(table_dldr._bq, "extract_table",
-                            return_value=_Job())
+        mocker.patch.object(
+            table_dldr._bq, "extract_table", return_value=_Job())
         table_dldr._gcs = _Mock_GCS_Client()
-        mocker.patch.object(table_dldr._gcs, "get_bucket",
-                            return_value=_Bucket(mock_blob))
+        mocker.patch.object(
+            table_dldr._gcs, "get_bucket", return_value=_Bucket(mock_blob))
 
-        table_dldr.download_table_as_file(table_id,
-                                          dst_path,
-                                          staging_location=staging_path,
-                                          file_type=type)
+        table_dldr.download_table_as_file(
+            table_id, dst_path, staging_location=staging_path, file_type=type)
 
         exp_staging_path = os.path.join(staging_path, staging_file_name)
-        assert len(
-            table_dldr._bq.extract_table.call_args_list) == 1
+        assert len(table_dldr._bq.extract_table.call_args_list) == 1
         args, kwargs = \
             table_dldr._bq.extract_table.call_args_list[0]
-        assert args[0].full_table_id == Table.from_string(table_id).full_table_id
+        assert args[0].full_table_id == Table.from_string(
+            table_id).full_table_id
         assert args[1] == exp_staging_path
         assert kwargs['job_config'].destination_format == str(type)
 
@@ -136,14 +133,17 @@ def _test_download_file(self, mocker, type):
     def _stop_time(self, mocker):
         mocker.patch('time.time', return_value=0)
 
+
 class _Mock_BQ_Client:
     def extract_table(self):
         pass
 
+
 class _Mock_GCS_Client:
     def get_bucket(self):
         pass
 
+
 class _Job:
     def result(self):
         return None
diff --git a/sdk/python/tests/sdk/utils/test_gs_utils.py b/sdk/python/tests/sdk/utils/test_gs_utils.py
index 402e6b0355..5e5a2e13bd 100644
--- a/sdk/python/tests/sdk/utils/test_gs_utils.py
+++ b/sdk/python/tests/sdk/utils/test_gs_utils.py
@@ -1,11 +1,11 @@
 # Copyright 2018 The Feast Authors
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 # https://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,6 +15,7 @@
 import pytest
 from feast.sdk.utils.gs_utils import is_gs_path
 
+
 def test_is_gs_path():
     assert is_gs_path("gs://valid/gs/file.csv") == True
-    assert is_gs_path("local/path/file.csv") == False
\ No newline at end of file
+    assert is_gs_path("local/path/file.csv") == False
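is_gs_path itself is not shown in this diff; judging by the assertions it presumably amounts to a scheme check along these lines (a sketch, not the actual implementation):

    # Sketch of what the tested behaviour implies; names are hypothetical.
    def is_gs_path_sketch(path):
        return path.startswith("gs://")

    assert is_gs_path_sketch("gs://valid/gs/file.csv") == True
    assert is_gs_path_sketch("local/path/file.csv") == False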
diff --git a/sdk/python/tests/sdk/utils/test_types.py b/sdk/python/tests/sdk/utils/test_types.py
index edfc741a3d..021e51e9eb 100644
--- a/sdk/python/tests/sdk/utils/test_types.py
+++ b/sdk/python/tests/sdk/utils/test_types.py
@@ -1,11 +1,11 @@
 # Copyright 2018 The Feast Authors
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 # https://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -14,23 +14,26 @@
 import pytest
 import pandas as pd
-import numpy as np 
+import numpy as np
 from feast.sdk.utils.types import dtype_to_value_type, ValueType
 
+
 def test_convert_dtype_to_value_type():
-    dft = pd.DataFrame(dict(A = np.random.rand(3),
-                            B = 1,
-                            C = 'foo',
-                            D = pd.Timestamp('20010102'),
-                            E = pd.Series([1.0]*3).astype('float32'),
-                            F = False,
-                            G = pd.Series([1]*3,dtype='int8')))
-
+    dft = pd.DataFrame(
+        dict(
+            A=np.random.rand(3),
+            B=1,
+            C='foo',
+            D=pd.Timestamp('20010102'),
+            E=pd.Series([1.0] * 3).astype('float32'),
+            F=False,
+            G=pd.Series([1] * 3, dtype='int8')))
+
     assert dtype_to_value_type(dft['A'].dtype) == ValueType.DOUBLE
     assert dtype_to_value_type(dft['B'].dtype) == ValueType.INT64
     assert dtype_to_value_type(dft['C'].dtype) == ValueType.STRING
     assert dtype_to_value_type(dft['D'].dtype) == ValueType.TIMESTAMP
     assert dtype_to_value_type(dft['E'].dtype) == ValueType.FLOAT
     assert dtype_to_value_type(dft['F'].dtype) == ValueType.BOOL
-    assert dtype_to_value_type(dft['G'].dtype) == ValueType.INT32
\ No newline at end of file
+    assert dtype_to_value_type(dft['G'].dtype) == ValueType.INT32
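The assertions above pin down a dtype-to-ValueType mapping. Written out as a plain table (Feast's dtype_to_value_type may be implemented differently; this is only the behaviour the test fixes):

    import numpy as np
    import pandas as pd

    # The observable mapping, spelled out as a dict for reference.
    DTYPE_TO_VALUE_TYPE = {
        np.dtype("float64"): "DOUBLE",
        np.dtype("int64"): "INT64",
        np.dtype("O"): "STRING",
        np.dtype("<M8[ns]"): "TIMESTAMP",
        np.dtype("float32"): "FLOAT",
        np.dtype("bool"): "BOOL",
        np.dtype("int8"): "INT32",
    }
    assert DTYPE_TO_VALUE_TYPE[pd.Series(["foo"]).dtype] == "STRING"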
Arrays.asList("driver.completed_booking", "driver.last_opportunity")) .build(); Tracer tracer = Configuration.fromEnv("dummy").getTracer(); @@ -102,7 +102,7 @@ public void shouldCallOnErrorIfFeatureIdsIsNotSet() { public void shouldCallOnErrorIfFeatureIdsContainsDifferentEntity() { QueryFeaturesRequest differentEntityReq = QueryFeaturesRequest.newBuilder(validRequest) - .addFeatureId("customer.day.order_made") + .addFeatureId("customer.order_made") .build(); service.queryFeatures(differentEntityReq, mockStreamObserver); diff --git a/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java b/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java index f8905e6342..6212a6e29d 100644 --- a/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java +++ b/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java @@ -23,8 +23,6 @@ import feast.serving.model.FeatureValue; import feast.serving.testutil.BigTablePopulator; import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.GranularityProto.Granularity; -import feast.types.GranularityProto.Granularity.Enum; import feast.types.ValueProto.ValueType; import java.util.ArrayList; import java.util.Arrays; @@ -66,22 +64,20 @@ public void tearDown() throws Exception { } @Test - public void getFeatures_shouldWorkForGranularityNone() { + public void getFeatures_shouldReturnLastValue() { FeatureSpec featureSpec1 = FeatureSpec.newBuilder() .setEntity(ENTITY_NAME) - .setId("test_entity.none.feature_1") + .setId("test_entity.feature_1") .setName("feature_1") - .setGranularity(Granularity.Enum.NONE) .setValueType(ValueType.Enum.STRING) .build(); FeatureSpec featureSpec2 = FeatureSpec.newBuilder() .setEntity(ENTITY_NAME) - .setId("test_entity.none.feature_2") + .setId("test_entity.feature_2") .setName("feature_2") - .setGranularity(Granularity.Enum.NONE) .setValueType(ValueType.Enum.STRING) .build(); @@ -97,18 +93,16 @@ public void getFeatures_shouldGracefullyHandleMissingEntity() { FeatureSpec featureSpec1 = FeatureSpec.newBuilder() .setEntity(ENTITY_NAME) - .setId("test_entity.none.feature_1") + .setId("test_entity.feature_1") .setName("feature_granularity_none") - .setGranularity(Granularity.Enum.NONE) .setValueType(ValueType.Enum.STRING) .build(); FeatureSpec featureSpec2 = FeatureSpec.newBuilder() .setEntity(ENTITY_NAME) - .setId("test_entity.none.feature_2") + .setId("test_entity.feature_2") .setName("feature_2") - .setGranularity(Granularity.Enum.NONE) .setValueType(ValueType.Enum.STRING) .build(); @@ -121,41 +115,6 @@ public void getFeatures_shouldGracefullyHandleMissingEntity() { bigTablePopulator.validate(results, entityIds, featureSpecs); } - @Test - public void getFeatures_shouldWorkForOtherGranularity() { - for (Granularity.Enum granularity : Granularity.Enum.values()) { - if (granularity.equals(Enum.NONE) || granularity.equals(Enum.UNRECOGNIZED)) { - continue; - } - FeatureSpec spec1 = createFeatureSpec("feature_1", granularity, ValueType.Enum.STRING); - FeatureSpec spec2 = createFeatureSpec("feature_2", granularity, ValueType.Enum.STRING); - - List featureSpecs = Arrays.asList(spec1, spec2); - bigTablePopulator.populate(ENTITY_NAME, entityIds, featureSpecs, now); - - List result = featureStorage.getFeature(ENTITY_NAME, entityIds, featureSpecs); - - bigTablePopulator.validate(result, entityIds, featureSpecs); - } - } - - private FeatureSpec createFeatureSpec( - String featureName, Enum granularity, ValueType.Enum valType) { - String 
-    String entityName = ENTITY_NAME;
-    String featureId =
-        String.format("%s.%s.%s", entityName, granularity.toString().toLowerCase(), featureName);
-    FeatureSpec spec =
-        FeatureSpec.newBuilder()
-            .setEntity(entityName)
-            .setId(featureId)
-            .setName(featureName)
-            .setGranularity(granularity)
-            .setValueType(valType)
-            .build();
-
-    return spec;
-  }
-
   private List<String> createEntityIds(int count) {
     List<String> entityIds = new ArrayList<>();
     for (int i = 0; i < count; i++) {
diff --git a/serving/src/test/java/feast/serving/service/CoreServiceTest.java b/serving/src/test/java/feast/serving/service/CoreServiceTest.java
index ef62ea7b25..cc0551948a 100644
--- a/serving/src/test/java/feast/serving/service/CoreServiceTest.java
+++ b/serving/src/test/java/feast/serving/service/CoreServiceTest.java
@@ -17,8 +17,29 @@
 package feast.serving.service;
 
+import static org.hamcrest.CoreMatchers.everyItem;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.collection.IsIn.isIn;
+import static org.junit.Assert.assertThat;
+
 import com.google.protobuf.ByteString;
 import com.google.protobuf.Empty;
+import feast.core.CoreServiceGrpc.CoreServiceImplBase;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesRequest;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesResponse;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesRequest;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesResponse;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetStorageRequest;
+import feast.core.CoreServiceProto.CoreServiceTypes.GetStorageResponse;
+import feast.core.CoreServiceProto.CoreServiceTypes.ListEntitiesResponse;
+import feast.core.CoreServiceProto.CoreServiceTypes.ListFeaturesResponse;
+import feast.core.CoreServiceProto.CoreServiceTypes.ListStorageResponse;
+import feast.serving.exception.SpecRetrievalException;
+import feast.specs.EntitySpecProto.EntitySpec;
+import feast.specs.FeatureSpecProto.FeatureSpec;
+import feast.specs.StorageSpecProto.StorageSpec;
+import feast.types.ValueProto.ValueType;
 import io.grpc.ManagedChannelBuilder;
 import io.grpc.StatusRuntimeException;
 import io.grpc.inprocess.InProcessChannelBuilder;
@@ -26,20 +47,6 @@
 import io.grpc.stub.StreamObserver;
 import io.grpc.testing.GrpcCleanupRule;
 import io.grpc.util.MutableHandlerRegistry;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import feast.core.CoreServiceGrpc.CoreServiceImplBase;
-import feast.core.CoreServiceProto.CoreServiceTypes.*;
-import feast.serving.exception.SpecRetrievalException;
-import feast.specs.EntitySpecProto.EntitySpec;
-import feast.specs.FeatureSpecProto.FeatureSpec;
-import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity.Enum;
-import feast.types.ValueProto.ValueType;
-
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -47,12 +54,11 @@
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import static org.hamcrest.CoreMatchers.everyItem;
-import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.hamcrest.collection.IsIn.isIn;
-import static org.junit.Assert.assertThat;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
 public class CoreServiceTest {
@@ -175,7 +181,7 @@ public void getAllEntitySpecs_shouldThrowExceptionWhenErrorHappens() {
   @Test
   public void getFeatureSpecs_shouldSendCorrectRequest() {
     List<String> featureIds =
-        Arrays.asList("driver.day.total_accepted_booking", "driver.second.ping_location");
+        Arrays.asList("driver.total_accepted_booking", "driver.ping_location");
 
     AtomicReference<GetFeaturesRequest> deliveredRequest = new AtomicReference<>();
     CoreServiceImplBase service =
         new CoreServiceImplBase() {
@@ -202,7 +208,7 @@ public void getFeatures(
   @Test
   public void getFeatureSpecs_shouldReturnRequestedFeatureSpecs() {
     List<String> featureIds =
-        Arrays.asList("driver.day.total_accepted_booking", "driver.second.ping_location");
+        Arrays.asList("driver.total_accepted_booking", "driver.ping_location");
 
     AtomicReference<GetFeaturesRequest> deliveredRequest = new AtomicReference<>();
     CoreServiceImplBase service =
         new CoreServiceImplBase() {
@@ -232,7 +238,7 @@ public void getFeatureSpecs_shouldThrowSpecsRetrievalExceptionWhenErrorHappen()
     expectedException.expectCause(instanceOf(StatusRuntimeException.class));
 
     List<String> featureIds =
-        Arrays.asList("driver.day.total_accepted_booking", "driver.second.ping_location");
+        Arrays.asList("driver.total_accepted_booking", "driver.ping_location");
 
     client.getFeatureSpecs(featureIds);
   }
@@ -363,21 +369,19 @@ public void getAllStorageSpecs_shouldThrowSpecsRetrievalExceptionWhenErrorHappen
   private Map<String, FeatureSpec> getFakeFeatureSpecs() {
     FeatureSpec spec1 =
         FeatureSpec.newBuilder()
-            .setId("driver.day.total_accepted_booking")
+            .setId("driver.total_accepted_booking")
             .setName("total_accepted_booking")
             .setOwner("dummy@go-jek.com")
             .setDescription("awesome feature")
-            .setGranularity(Enum.DAY)
             .setValueType(ValueType.Enum.STRING)
             .build();
 
     FeatureSpec spec2 =
         FeatureSpec.newBuilder()
-            .setId("driver.second.ping")
+            .setId("driver.ping")
             .setName("ping")
             .setOwner("dummy@go-jek.com")
             .setDescription("awesome feature")
-            .setGranularity(Enum.SECOND)
             .setValueType(ValueType.Enum.INT64)
             .build();
 
diff --git a/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java b/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java
index 11b3f8fa87..2103be849b 100644
--- a/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java
+++ b/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java
@@ -69,7 +69,7 @@ public void shouldUseCurrentThreadIfRequestIsSmallEnough() {
         .thenReturn(Collections.emptyList());
     when(featureStorageRegistry.get(any(String.class))).thenReturn(featureStorage);
 
-    String featureId = "entity.none.feature_1";
+    String featureId = "entity.feature_1";
     FeatureSpec featureSpec = FeatureSpec.newBuilder().setId(featureId).build();
     dispatcher.dispatchFeatureRetrieval(
         entityName, entityIds, Collections.singletonList(featureSpec));
@@ -90,7 +90,7 @@ public void shouldUseCurrentThreadIfRequestFromSameStorage() {
         .thenReturn(Collections.emptyList());
 
     String entityName = "entity";
-    String featureId1 = "entity.none.feature_1";
+    String featureId1 = "entity.feature_1";
     FeatureSpec featureSpec1 =
         FeatureSpec.newBuilder()
             .setId(featureId1)
@@ -98,7 +98,7 @@ public void shouldUseCurrentThreadIfRequestFromSameStorage() {
                 DataStores.newBuilder().setServing(DataStore.newBuilder().setId(storageId1)))
             .build();
 
-    String featureId2 = "entity.none.feature_2";
+    String featureId2 = "entity.feature_2";
     FeatureSpec featureSpec2 =
         FeatureSpec.newBuilder()
             .setId(featureId2)
@@ -131,7 +131,7 @@ public void shouldUseExecutorServiceIfRequestFromMoreThanOneStorage() {
         .thenReturn(Collections.emptyList());
 
     String entityName = "entity";
-    String featureId1 = "entity.none.feature_1";
+    String featureId1 = "entity.feature_1";
     FeatureSpec featureSpec1 =
         FeatureSpec.newBuilder()
             .setId(featureId1)
@@ -139,7 +139,7 @@ public void shouldUseExecutorServiceIfRequestFromMoreThanOneStorage() {
                 DataStores.newBuilder().setServing(DataStore.newBuilder().setId(storageId1)))
             .build();
 
-    String featureId2 = "entity.none.feature_2";
+    String featureId2 = "entity.feature_2";
     FeatureSpec featureSpec2 =
         FeatureSpec.newBuilder()
             .setId(featureId2)
diff --git a/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java b/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java
index be70fee48d..2a70b78983 100644
--- a/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java
+++ b/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java
@@ -25,8 +25,6 @@
 import feast.specs.FeatureSpecProto.DataStore;
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
-import feast.types.GranularityProto.Granularity;
-import feast.types.GranularityProto.Granularity.Enum;
 import feast.types.ValueProto.ValueType;
 import io.opentracing.util.GlobalTracer;
 import java.util.ArrayList;
@@ -78,17 +76,15 @@ public void setUp() throws Exception {
   public void getFeatures_shouldNotReturnMissingValue() {
     FeatureSpec featureSpec1 =
         FeatureSpec.newBuilder()
-            .setId("entity.none.feature_1")
+            .setId("entity.feature_1")
             .setEntity(entityName)
-            .setGranularity(Enum.NONE)
             .setValueType(ValueType.Enum.STRING)
             .build();
 
     FeatureSpec featureSpec2 =
         FeatureSpec.newBuilder()
-            .setId("entity.none.feature_2")
+            .setId("entity.feature_2")
             .setEntity(entityName)
-            .setGranularity(Enum.NONE)
             .setValueType(ValueType.Enum.STRING)
             .build();
 
@@ -104,39 +100,32 @@ public void getFeatures_shouldNotReturnMissingValue() {
 
   @Test
   public void getFeatures_shouldReturnLastValue() {
-    for (Granularity.Enum granularity : Granularity.Enum.values()) {
-      if (granularity.equals(Enum.NONE) || granularity.equals(Enum.UNRECOGNIZED)) {
-        continue;
-      }
-      FeatureSpec spec1 = createFeatureSpec("feature_1", Enum.NONE);
-      FeatureSpec spec2 = createFeatureSpec("feature_2", granularity);
-      List<FeatureSpec> featureSpecs = Arrays.asList(spec1, spec2);
+    FeatureSpec spec1 = createFeatureSpec("feature_1");
+    FeatureSpec spec2 = createFeatureSpec("feature_2");
+    List<FeatureSpec> featureSpecs = Arrays.asList(spec1, spec2);
 
-      redisPopulator.populate(entityName, entityIds, featureSpecs, now);
+    redisPopulator.populate(entityName, entityIds, featureSpecs, now);
 
-      List<FeatureValue> result = redisFs.getFeature(entityName, entityIds, featureSpecs);
+    List<FeatureValue> result = redisFs.getFeature(entityName, entityIds, featureSpecs);
 
-      redisPopulator.validate(result, entityIds, featureSpecs);
-    }
+    redisPopulator.validate(result, entityIds, featureSpecs);
   }
 
-  private FeatureSpec createFeatureSpec(String featureName, Enum granularity) {
+  private FeatureSpec createFeatureSpec(String featureName) {
     DataStore servingDatastoreSpec = DataStore.newBuilder().setId("REDIS").build();
-    return createFeatureSpec(featureName, granularity, ValueType.Enum.STRING, servingDatastoreSpec);
+    return createFeatureSpec(featureName, ValueType.Enum.STRING, servingDatastoreSpec);
   }
 
   private FeatureSpec createFeatureSpec(
-      String featureName, Enum granularity, ValueType.Enum valType, DataStore dataStoreSpec) {
+      String featureName, ValueType.Enum valType, DataStore dataStoreSpec) {
     String entityName = "entity";
-    String featureId =
-        String.format("%s.%s.%s", entityName, granularity.toString().toLowerCase(), featureName);
+    String featureId = String.format("%s.%s", entityName, featureName);
     FeatureSpec spec =
         FeatureSpec.newBuilder()
             .setDataStores(DataStores.newBuilder().setServing(dataStoreSpec))
             .setEntity(entityName)
             .setId(featureId)
             .setName(featureName)
-            .setGranularity(granularity)
             .setValueType(valType)
             .build();
diff --git a/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java b/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java
index 9f7b2292b3..f004086428 100644
--- a/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java
+++ b/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java
@@ -100,9 +100,7 @@ private void populateTableWithFakeData(
     try (Table table = connection.getTable(tableName)) {
       for (FeatureSpec featureSpec : featureSpecs) {
         for (String entityId : entityIds) {
-          Timestamp roundedTimestamp =
-              TimeUtil.roundFloorTimestamp(timestamp, featureSpec.getGranularity());
-          Put put = makePut(entityId, featureSpec, roundedTimestamp);
+          Put put = makePut(entityId, featureSpec, timestamp);
           table.put(put);
         }
       }
diff --git a/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java b/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java
index 331835dc5e..3c110cda83 100644
--- a/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java
+++ b/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java
@@ -25,7 +25,6 @@
 import feast.specs.FeatureSpecProto.DataStores;
 import feast.specs.FeatureSpecProto.FeatureSpec;
 import feast.specs.StorageSpecProto.StorageSpec;
-import feast.types.GranularityProto.Granularity;
 import feast.types.ValueProto.ValueType;
 import java.util.Collections;
@@ -44,9 +43,9 @@ public class FakeSpecStorage implements SpecStorage {
   public FakeSpecStorage() {
     // populate with hardcoded value
     String bigTableId = "BIGTABLE1";
-    String lastOpportunityId = "driver.none.last_opportunity";
+    String lastOpportunityId = "driver.last_opportunity";
     String lastOpportunityName = "last_opportunity";
-    String dailyCompletedBookingId = "driver.day.total_completed_booking";
+    String dailyCompletedBookingId = "driver.total_completed_booking";
     String dailyCompletedBookingName = "total_completed_booking";
 
     DataStore bigTable = DataStore.newBuilder().setId(bigTableId).build();
@@ -59,7 +58,6 @@ public FakeSpecStorage() {
     FeatureSpec lastOpportunity =
         FeatureSpec.newBuilder()
-            .setGranularity(Granularity.Enum.NONE)
             .setId(lastOpportunityId)
             .setName(lastOpportunityName)
             .setValueType(ValueType.Enum.INT64)
@@ -68,7 +66,6 @@ public FakeSpecStorage() {
     FeatureSpec totalCompleted =
         FeatureSpec.newBuilder()
-            .setGranularity(Granularity.Enum.DAY)
             .setId(dailyCompletedBookingId)
             .setName(dailyCompletedBookingName)
             .setValueType(ValueType.Enum.INT64)
diff --git a/serving/src/test/java/feast/serving/testutil/RedisPopulator.java b/serving/src/test/java/feast/serving/testutil/RedisPopulator.java
index 68340405e6..21235a9bfa 100644
--- a/serving/src/test/java/feast/serving/testutil/RedisPopulator.java
+++ b/serving/src/test/java/feast/serving/testutil/RedisPopulator.java
@@ -40,8 +40,7 @@ public void populate(
       Timestamp timestamp) {
     for (FeatureSpec fs : featureSpecs) {
       for (String entityId : entityIds) {
-        Timestamp roundedTimestamp = TimeUtil.roundFloorTimestamp(timestamp, fs.getGranularity());
-        addData(entityId, fs, roundedTimestamp);
+        addData(entityId, fs, timestamp);
       }
     }
   }
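The reworked Redis and BigTable tests both assert last-value semantics: for each (entity, feature) pair only the most recent write is served. A toy illustration of that contract, not Feast's actual storage layout:

    # "Last value wins", as exercised by getFeatures_shouldReturnLastValue.
    store = {}

    def put(entity_id, feature_id, value, ts):
        key = (entity_id, feature_id)
        if key not in store or store[key][0] <= ts:
            store[key] = (ts, value)

    def get_last(entity_id, feature_id):
        return store[(entity_id, feature_id)][1]

    put("driver1", "entity.feature_1", "a", ts=1)
    put("driver1", "entity.feature_1", "b", ts=2)
    assert get_last("driver1", "entity.feature_1") == "b"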
diff --git a/serving/src/test/java/feast/serving/testutil/TimeUtil.java b/serving/src/test/java/feast/serving/testutil/TimeUtil.java
deleted file mode 100644
index 77e4edea97..0000000000
--- a/serving/src/test/java/feast/serving/testutil/TimeUtil.java
+++ /dev/null
@@ -1,60 +0,0 @@
-package feast.serving.testutil;
-
-import com.google.protobuf.Timestamp;
-import feast.types.GranularityProto.Granularity.Enum;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeField;
-import org.joda.time.DateTimeZone;
-import org.joda.time.MutableDateTime;
-
-public class TimeUtil {
-  public static final int NANO_IN_MILLI = 1000000;
-  public static final int NANO_IN_MICRO = 1000;
-  public static final int MILLI_IN_SECOND = 1000;
-
-  /**
-   * Round down timestamp to the nearest granularity.
-   *
-   * @param timestamp original timestamp.
-   * @param granularity granularity of the rounded timestamp.
-   * @return
-   */
-  public static Timestamp roundFloorTimestamp(Timestamp timestamp, Enum granularity) {
-    MutableDateTime dt = new MutableDateTime(DateTimeZone.UTC);
-    DateTimeField roundingField;
-    switch (granularity) {
-      case DAY:
-        roundingField = dt.getChronology().dayOfMonth();
-        break;
-      case HOUR:
-        roundingField = dt.getChronology().hourOfDay();
-        break;
-      case MINUTE:
-        roundingField = dt.getChronology().minuteOfHour();
-        break;
-      case SECOND:
-        roundingField = dt.getChronology().secondOfMinute();
-        break;
-      case NONE:
-        return Timestamp.newBuilder().setSeconds(0).setNanos(0).build();
-      default:
-        throw new RuntimeException("Unrecognised time series granularity");
-    }
-    dt.setMillis(timestamp.getSeconds() * 1000 + timestamp.getNanos() / 1000000);
-    dt.setRounding(roundingField, MutableDateTime.ROUND_FLOOR);
-    return dateTimeToTimestamp(dt.toDateTime());
-  }
-
-  /**
-   * Convert {@link DateTime} into {@link Timestamp}
-   *
-   * @param dateTime
-   * @return
-   */
-  private static Timestamp dateTimeToTimestamp(DateTime dateTime) {
-    return Timestamp.newBuilder()
-        .setSeconds(dateTime.getMillis() / MILLI_IN_SECOND)
-        .setNanos((int) (dateTime.getMillis() % MILLI_IN_SECOND) * NANO_IN_MILLI)
-        .build();
-  }
-}
diff --git a/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java b/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java
index e2cf193876..596f54c1d9 100644
--- a/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java
+++ b/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java
@@ -208,7 +208,7 @@ private void validate(
   private List<String> createFeatureIds(int count) {
     List<String> featureIds = new ArrayList<>();
     for (int i = 0; i < count; i++) {
-      featureIds.add("entity.none.feature_" + i);
+      featureIds.add("entity.feature_" + i);
     }
     return featureIds;
   }
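For reference, the deleted TimeUtil.roundFloorTimestamp floored a timestamp to the requested granularity, with NONE collapsing everything to the epoch; with granularity gone, no caller needs it. A rough Python equivalent of the retired behaviour:

    from datetime import datetime, timezone

    # Sketch of the removed rounding logic, mirroring the Java switch above.
    def round_floor(dt, granularity):
        if granularity == "NONE":
            return datetime(1970, 1, 1, tzinfo=timezone.utc)
        fields = {"DAY": ("hour", "minute", "second", "microsecond"),
                  "HOUR": ("minute", "second", "microsecond"),
                  "MINUTE": ("second", "microsecond"),
                  "SECOND": ("microsecond",)}
        try:
            zeroed = {f: 0 for f in fields[granularity]}
        except KeyError:
            raise ValueError("Unrecognised time series granularity")
        return dt.replace(**zeroed)

    assert round_floor(datetime(2018, 1, 2, 3, 4, 5, 6, tzinfo=timezone.utc),
                       "HOUR") == datetime(2018, 1, 2, 3, tzinfo=timezone.utc)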
diff --git a/ui/src/components/Feature/SearchTable.vue b/ui/src/components/Feature/SearchTable.vue
index 07a1675fd0..ac82c7ae8e 100644
--- a/ui/src/components/Feature/SearchTable.vue
+++ b/ui/src/components/Feature/SearchTable.vue
@@ -14,8 +14,6 @@
       {{ column.name }}
-
-
@@ -45,11 +43,6 @@
       {{ item.spec.entity }}
-
-
-        {{ item.spec.granularity }}
-
-
@@ -143,7 +136,6 @@
       list: [],
       columns: [
         {name: "Entity", sortable: true, reverse: false},
-        {name: "Granularity", sortable: true, reverse: false},
         {name: "Name", sortable: true, reverse: false},
         {name: "Description", sortable: false},
         {name: "Owner", sortable: true, reverse: false},
@@ -157,7 +149,7 @@
       empty: false,
       loading: false,
       search: [],
-      searchEntities: ["entity", "granularity", "name", "owner","tags"],
+      searchEntities: ["entity", "name", "owner","tags"],
       searchSeparator: ':',
       selectedFeature: {},
       options: [],
@@ -208,8 +200,6 @@
         if (typeof response.body['features'] === "undefined") {
           this.empty = true;
         } else {
-          // TODO: Handle nulls better.
-          response.body['features'].forEach(b => filters.fillGranularity(b));
           this.list = response.body['features'];
           this.populate();
         }
@@ -238,11 +228,7 @@
       updateRoute (route) {
         // TODO: Handle arbitrary search queries
         if (typeof route['entity'] !== "undefined") {
-          if (typeof route['granularity'] !== "undefined") {
-            this.$router.push({path: '/features/' + route['entity'] + '/' + route['granularity'], params: route});
-          } else {
-            this.$router.push({path: '/features/' + route['entity'], params: route});
-          }
+          this.$router.push({path: '/features/' + route['entity'], params: route});
         } else {
           this.$router.push({path: '/features', params: route});
         }
diff --git a/ui/src/components/Job/SearchTable.vue b/ui/src/components/Job/SearchTable.vue
index 07a1675fd0..ac82c7ae8e 100644
--- a/ui/src/components/Job/SearchTable.vue
+++ b/ui/src/components/Job/SearchTable.vue
@@ -14,8 +14,6 @@
       {{ column.name }}
-
-
@@ -45,11 +43,6 @@
       {{ item.spec.entity }}
-
-
-        {{ item.spec.granularity }}
-
-
@@ -143,7 +136,6 @@
       list: [],
       columns: [
         {name: "Entity", sortable: true, reverse: false},
-        {name: "Granularity", sortable: true, reverse: false},
         {name: "Name", sortable: true, reverse: false},
         {name: "Description", sortable: false},
         {name: "Owner", sortable: true, reverse: false},
@@ -157,7 +149,7 @@
       empty: false,
       loading: false,
       search: [],
-      searchEntities: ["entity", "granularity", "name", "owner","tags"],
+      searchEntities: ["entity", "name", "owner","tags"],
       searchSeparator: ':',
       selectedFeature: {},
       options: [],
@@ -208,8 +200,6 @@
         if (typeof response.body['features'] === "undefined") {
           this.empty = true;
         } else {
-          // TODO: Handle nulls better.
-          response.body['features'].forEach(b => filters.fillGranularity(b));
           this.list = response.body['features'];
           this.populate();
         }
@@ -238,11 +228,7 @@
       updateRoute (route) {
         // TODO: Handle arbitrary search queries
         if (typeof route['entity'] !== "undefined") {
-          if (typeof route['granularity'] !== "undefined") {
-            this.$router.push({path: '/features/' + route['entity'] + '/' + route['granularity'], params: route});
-          } else {
-            this.$router.push({path: '/features/' + route['entity'], params: route});
-          }
+          this.$router.push({path: '/features/' + route['entity'], params: route});
         } else {
           this.$router.push({path: '/features', params: route});
         }
diff --git a/ui/src/filters.js b/ui/src/filters.js
index b2a93a6631..f335d77e05 100644
--- a/ui/src/filters.js
+++ b/ui/src/filters.js
@@ -2,12 +2,6 @@ export default {
   formatTimestamp (timestamp) {
     let datetime = new Date(timestamp);
     return datetime.toLocaleTimeString('en-US');
-  },
-  fillGranularity (feature) {
-    if (typeof feature.spec['granularity'] === "undefined") {
-      feature.spec['granularity'] = "NONE";
-    }
-    return feature;
   }
 }
diff --git a/ui/src/router.js b/ui/src/router.js
index 9cc13e9ca5..9d7fec8d6b 100644
--- a/ui/src/router.js
+++ b/ui/src/router.js
@@ -25,11 +25,6 @@ export default new Router({
       name: 'features-by-entity',
       component: FeatureList
     },
-    {
-      path: '/features/:entity/:granularity',
-      name: 'features-by-entity-granularity',
-      component: FeatureList
-    },
     {
       path: '/feature/:id',
       name: 'feature-details',
diff --git a/ui/src/views/Feature/Details.vue b/ui/src/views/Feature/Details.vue
index ba40531091..45a97e13cb 100644
--- a/ui/src/views/Feature/Details.vue
+++ b/ui/src/views/Feature/Details.vue
@@ -8,11 +8,6 @@
       {{ feature.spec.entity }}
-
       •
-
-        {{ feature.spec.granularity }}
-
-
       •
       {{ feature.spec.name }}
       •
@@ -35,10 +30,6 @@
-
-      Granularity:
-      {{ feature.spec.granularity }}
-
       Value type:
       {{ feature.spec.valueType }}
@@ -55,10 +46,6 @@
       URI:
       {{ feature.spec.uri }}
-
-      BigQuery view:
-      {{ feature.bigqueryView }}
-
       Created:
       {{ feature.created }}
@@ -75,6 +62,14 @@
+
+      Warehouse store:
+
+
+        {{ feature.spec.dataStores.warehouse.id }}
+
+
+
       Status:
       OK
@@ -168,7 +163,6 @@
         name: "",
         owner: "",
         description: "",
-        granularity: 'NONE',
         valueType: "",
         entity: "",
         group: "",
@@ -205,9 +199,7 @@
       return `{
   "entityName": "${ this.feature.spec.entity }",
   "entityId": [${ '"' + this.entityList.join('","') + '"' }],
-  "requestDetails": [{
-    "featureId": "${ this.feature.spec.id }"
-  }]
+  "featureId": ["${ this.feature.spec.id }"]
 }`
       }
     },
@@ -216,7 +208,7 @@
     fetchData() {
      let featureId = this.$route.params.id;
       this.$http.get(process.env.VUE_APP_ROOT_API + '/features/'+featureId).then(response => {
-        this.feature = filters.fillGranularity(response.body['feature']);
+        this.feature = response.body['feature'];
         this.yaml = json2yaml.stringify(response.body['rawSpec']);
       }, response => {
         this.error = response.statusText;
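The serving request body assembled by Details.vue changes shape here: the nested requestDetails objects give way to a flat featureId list. A sketch of the new body with illustrative values:

    import json

    # Values are placeholders; the shape matches the template string above.
    request_body = {
        "entityName": "driver",
        "entityId": ["driver1", "driver2"],
        "featureId": ["driver.last_opportunity"],
    }
    print(json.dumps(request_body, indent=2))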