From cc475f7bdfaa5ff8244abca14438d8feea98eacd Mon Sep 17 00:00:00 2001 From: tetiana-karasova <62887365+tetiana-karasova@users.noreply.github.com> Date: Fri, 25 Feb 2022 02:59:34 +0100 Subject: [PATCH] docs(samples): add samples for events (#155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Retail. Products importing code samples * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * lint fix * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update copyright year * remove ClientOptions * update requirements * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add requirement for pytest-xdist * test samples on all py3.7+ versions * add EVENTS_BUCKET_NAME * importing trsts fix * importing trsts fix * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update import_products_gcs_test.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add google-cloud-testutils==1.3.1 to requirements-test.txt * rename setup->setup_events * fix tests * lint * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix flaky tests; address review feedback * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor 
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Karl Weinmeister <11586922+kweinmeister@users.noreply.github.com> --- .../interactive-tutorials/events/conftest.py | 30 ++ .../events/import_user_events_big_query.py | 108 +++++++ .../import_user_events_bigquery_test.py | 65 ++++ .../events/import_user_events_gcs.py | 115 ++++++++ .../events/import_user_events_gcs_test.py | 53 ++++ .../events/import_user_events_inline.py | 111 +++++++ .../events/import_user_events_inline_test.py | 38 +++ .../interactive-tutorials/events/noxfile.py | 279 ++++++++++++++++++ .../events/noxfile_config.py | 36 +++ .../events/requirements-test.txt | 3 + .../events/requirements.txt | 4 + .../events_create_bigquery_table.py | 31 ++ .../setup_events/events_create_gcs_bucket.py | 28 ++ .../setup_events/events_delete_gcs_bucket.py | 25 ++ .../events/setup_events/setup_cleanup.py | 207 +++++++++++++ .../setup_events/update_user_events_json.py | 24 ++ .../interactive-tutorials/noxfile_config.py | 2 +- .../import_products_big_query_table.py | 6 +- .../product/import_products_gcs.py | 3 +- .../resources/user_events.json | 8 +- .../resources/user_events_some_invalid.json | 8 +- 21 files changed, 1171 insertions(+), 13 deletions(-) create mode 100644 samples/interactive-tutorials/events/conftest.py create mode 100644 samples/interactive-tutorials/events/import_user_events_big_query.py create mode 100644 samples/interactive-tutorials/events/import_user_events_bigquery_test.py create mode 100644 samples/interactive-tutorials/events/import_user_events_gcs.py create mode 100644 samples/interactive-tutorials/events/import_user_events_gcs_test.py create mode 100644 samples/interactive-tutorials/events/import_user_events_inline.py create mode 100644 samples/interactive-tutorials/events/import_user_events_inline_test.py create mode 100644 samples/interactive-tutorials/events/noxfile.py 
create mode 100644 samples/interactive-tutorials/events/noxfile_config.py create mode 100644 samples/interactive-tutorials/events/requirements-test.txt create mode 100644 samples/interactive-tutorials/events/requirements.txt create mode 100644 samples/interactive-tutorials/events/setup_events/events_create_bigquery_table.py create mode 100644 samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py create mode 100644 samples/interactive-tutorials/events/setup_events/events_delete_gcs_bucket.py create mode 100644 samples/interactive-tutorials/events/setup_events/setup_cleanup.py create mode 100644 samples/interactive-tutorials/events/setup_events/update_user_events_json.py diff --git a/samples/interactive-tutorials/events/conftest.py b/samples/interactive-tutorials/events/conftest.py new file mode 100644 index 00000000..8cfb8596 --- /dev/null +++ b/samples/interactive-tutorials/events/conftest.py @@ -0,0 +1,30 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import test_utils.prefixer + +prefixer = test_utils.prefixer.Prefixer( + "python-retail", "samples/interactive-tutorials/product" +) + + +@pytest.fixture(scope="session") +def table_id_prefix() -> str: + return prefixer.create_prefix() + + +@pytest.fixture(scope="session") +def bucket_name_prefix() -> str: + return prefixer.create_prefix() diff --git a/samples/interactive-tutorials/events/import_user_events_big_query.py b/samples/interactive-tutorials/events/import_user_events_big_query.py new file mode 100644 index 00000000..40821f8a --- /dev/null +++ b/samples/interactive-tutorials/events/import_user_events_big_query.py @@ -0,0 +1,108 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os + +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + + +def main(project_id, dataset_id, table_id): + # [START retail_import_user_events_from_big_query] + # TODO: Set project_id to your Google Cloud Platform project ID. 
+ # project_id = "my-project" + + # TODO: Set dataset_id + # dataset_id = "user_events" + + # TODO: Set table_id + # table_id = "events" + + # Import products into a catalog from big query table using Retail API + import time + + from google.cloud.retail import ( + BigQuerySource, + ImportUserEventsRequest, + UserEventInputConfig, + UserEventServiceClient, + ) + + default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog" + + # TO CHECK ERROR HANDLING USE THE TABLE OF INVALID USER EVENTS: + # table_id = "events_some_invalid" + + # get import user events from big query request + def get_import_events_big_query_request(): + # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE: + # default_catalog = "invalid_catalog_name" + big_query_source = BigQuerySource() + big_query_source.project_id = project_id + big_query_source.dataset_id = dataset_id + big_query_source.table_id = table_id + big_query_source.data_schema = "user_event" + + input_config = UserEventInputConfig() + input_config.big_query_source = big_query_source + + import_request = ImportUserEventsRequest() + import_request.parent = default_catalog + import_request.input_config = input_config + + print("---import user events from BigQuery source request---") + print(import_request) + + return import_request + + # call the Retail API to import user events + def import_user_events_from_big_query(): + import_big_query_request = get_import_events_big_query_request() + big_query_operation = UserEventServiceClient().import_user_events( + import_big_query_request + ) + + print("---the operation was started:----") + print(big_query_operation.operation.name) + + while not big_query_operation.done(): + print("---please wait till operation is done---") + time.sleep(30) + print("---import user events operation is done---") + + if big_query_operation.metadata is not None: + print("---number of successfully imported events---") + print(big_query_operation.metadata.success_count) + 
print("---number of failures during the importing---") + print(big_query_operation.metadata.failure_count) + else: + print("---operation.metadata is empty---") + + if big_query_operation.result is not None: + print("---operation result:---") + print(big_query_operation.result()) + else: + print("---operation.result is empty---") + + import_user_events_from_big_query() + + # [END retail_import_user_events_from_big_query] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("dataset_id", nargs="?", default="user_events") + parser.add_argument("table_id", nargs="?", default="events") + args = parser.parse_args() + main(project_id, args.dataset_id, args.table_id) diff --git a/samples/interactive-tutorials/events/import_user_events_bigquery_test.py b/samples/interactive-tutorials/events/import_user_events_bigquery_test.py new file mode 100644 index 00000000..69ebd716 --- /dev/null +++ b/samples/interactive-tutorials/events/import_user_events_bigquery_test.py @@ -0,0 +1,65 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re +import subprocess + +from setup_events.setup_cleanup import ( + create_bq_dataset, + create_bq_table, + delete_bq_table, + upload_data_to_bq_table, +) +from setup_events.update_user_events_json import update_events_timestamp + + +def test_import_products_bq(table_id_prefix): + dataset = "user_events" + valid_products_table = f"{table_id_prefix}events" + product_schema = "../resources/events_schema.json" + valid_products_source_file = "../resources/user_events.json" + + try: + update_events_timestamp("../resources/user_events.json") + update_events_timestamp("../resources/user_events_some_invalid.json") + create_bq_dataset(dataset) + create_bq_table(dataset, valid_products_table, product_schema) + upload_data_to_bq_table( + dataset, valid_products_table, valid_products_source_file, product_schema + ) + output = str( + subprocess.check_output( + f"python import_user_events_big_query.py {dataset} {valid_products_table}", + shell=True, + ) + ) + finally: + delete_bq_table(dataset, valid_products_table) + + assert re.match( + '.*import user events from BigQuery source request.*?parent: "projects/.*?/locations/global/catalogs/default_catalog.*', + output, + ) + assert re.match( + ".*import user events from BigQuery source request.*?input_config.*?big_query_source.*", + output, + ) + assert re.match( + ".*the operation was started.*?projects/.*?/locations/global/catalogs/default_catalog/operations/import-user-events.*", + output, + ) + assert re.match(".*import user events operation is done.*", output) + assert re.match(".*number of successfully imported events.*", output) + assert re.match(".*number of failures during the importing.*?0.*", output) + assert re.match(".*operation result.*?errors_config.*", output) diff --git a/samples/interactive-tutorials/events/import_user_events_gcs.py b/samples/interactive-tutorials/events/import_user_events_gcs.py new file mode 100644 index 00000000..c9da3f85 --- /dev/null +++ 
b/samples/interactive-tutorials/events/import_user_events_gcs.py @@ -0,0 +1,115 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os + + +def main(bucket_name): + # [START retail_import_user_events_from_gcs] + # Import user events into a catalog from GCS using Retail API + + import time + + from google.cloud.retail import ( + GcsSource, + ImportErrorsConfig, + ImportUserEventsRequest, + UserEventInputConfig, + UserEventServiceClient, + ) + + # Read the project number from the environment variable + project_id = os.getenv("GOOGLE_CLOUD_PROJECT") + + # Read bucket name from the environment variable + bucket_name = os.getenv("EVENTS_BUCKET_NAME") + + # TODO: Developer set the bucket_name + # bucket_name = 'user_events_bucket' + + default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format( + project_id + ) + + gcs_bucket = "gs://{}".format(bucket_name) + gcs_errors_bucket = "{}/error".format(gcs_bucket) + gcs_events_object = "user_events.json" + + # TO CHECK ERROR HANDLING USE THE JSON WITH INVALID PRODUCT + # gcs_events_object = "user_events_some_invalid.json" + + # get import user events from gcs request + def get_import_events_gcs_request(gcs_object_name: str): + # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE: + # default_catalog = "invalid_catalog_name" + gcs_source = GcsSource() + gcs_source.input_uris = [f"{gcs_bucket}/{gcs_object_name}"] + + 
input_config = UserEventInputConfig() + input_config.gcs_source = gcs_source + + errors_config = ImportErrorsConfig() + errors_config.gcs_prefix = gcs_errors_bucket + + import_request = ImportUserEventsRequest() + import_request.parent = default_catalog + import_request.input_config = input_config + import_request.errors_config = errors_config + + print("---import user events from google cloud source request---") + print(import_request) + + return import_request + + # call the Retail API to import user events + def import_user_events_from_gcs(): + import_gcs_request = get_import_events_gcs_request(gcs_events_object) + gcs_operation = UserEventServiceClient().import_user_events(import_gcs_request) + + print("---the operation was started:----") + print(gcs_operation.operation.name) + + while not gcs_operation.done(): + print("---please wait till operation is done---") + time.sleep(30) + + print("---import user events operation is done---") + + if gcs_operation.metadata is not None: + print("---number of successfully imported events---") + print(gcs_operation.metadata.success_count) + print("---number of failures during the importing---") + print(gcs_operation.metadata.failure_count) + else: + print("---operation.metadata is empty---") + + if gcs_operation.result is not None: + print("---operation result:---") + print(gcs_operation.result()) + else: + print("---operation.result is empty---") + + import_user_events_from_gcs() + + +# [END retail_import_user_events_from_gcs] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "bucket_name", nargs="?", default=os.environ["EVENTS_BUCKET_NAME"] + ) + args = parser.parse_args() + main(args.bucket_name) diff --git a/samples/interactive-tutorials/events/import_user_events_gcs_test.py b/samples/interactive-tutorials/events/import_user_events_gcs_test.py new file mode 100644 index 00000000..5ef77a72 --- /dev/null +++ b/samples/interactive-tutorials/events/import_user_events_gcs_test.py @@ 
-0,0 +1,53 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re +import subprocess + +from setup_events.setup_cleanup import create_bucket, delete_bucket, upload_blob +from setup_events.update_user_events_json import update_events_timestamp + + +def test_import_events_gcs(bucket_name_prefix): + # gcs buckets have a limit of 63 characters. Get the last 60 characters + bucket_name = bucket_name_prefix[63:] + + try: + update_events_timestamp("../resources/user_events.json") + update_events_timestamp("../resources/user_events_some_invalid.json") + create_bucket(bucket_name) + upload_blob(bucket_name, "../resources/user_events.json") + + output = str( + subprocess.check_output("python import_user_events_gcs.py", shell=True) + ) + finally: + delete_bucket(bucket_name) + + assert re.match( + '.*import user events from google cloud source request.*?parent: "projects/.*?/locations/global/catalogs/default_catalog.*', + output, + ) + assert re.match( + ".*import user events from google cloud source request.*?input_config.*?gcs_source.*", + output, + ) + assert re.match( + ".*the operation was started.*?projects/.*?/locations/global/catalogs/default_catalog/operations/import-user-events.*", + output, + ) + assert re.match(".*import user events operation is done.*", output) + assert re.match(".*number of successfully imported events.*?4.*", output) + assert re.match(".*number of failures during the importing.*?0.*", 
output) + assert re.match(".*operation result.*?errors_config.*", output) diff --git a/samples/interactive-tutorials/events/import_user_events_inline.py b/samples/interactive-tutorials/events/import_user_events_inline.py new file mode 100644 index 00000000..7c165a8d --- /dev/null +++ b/samples/interactive-tutorials/events/import_user_events_inline.py @@ -0,0 +1,111 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# [START retail_import_user_events_from_inline_source] +# Import user events into a catalog from inline source using Retail API +# +import datetime +import os +import random +import string +import time + +from google.cloud.retail import ( + ImportUserEventsRequest, + UserEvent, + UserEventInlineSource, + UserEventInputConfig, + UserEventServiceClient, +) +from google.protobuf.timestamp_pb2 import Timestamp + +project_id = os.getenv("GOOGLE_CLOUD_PROJECT") + +default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format( + project_id +) + + +# get user events for import +def get_user_events(): + user_events = [] + for x in range(3): + timestamp = Timestamp() + timestamp.seconds = int(datetime.datetime.now().timestamp()) + + user_event = UserEvent() + user_event.event_type = "home-page-view" + user_event.visitor_id = ( + "".join(random.sample(string.ascii_lowercase, 4)) + "event_" + str(x) + ) + user_event.event_time = timestamp + user_events.append(user_event) + + print(user_events) + 
return user_events + + +# get import user events from inline source request +def get_import_events_inline_source_request(user_events_to_import): + inline_source = UserEventInlineSource() + inline_source.user_events = user_events_to_import + + input_config = UserEventInputConfig() + input_config.user_event_inline_source = inline_source + + import_request = ImportUserEventsRequest() + import_request.parent = default_catalog + import_request.input_config = input_config + + print("---import user events from inline source request---") + print(import_request) + + return import_request + + +# call the Retail API to import user events +def import_user_events_from_inline_source(): + import_inline_request = get_import_events_inline_source_request(get_user_events()) + import_operation = UserEventServiceClient().import_user_events( + import_inline_request + ) + + print("---the operation was started:----") + print(import_operation.operation.name) + + while not import_operation.done(): + print("---please wait till operation is done---") + time.sleep(5) + + print("---import user events operation is done---") + + if import_operation.metadata is not None: + print("---number of successfully imported events---") + print(import_operation.metadata.success_count) + print("---number of failures during the importing---") + print(import_operation.metadata.failure_count) + else: + print("---operation.metadata is empty---") + + if import_operation.result is not None: + print("---operation result:---") + print(import_operation.result()) + else: + print("---operation.result is empty---") + + +import_user_events_from_inline_source() + +# [END retail_import_user_events_from_inline_source] diff --git a/samples/interactive-tutorials/events/import_user_events_inline_test.py b/samples/interactive-tutorials/events/import_user_events_inline_test.py new file mode 100644 index 00000000..79546b40 --- /dev/null +++ b/samples/interactive-tutorials/events/import_user_events_inline_test.py @@ -0,0 +1,38 @@ 
+# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re +import subprocess + + +def test_create_product(): + output = str( + subprocess.check_output("python import_user_events_inline.py", shell=True) + ) + + assert re.match( + '.*import user events from inline source request.*?parent: "projects/.*?/locations/global/catalogs/default_catalog.*', + output, + ) + assert re.match( + ".*import user events from inline source request.*?input_config.*?user_event_inline_source.*", + output, + ) + assert re.match( + ".*the operation was started.*?projects/.*?/locations/global/catalogs/default_catalog/operations/import-user-events.*", + output, + ) + assert re.match(".*import user events operation is done.*", output) + assert re.match(".*number of successfully imported events.*?3.*", output) + assert re.match(".*number of failures during the importing.*?0.*", output) diff --git a/samples/interactive-tutorials/events/noxfile.py b/samples/interactive-tutorials/events/noxfile.py new file mode 100644 index 00000000..20cdfc62 --- /dev/null +++ b/samples/interactive-tutorials/events/noxfile.py @@ -0,0 +1,279 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==19.10b0" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. 
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. 
+ """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if 
TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/samples/interactive-tutorials/events/noxfile_config.py b/samples/interactive-tutorials/events/noxfile_config.py new file mode 100644 index 00000000..3141a030 --- /dev/null +++ b/samples/interactive-tutorials/events/noxfile_config.py @@ -0,0 +1,36 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. 
+ +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.6"], + # An envvar key for determining the project id to use. Change it + # to 'PROJECT_NUMBER' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": { + "DATA_LABELING_API_ENDPOINT": "us-central1-autopush-aiplatform.sandbox.googleapis.com", + "PYTEST_ADDOPTS": "-n=auto", # Run tests parallel using all available CPUs + "EVENTS_BUCKET_NAME": "retail-interactive-tutorials-events", + }, +} diff --git a/samples/interactive-tutorials/events/requirements-test.txt b/samples/interactive-tutorials/events/requirements-test.txt new file mode 100644 index 00000000..3ab16c90 --- /dev/null +++ b/samples/interactive-tutorials/events/requirements-test.txt @@ -0,0 +1,3 @@ +pytest==7.0.1 +pytest-xdist==2.5.0 +google-cloud-testutils==1.3.1 diff --git a/samples/interactive-tutorials/events/requirements.txt b/samples/interactive-tutorials/events/requirements.txt new file mode 100644 index 00000000..259782b1 --- /dev/null +++ b/samples/interactive-tutorials/events/requirements.txt @@ -0,0 +1,4 @@ +google==3.0.0 +google-cloud-retail==1.3.0 +google-cloud-storage==2.1.0 +google-cloud-bigquery==2.33.0 \ No newline at end of file diff --git a/samples/interactive-tutorials/events/setup_events/events_create_bigquery_table.py b/samples/interactive-tutorials/events/setup_events/events_create_bigquery_table.py new file mode 100644 index 00000000..bf9a2436 --- /dev/null +++ 
b/samples/interactive-tutorials/events/setup_events/events_create_bigquery_table.py @@ -0,0 +1,31 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from setup_cleanup import create_bq_dataset, create_bq_table, \ + upload_data_to_bq_table + +dataset = "user_events" +valid_events_table = "events" +invalid_events_table = "events_some_invalid" +events_schema = "../resources/events_schema.json" +valid_events_source_file = "../resources/user_events.json" +invalid_events_source_file = "../resources/user_events_some_invalid.json" + +create_bq_dataset(dataset) +create_bq_table(dataset, valid_events_table, events_schema) +upload_data_to_bq_table(dataset, valid_events_table, valid_events_source_file, + events_schema) +create_bq_table(dataset, invalid_events_table, events_schema) +upload_data_to_bq_table(dataset, invalid_events_table, + invalid_events_source_file, events_schema) diff --git a/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py b/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py new file mode 100644 index 00000000..e1ccf829 --- /dev/null +++ b/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py @@ -0,0 +1,28 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os + +from setup_cleanup import create_bucket, upload_blob + +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +timestamp_ = datetime.datetime.now().timestamp().__round__() +bucket_name = os.environ["EVENTS_BUCKET_NAME"] + +create_bucket(bucket_name) +upload_blob(bucket_name, "../resources/user_events.json") +upload_blob(bucket_name, "../resources/user_events_some_invalid.json") + +print("\nThe gcs bucket {} was created".format(bucket_name)) diff --git a/samples/interactive-tutorials/events/setup_events/events_delete_gcs_bucket.py b/samples/interactive-tutorials/events/setup_events/events_delete_gcs_bucket.py new file mode 100644 index 00000000..c51b06bb --- /dev/null +++ b/samples/interactive-tutorials/events/setup_events/events_delete_gcs_bucket.py @@ -0,0 +1,25 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from setup_cleanup import delete_bucket + + +def delete_bucket_by_name(name: str): + if name is None: + bucket_name = os.getenv("EVENTS_BUCKET_NAME") + delete_bucket(bucket_name) + else: + delete_bucket(name) diff --git a/samples/interactive-tutorials/events/setup_events/setup_cleanup.py b/samples/interactive-tutorials/events/setup_events/setup_cleanup.py new file mode 100644 index 00000000..6fa29ecd --- /dev/null +++ b/samples/interactive-tutorials/events/setup_events/setup_cleanup.py @@ -0,0 +1,207 @@ +# Copyright 2022 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import json +import os +import re +import shlex +import subprocess + +from google.api_core.exceptions import NotFound + +from google.cloud import bigquery +from google.cloud import storage +from google.cloud.retail import ProductDetail, PurgeUserEventsRequest, \ + UserEvent, UserEventServiceClient, WriteUserEventRequest +from google.cloud.retail_v2 import Product +from google.protobuf.timestamp_pb2 import Timestamp + +project_id = os.getenv('GOOGLE_CLOUD_PROJECT') +default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format( + project_id) + + +# get user event +def get_user_event(visitor_id): + timestamp = Timestamp() + timestamp.seconds = int(datetime.datetime.now().timestamp()) + + product = Product() + product.id = 'test_id' + + product_detail = ProductDetail() + product_detail.product = product + + user_event = UserEvent() + user_event.event_type = "detail-page-view" + user_event.visitor_id = visitor_id + user_event.event_time = timestamp + user_event.product_details = [product_detail] + + print(user_event) + return user_event + + +# write user event +def write_user_event(visitor_id): + write_user_event_request = WriteUserEventRequest() + write_user_event_request.user_event = get_user_event(visitor_id) + write_user_event_request.parent = default_catalog + user_event = UserEventServiceClient().write_user_event( + write_user_event_request) + print("---the user event is written---") + print(user_event) + return user_event + + +# purge user event +def purge_user_event(visitor_id): + purge_user_event_request = PurgeUserEventsRequest() + purge_user_event_request.filter = 'visitorId="{}"'.format(visitor_id) + purge_user_event_request.parent = default_catalog + purge_user_event_request.force = True + purge_operation = UserEventServiceClient().purge_user_events( + purge_user_event_request) + + print("---the purge operation was started:----") + print(purge_operation.operation.name) + + +def get_project_id(): + 
get_project_command = "gcloud config get-value project --format json" + config = subprocess.check_output(shlex.split(get_project_command)) + project_id = re.search('\"(.*?)\"', str(config)).group(1) + return project_id + + +def create_bucket(bucket_name: str): + """Create a new bucket in Cloud Storage""" + print("Creating new bucket:" + bucket_name) + buckets_in_your_project = list_buckets() + if bucket_name in buckets_in_your_project: + print("Bucket {} already exists".format(bucket_name)) + else: + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + bucket.storage_class = "STANDARD" + new_bucket = storage_client.create_bucket(bucket, location="us") + print( + "Created bucket {} in {} with storage class {}".format( + new_bucket.name, new_bucket.location, new_bucket.storage_class + ) + ) + return new_bucket + + +def delete_bucket(bucket_name: str): + """Delete a bucket from Cloud Storage""" + storage_client = storage.Client() + print("Deleting bucket:" + bucket_name) + buckets_in_your_project = list_buckets() + if bucket_name in buckets_in_your_project: + blobs = storage_client.list_blobs(bucket_name) + for blob in blobs: + blob.delete() + bucket = storage_client.get_bucket(bucket_name) + bucket.delete() + print("Bucket {} is deleted".format(bucket.name)) + else: + print("Bucket {} is not found".format(bucket_name)) + + +def list_buckets(): + """Lists all buckets""" + bucket_list = [] + storage_client = storage.Client() + buckets = storage_client.list_buckets() + for bucket in buckets: + bucket_list.append(bucket.name) + return bucket_list + + +def upload_blob(bucket_name, source_file_name): + """Uploads a file to the bucket.""" + # The path to your file to upload + # source_file_name = "local/path/to/file" + print("Uploading data form {} to the bucket {}".format(source_file_name, + bucket_name)) + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + object_name = re.search('resources/(.*?)$', 
+        source_file_name).group(1)
+    blob = bucket.blob(object_name)
+    blob.upload_from_filename(source_file_name)
+    print(
+        "File {} uploaded to {}.".format(
+            source_file_name, object_name
+        )
+    )
+
+
+def create_bq_dataset(dataset_name):
+    """Create a BigQuery dataset"""
+    full_dataset_id = f"{project_id}.{dataset_name}"
+    bq = bigquery.Client()
+    print(f"Creating dataset {full_dataset_id}")
+    try:
+        bq.get_dataset(full_dataset_id)
+        print(f"dataset {full_dataset_id} already exists")
+    except NotFound:
+        # Construct a Dataset object to send to the API.
+        dataset = bigquery.Dataset(full_dataset_id)
+        dataset.location = "US"
+        bq.create_dataset(dataset)
+        print("dataset is created")
+
+
+def create_bq_table(dataset, table_name, schema_file_path):
+    """Create a BigQuery table"""
+    full_table_id = f"{project_id}.{dataset}.{table_name}"
+    bq = bigquery.Client()
+    print(f"Creating BigQuery table {full_table_id}")
+    try:
+        bq.get_table(full_table_id)
+        print(f"table {full_table_id} already exists")
+    except NotFound:
+        # Construct a Table object to send to the API.
+ with open(schema_file_path, "rb") as schema: + schema_dict = json.load(schema) + table = bigquery.Table(full_table_id, schema=schema_dict) + bq.create_table(table) + print("table is created") + + +def delete_bq_table(dataset, table_name): + full_table_id = f"{project_id}.{dataset}.{table_name}" + bq = bigquery.Client() + bq.delete_table(full_table_id, not_found_ok=True) + print("Table '{}' is deleted.".format(full_table_id)) + + +def upload_data_to_bq_table(dataset, table_name, source, schema_file_path): + """Upload data to the table from specified source file""" + full_table_id = f"{project_id}.{dataset}.{table_name}" + bq = bigquery.Client() + print(f"Uploading data from {source} to the table {full_table_id}") + with open(schema_file_path, "rb") as schema: + schema_dict = json.load(schema) + job_config = bigquery.LoadJobConfig( + source_format=bigquery.SourceFormat.NEWLINE_DELIMITED_JSON, + schema=schema_dict) + with open(source, "rb") as source_file: + job = bq.load_table_from_file(source_file, full_table_id, + job_config=job_config) + job.result() # Waits for the job to complete. 
+    print("data was uploaded")
diff --git a/samples/interactive-tutorials/events/setup_events/update_user_events_json.py b/samples/interactive-tutorials/events/setup_events/update_user_events_json.py
new file mode 100644
index 00000000..f04b352e
--- /dev/null
+++ b/samples/interactive-tutorials/events/setup_events/update_user_events_json.py
@@ -0,0 +1,24 @@
+"""Run the file to update the user_events.json file with more recent timestamp"""
+
+import datetime
+import re
+
+
+def update_events_timestamp(json_file):
+    # Get yesterday's date
+    request_time = datetime.datetime.now() - datetime.timedelta(days=1)
+    day = request_time.date().strftime("%Y-%m-%d")
+    print(day)
+
+    # Read in the file
+    with open(json_file, 'r') as file:
+        filedata = file.read()
+
+    # Replace the target string '"eventTime":"YYYY-mm-dd' with yesterday date
+    filedata = re.sub('\"eventTime\":\"([0-9]{4})-([0-9]{2})-([0-9]{2})',
+                      '\"eventTime\":\"' + day, filedata, flags=re.M)
+
+    # Write the file out again
+    with open(json_file, 'w') as file:
+        file.write(filedata)
+    print("The {} is updated".format(json_file))
diff --git a/samples/interactive-tutorials/noxfile_config.py b/samples/interactive-tutorials/noxfile_config.py
index 9b9b1756..4a009358 100644
--- a/samples/interactive-tutorials/noxfile_config.py
+++ b/samples/interactive-tutorials/noxfile_config.py
@@ -19,7 +19,7 @@
 TEST_CONFIG_OVERRIDE = {
     # You can opt out from the test for specific Python versions.
-    "ignored_versions": ["2.7", "3.6", "3.8", "3.9"],
+    "ignored_versions": ["2.7", "3.6"],
     # An envvar key for determining the project id to use. Change it
     # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
     # build specific Cloud project.
You can also use your own string diff --git a/samples/interactive-tutorials/product/import_products_big_query_table.py b/samples/interactive-tutorials/product/import_products_big_query_table.py index 91976cc9..f3942bc1 100644 --- a/samples/interactive-tutorials/product/import_products_big_query_table.py +++ b/samples/interactive-tutorials/product/import_products_big_query_table.py @@ -26,7 +26,7 @@ def main(project_id, dataset_id, table_id): # TODO: Set dataset_id # dataset_id = "products" - # TODO: Set dataset_id + # TODO: Set table_id # table_id = "products" # Import products into a catalog from big query table using Retail API @@ -108,7 +108,7 @@ def import_products_from_big_query(): if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("dataset_id") - parser.add_argument("table_id") + parser.add_argument("dataset_id", nargs="?", default="products") + parser.add_argument("table_id", nargs="?", default="products") args = parser.parse_args() main(project_id, args.dataset_id, args.table_id) diff --git a/samples/interactive-tutorials/product/import_products_gcs.py b/samples/interactive-tutorials/product/import_products_gcs.py index abcb6230..156e0735 100644 --- a/samples/interactive-tutorials/product/import_products_gcs.py +++ b/samples/interactive-tutorials/product/import_products_gcs.py @@ -13,6 +13,7 @@ # limitations under the License. 
import argparse +import os def main(bucket_name): @@ -111,6 +112,6 @@ def import_products_from_gcs(): if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("bucket_name") + parser.add_argument("bucket_name", nargs="?", default=os.environ["BUCKET_NAME"]) args = parser.parse_args() main(args.bucket_name) diff --git a/samples/interactive-tutorials/resources/user_events.json b/samples/interactive-tutorials/resources/user_events.json index 5360c309..87ee6d05 100644 --- a/samples/interactive-tutorials/resources/user_events.json +++ b/samples/interactive-tutorials/resources/user_events.json @@ -1,4 +1,4 @@ -{"eventType":"home-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00"} -{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","searchQuery":"RockerJeans teenagers blue jeans"} -{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","searchQuery":"SocksUnlimited teenagers black socks"} -{"eventType":"detail-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","productDetails":{"product":{"id":"GGCOGAEC100616"},"quantity":3}} +{"eventType":"home-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00"} +{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","searchQuery":"RockerJeans teenagers blue jeans"} +{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","searchQuery":"SocksUnlimited teenagers black socks"} +{"eventType":"detail-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","productDetails":{"product":{"id":"GGCOGAEC100616"},"quantity":3}} diff --git a/samples/interactive-tutorials/resources/user_events_some_invalid.json b/samples/interactive-tutorials/resources/user_events_some_invalid.json index c98b1699..164d9f34 100644 --- 
a/samples/interactive-tutorials/resources/user_events_some_invalid.json +++ b/samples/interactive-tutorials/resources/user_events_some_invalid.json @@ -1,4 +1,4 @@ -{"eventType":"home-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00"} -{"eventType":"invalid","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","searchQuery":"RockerJeans teenagers blue jeans"} -{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","searchQuery":"SocksUnlimited teenagers black socks"} -{"eventType":"detail-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2021-12-12T10:27:42+00:00","productDetails":{"product":{"id":"GGCOGAEC100616"},"quantity":3}} +{"eventType":"home-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00"} +{"eventType":"invalid","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","searchQuery":"RockerJeans teenagers blue jeans"} +{"eventType":"search","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","searchQuery":"SocksUnlimited teenagers black socks"} +{"eventType":"detail-page-view","visitorId":"bjbs_group1_visitor1","eventTime":"2022-02-23T10:27:42+00:00","productDetails":{"product":{"id":"GGCOGAEC100616"},"quantity":3}}