diff --git a/modules/iot-core/README.md b/modules/iot-core/README.md
index d28006db96..f72bcac100 100644
--- a/modules/iot-core/README.md
+++ b/modules/iot-core/README.md
@@ -87,13 +87,13 @@ module "iot-platform" {
 ```
 
 ## Example integrated with Data Foundation Platform
-In this example, we will show how to extend the **[Data Foundations Platform](../../data-solutions/data-platform-foundations/)** to include IoT Platform as a new source of data.
+In this example, we will show how to extend the **[Data Foundations Platform](../../examples/data-solutions/data-platform-foundations/)** to include IoT Platform as a new source of data.
 
 ![Target architecture](./diagram_iot.png)
 
-1. First, we will setup Environment following instructions in **[Environment Setup](../../data-solutions/data-platform-foundations/01-environment/)** to setup projects and SAs required. Get output variable project_ids.landing as will be used later
+1. First, we will setup Environment following instructions in **[Environment Setup](../../examples/data-solutions/data-platform-foundations/)** to setup projects and SAs required. Get output variable project_ids.landing as will be used later
 
-1. Second, execute instructions in **[Environment Setup](../../data-solutions/data-platform-foundations/02-resources/)** to provision PubSub, DataFlow, BQ,... Get variable landing-pubsub as will be used later to create IoT Registry
+1. Second, execute instructions in **[Environment Setup](../../examples/data-solutions/data-platform-foundations/)** to provision PubSub, DataFlow, BQ,... Get variable landing-pubsub as will be used later to create IoT Registry
 
 1. Now it is time to provision IoT Platform. Modify landing-project-id and landing_pubsub_topic_id with output variables obtained before. Create device certificates as shown in the Simple Example and register them in devices.yaml file together with deviceids.
 
@@ -112,7 +112,7 @@ module "iot-platform" {
 }
 # tftest:skip
 ```
-1. After that, we can setup the pipeline "PubSub to BigQuery" shown at **[Pipeline Setup](../../data-solutions/data-platform-foundations/03-pipeline/pubsub_to_bigquery.md)**
+1. After that, we can setup the pipeline "PubSub to BigQuery" shown at **[Pipeline Setup](../../examples/data-solutions/data-platform-foundations/)**
 
 1. Finally, instead of testing the pipeline by sending messages to PubSub, we can now test sending telemetry messages from simulated IoT devices to our IoT Platform, for example using the MQTT demo client at https://github.com/googleapis/nodejs-iot/tree/main/samples/mqtt_example . We shall edit the client script cloudiot_mqtt_example_nodejs.js to send messages following the pipeline message format, so they are processed by DataFlow job and inserted in the BigQuery table.
 ```
diff --git a/tools/check_links.py b/tools/check_links.py
index 00a57f704b..bbc7ff5f55 100755
--- a/tools/check_links.py
+++ b/tools/check_links.py
@@ -29,8 +29,6 @@
 BASEDIR = pathlib.Path(__file__).resolve().parents[1]
 DOC = collections.namedtuple('DOC', 'path relpath links')
 LINK = collections.namedtuple('LINK', 'dest valid')
-OBJS_EXPAND = (marko.block.List, marko.block.ListItem, marko.block.Paragraph)
-OBJS_LINK = marko.inline.Link
 
 
 def check_link(link, readme_path):
@@ -45,39 +43,24 @@ def check_link(link, readme_path):
   return LINK(link.dest, link_valid)
 
 
-def check_elements(elements, readme_path):
-  'Recursively finds and checks links in a list of elements.'
-  if len(elements) == 0:
-    return []
-
-  el = elements[0]
-
-  # If there is one element, check the link,
-  # expand it (if possible), return [] otherwise
-  if len(elements) == 1:
-    if isinstance(el, OBJS_LINK):
-      return [check_link(el, readme_path)]
-    if isinstance(el, OBJS_EXPAND):
-      return check_elements(el.children, readme_path)
-    return []
-
-  # If there is more than one element call recursively:
-  # concatenate call on the first element and call on all other elements
-  if len(elements) > 1:
-    link_in_first_element = check_elements([el], readme_path)
-    link_in_other_elements = check_elements(elements[1:len(elements)],
-                                            readme_path)
-    return link_in_first_element + link_in_other_elements
-
-
 def check_docs(dir_name):
   'Traverses dir_name and checks for all Markdown files.'
   dir_path = BASEDIR / dir_name
+  parser = marko.parser.Parser()
   for readme_path in sorted(dir_path.glob('**/*.md')):
     if '.terraform' in str(readme_path) or '.pytest' in str(readme_path):
       continue
-    els = marko.parser.Parser().parse(readme_path.read_text()).children
-    links = check_elements(els, readme_path)
+
+    root = parser.parse(readme_path.read_text())
+    elements = collections.deque([root])
+    links = []
+    while elements:
+      el = elements.popleft()
+      if isinstance(el, marko.inline.Link):
+        links.append(check_link(el, readme_path))
+      elif hasattr(el, 'children'):
+        elements.extend(el.children)
+
     yield DOC(readme_path, str(readme_path.relative_to(dir_path)), links)
 
 
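
Note on the `tools/check_links.py` change: the recursive `check_elements` helper is replaced by an iterative breadth-first walk over the marko AST, so links are found at any nesting depth without maintaining the `OBJS_EXPAND` whitelist of expandable block types. Below is a minimal standalone sketch of that same traversal pattern; it mirrors the marko calls used in the diff (`marko.parser.Parser().parse`, `marko.inline.Link`, `.dest`), while the sample Markdown string and the `collect_links` helper name are introduced here for illustration only and are not part of the repository.

```
# Minimal standalone sketch of the iterative link traversal used in the
# check_links.py hunk above. SAMPLE_MD and collect_links() are illustrative
# names, not part of the repository.
import collections

import marko.inline
import marko.parser

SAMPLE_MD = '''
## Example integrated with Data Foundation Platform

In this example we link to the **[Data Foundations Platform](../../examples/data-solutions/data-platform-foundations/)**.
'''


def collect_links(markdown_text):
  'Returns the destination of every link found in the Markdown text.'
  root = marko.parser.Parser().parse(markdown_text)
  elements = collections.deque([root])
  dests = []
  while elements:
    el = elements.popleft()
    if isinstance(el, marko.inline.Link):
      dests.append(el.dest)
    elif hasattr(el, 'children'):
      # Expand anything exposing children (Document, List, Paragraph, ...);
      # plain strings enqueued this way fail both checks and are skipped.
      elements.extend(el.children)
  return dests


if __name__ == '__main__':
  for dest in collect_links(SAMPLE_MD):
    print(dest)
```

Using a `deque` keeps the walk linear in the number of AST nodes and avoids Python's recursion limit on long or deeply nested READMEs, which the previous sibling-by-sibling recursion could run into.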