Update boilerplate version #4541

Merged: 1 commit, merged on Dec 7, 2023
boilerplate/flyte/end2end/run-tests.py: 94 changes (13 additions, 81 deletions)
@@ -1,11 +1,9 @@
#!/usr/bin/env python3

import datetime
import json
import sys
import time
import traceback
from typing import Dict, List, Mapping, Tuple, Optional
from typing import Dict, List, Optional

import click
import requests
@@ -17,79 +15,6 @@
WAIT_TIME = 10
MAX_ATTEMPTS = 200

# This dictionary maps the names found in the flytesnacks manifest to a list of workflow names and
# inputs. This is so we can progressively cover all priorities in the original flytesnacks manifest,
# starting with "core".
FLYTESNACKS_WORKFLOW_GROUPS: Mapping[str, List[Tuple[str, dict]]] = {
"lite": [
("basics.hello_world.hello_world_wf", {}),
],
"core": [
# ("development_lifecycle.decks.image_renderer_wf", {}),
# The chain_workflows example in flytesnacks expects to be running in a sandbox.
("advanced_composition.chain_entities.chain_workflows_wf", {}),
("advanced_composition.dynamics.wf", {"s1": "Pear", "s2": "Earth"}),
("advanced_composition.map_task.my_map_workflow", {"a": [1, 2, 3, 4, 5]}),
# Workflows that use nested executions cannot be launched via flyteremote.
# This issue is being tracked in https://github.com/flyteorg/flyte/issues/1482.
# ("control_flow.run_conditions.multiplier", {"my_input": 0.5}),
# ("control_flow.run_conditions.multiplier_2", {"my_input": 10}),
# ("control_flow.run_conditions.multiplier_3", {"my_input": 5}),
# ("control_flow.run_conditions.basic_boolean_wf", {"seed": 5}),
# ("control_flow.run_conditions.bool_input_wf", {"b": True}),
# ("control_flow.run_conditions.nested_conditions", {"my_input": 0.4}),
# ("control_flow.run_conditions.consume_outputs", {"my_input": 0.4, "seed": 7}),
# ("control_flow.run_merge_sort.merge_sort", {"numbers": [5, 4, 3, 2, 1], "count": 5}),
("advanced_composition.subworkflows.parent_workflow", {"my_input1": "hello"}),
("advanced_composition.subworkflows.nested_parent_wf", {"a": 3}),
("basics.workflow.simple_wf", {"x": [1, 2, 3], "y": [1, 2, 3]}),
# TODO: enable new files and folders workflows
# ("basics.files.rotate_one_workflow", {"in_image": "https://upload.wikimedia.org/wikipedia/commons/d/d2/Julia_set_%28C_%3D_0.285%2C_0.01%29.jpg"}),
# ("basics.folders.download_and_rotate", {}),
("basics.hello_world.hello_world_wf", {}),
("basics.named_outputs.simple_wf_with_named_outputs", {}),
# # Getting a 403 for the wikipedia image
# # ("basics.reference_task.wf", {}),
("data_types_and_io.dataclass.dataclass_wf", {"x": 10, "y": 20}),
# Enums are not supported in flyteremote
# ("type_system.enums.enum_wf", {"c": "red"}),
("data_types_and_io.structured_dataset.simple_sd_wf", {"a": 42}),
# ("my.imperative.workflow.example", {"in1": "hello", "in2": "foo"}),
],
"integrations-k8s-spark": [
(
"k8s_spark_plugin.pyspark_pi.my_spark",
{"triggered_date": datetime.datetime.now()},
),
],
"integrations-kfpytorch": [
("kfpytorch_plugin.pytorch_mnist.pytorch_training_wf", {}),
],
"integrations-kftensorflow": [
("kftensorflow_plugin.tf_mnist.mnist_tensorflow_workflow", {}),
],
# "integrations-pod": [
# ("pod.pod.pod_workflow", {}),
# ],
"integrations-pandera_examples": [
("pandera_plugin.basic_schema_example.process_data", {}),
# TODO: investigate type mismatch float -> numpy.float64
# ("pandera_plugin.validating_and_testing_ml_pipelines.pipeline", {"data_random_state": 42, "model_random_state": 99}),
],
"integrations-modin_examples": [
("modin_plugin.knn_classifier.pipeline", {}),
],
"integrations-papermilltasks": [
("papermill_plugin.simple.nb_to_python_wf", {"f": 3.1415926535}),
],
"integrations-greatexpectations": [
("greatexpectations_plugin.task_example.simple_wf", {}),
("greatexpectations_plugin.task_example.file_wf", {}),
("greatexpectations_plugin.task_example.schema_wf", {}),
("greatexpectations_plugin.task_example.runtime_wf", {}),
],
}


def execute_workflow(
remote: FlyteRemote,
@@ -137,6 +62,7 @@ def schedule_workflow_groups(
workflow_groups: List[str],
remote: FlyteRemote,
terminate_workflow_on_failure: bool,
parsed_manifest: List[dict],
cluster_pool_name: Optional[str] = None,
) -> Dict[str, bool]:
"""
@@ -146,7 +72,12 @@ def schedule_workflow_groups(
executions_by_wfgroup = {}
# Schedule executions for each workflow group,
for wf_group in workflow_groups:
workflows = FLYTESNACKS_WORKFLOW_GROUPS.get(wf_group, [])
workflow_group_item = list(
filter(lambda item: item["name"] == wf_group, parsed_manifest)
)
workflows = []
if workflow_group_item:
workflows = workflow_group_item[0]["examples"]
executions_by_wfgroup[wf_group] = [
execute_workflow(remote, tag, workflow[0], workflow[1], cluster_pool_name)
for workflow in workflows
@@ -188,12 +119,12 @@ def schedule_workflow_groups(
return results


def valid(workflow_group):
def valid(workflow_group, parsed_manifest):
"""
Return True if a workflow group is contained in FLYTESNACKS_WORKFLOW_GROUPS,
Return True if a workflow group is contained in parsed_manifest,
False otherwise.
"""
return workflow_group in FLYTESNACKS_WORKFLOW_GROUPS.keys()
return workflow_group in set(wf_group["name"] for wf_group in parsed_manifest)


def run(
@@ -233,7 +164,7 @@ def run(
results = []
valid_workgroups = []
for workflow_group in workflow_groups:
if not valid(workflow_group):
if not valid(workflow_group, parsed_manifest):
results.append(
{
"label": workflow_group,
@@ -249,6 +180,7 @@
valid_workgroups,
remote,
terminate_workflow_on_failure,
parsed_manifest,
cluster_pool_name,
)

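For readers of the diff above: the new lookup assumes the parsed flytesnacks manifest is a list of group entries, each with a "name" and an "examples" list of (workflow name, inputs) pairs. Below is a minimal, hypothetical sketch of that shape and of the filter-based lookup the patch performs in schedule_workflow_groups() and valid(); the helper name lookup_examples and the sample manifest content are illustrative and not taken from flytesnacks.

# Minimal sketch (not part of this PR): the manifest shape assumed by the
# patched schedule_workflow_groups() and valid().
from typing import List

# Hypothetical manifest content, for illustration only.
parsed_manifest: List[dict] = [
    {
        "name": "core",
        "examples": [
            ["basics.hello_world.hello_world_wf", {}],
            ["basics.workflow.simple_wf", {"x": [1, 2, 3], "y": [1, 2, 3]}],
        ],
    },
]

def lookup_examples(wf_group: str, manifest: List[dict]) -> list:
    # Same pattern as the patched code: filter manifest entries by group name
    # and fall back to an empty list when the group is not present.
    workflow_group_item = list(
        filter(lambda item: item["name"] == wf_group, manifest)
    )
    return workflow_group_item[0]["examples"] if workflow_group_item else []

print(lookup_examples("core", parsed_manifest))     # two [workflow, inputs] pairs
print(lookup_examples("unknown", parsed_manifest))  # []

With this shape, valid() reduces to a membership test on the entry names, and a group missing from the manifest yields an empty list of examples rather than a KeyError.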
boilerplate/flyte/golang_test_targets/Makefile: 7 changes (2 additions, 5 deletions)
@@ -3,9 +3,6 @@
#
# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst

.PHONY: codespell
codespell:
git ls-files | grep -vE 'go\.mod|go\.sum|flyteidl/|\.pb$$|\.git|\.pdf|\.svg|requirements\.txt|gen/' | xargs codespell -w

.PHONY: download_tooling
download_tooling: #download dependencies (including test deps) for the package
@@ -16,8 +13,8 @@ generate: download_tooling #generate go code
@boilerplate/flyte/golang_test_targets/go-gen.sh

.PHONY: lint
lint: codespell download_tooling #lints the package for common code smells
GL_DEBUG=linters_output,env golangci-lint run --fix --deadline=5m --exclude deprecated -v
lint: download_tooling #lints the package for common code smells
GL_DEBUG=linters_output,env golangci-lint run --deadline=5m --exclude deprecated -v

# If code is failing goimports linter, this will fix.
# skips 'vendor'
boilerplate/flyte/golang_test_targets/download_tooling.sh: 1 change (1 addition, 0 deletions)
@@ -19,6 +19,7 @@ tools=(
"github.com/EngHabu/mockery/cmd/mockery"
"github.com/flyteorg/flytestdlib/cli/pflags@latest"
"github.com/golangci/golangci-lint/cmd/golangci-lint"
"github.com/daixiang0/gci"
"github.com/alvaroloes/enumer"
"github.com/pseudomuto/protoc-gen-doc/cmd/protoc-gen-doc"
)
boilerplate/flyte/golang_test_targets/goimports: 1 change (1 addition, 0 deletions)
@@ -6,3 +6,4 @@
# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst

goimports -w $(find . -type f -name '*.go' -not -path "./vendor/*" -not -path "./pkg/client/*" -not -path "./boilerplate/*")
gci write -s standard -s default -s "prefix(github.com/flyteorg)" --custom-order --skip-generated .