Move configuration of a few tools to pyproject.toml (#428)
* Move configuration of a few tools to pyproject.toml

Signed-off-by: Hongxin Liang <[email protected]>

* Stop pinning black

Formatted by a later version of black

Signed-off-by: Hongxin Liang <[email protected]>
honnix authored Mar 26, 2021
1 parent 7afe94d commit e76b28d
Showing 153 changed files with 2,028 additions and 604 deletions.
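For reference, consolidating these tools' settings in pyproject.toml generally means adding per-tool sections like the sketch below. The section names are the standard ones read by black, coverage, and isort; the option values are illustrative assumptions, not the exact contents added by this commit:

[tool.black]
# Illustrative values; the repository may use a different line length or target versions.
line-length = 120
target-version = ["py37", "py38"]

[tool.coverage.run]
# coverage only reads pyproject.toml when its "toml" extra is installed (hence coverage[toml] below).
branch = true

[tool.isort]
# The "black" profile keeps isort's import ordering compatible with black's formatting.
profile = "black"

The related switch from coverage to coverage[toml] in dev-requirements.in installs that extra, and unpinning black lets the newer release reformat the code base, which accounts for the bulk of the 153 changed files.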
4 changes: 2 additions & 2 deletions dev-requirements.in
@@ -1,7 +1,7 @@
-c requirements.txt

black==19.10b0
coverage
black
coverage[toml]
flake8
flake8-black
flake8-isort
8 changes: 5 additions & 3 deletions dev-requirements.txt
@@ -11,9 +11,8 @@ appdirs==1.4.4
attrs==20.3.0
# via
# -c requirements.txt
# black
# pytest
black==19.10b0
black==20.8b1
# via
# -c requirements.txt
# -r dev-requirements.in
@@ -22,7 +21,7 @@ click==7.1.2
# via
# -c requirements.txt
# black
coverage==5.5
coverage[toml]==5.5
# via -r dev-requirements.in
flake8-black==0.2.1
# via -r dev-requirements.in
@@ -46,6 +45,7 @@ mock==4.0.3
mypy-extensions==0.4.3
# via
# -c requirements.txt
# black
# mypy
mypy==0.812
# via -r dev-requirements.in
@@ -83,6 +83,7 @@ toml==0.10.2
# via
# -c requirements.txt
# black
# coverage
# pytest
typed-ast==1.4.2
# via
@@ -92,4 +93,5 @@ typed-ast==1.4.2
typing-extensions==3.7.4.3
# via
# -c requirements.txt
# black
# mypy
32 changes: 18 additions & 14 deletions doc-requirements.txt
@@ -12,17 +12,12 @@ ansiwrap==0.8.4
# via papermill
appdirs==1.4.4
# via black
appnope==0.1.2
# via
# ipykernel
# ipython
astroid==2.5.1
# via sphinx-autoapi
async-generator==1.10
# via nbclient
attrs==20.3.0
# via
# black
# jsonschema
# scantree
babel==2.9.0
@@ -35,15 +30,13 @@ beautifulsoup4==4.9.3
# via
# sphinx-code-include
# sphinx-material
black==19.10b0
# via
# flytekit
# papermill
black==20.8b1
# via papermill
bleach==3.3.0
# via nbconvert
boto3==1.17.36
boto3==1.17.39
# via sagemaker-training
botocore==1.20.36
botocore==1.20.39
# via
# boto3
# s3transfer
@@ -64,10 +57,11 @@ click==7.1.2
# papermill
croniter==1.0.10
# via flytekit
cryptography==3.4.6
cryptography==3.4.7
# via
# -r doc-requirements.in
# paramiko
# secretstorage
css-html-js-minify==2.5.5
# via sphinx-material
dataclasses-json==0.5.2
@@ -120,6 +114,10 @@ ipython==7.21.0
# via ipykernel
jedi==0.18.0
# via ipython
jeepney==0.6.0
# via
# keyring
# secretstorage
jinja2==2.11.3
# via
# nbconvert
@@ -161,7 +159,9 @@ marshmallow==3.10.0
mistune==0.8.4
# via nbconvert
mypy-extensions==0.4.3
# via typing-inspect
# via
# black
# typing-inspect
natsort==7.1.1
# via flytekit
nbclient==0.5.3
@@ -287,6 +287,8 @@ scantree==0.0.1
# via dirhash
scipy==1.6.2
# via sagemaker-training
secretstorage==3.3.1
# via keyring
six==1.15.0
# via
# bcrypt
@@ -377,7 +379,9 @@ traitlets==5.0.5
typed-ast==1.4.2
# via black
typing-extensions==3.7.4.3
# via typing-inspect
# via
# black
# typing-inspect
typing-inspect==0.6.0
# via dataclasses-json
unidecode==1.2.0
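The Python diffs that follow are mechanical reformatting produced by the newer black release (20.8b1 rather than the previously pinned 19.10b0). The most visible difference is black's handling of pre-existing trailing commas, the "magic trailing comma" introduced in the 20.x series: when a call or signature already ends with a trailing comma, black now puts each argument on its own line instead of keeping the call on one line. A minimal sketch of the effect, using made-up names for illustration:

def make_pair(first, second):
    return (first, second)

# black 19.10b0 left a fitting call like this on one line, trailing comma included:
pair = make_pair("configuration", "pyproject.toml",)

# black 20.8b1 expands the same call because of the trailing comma:
pair = make_pair(
    "configuration",
    "pyproject.toml",
)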
36 changes: 29 additions & 7 deletions flytekit/bin/entrypoint.py
@@ -126,7 +126,11 @@ def _dispatch_execute(ctx: FlyteContext, task_def: PythonTask, inputs_path: str,
_logging.error("!! Begin Unknown System Error Captured by Flyte !!")
exc_str = _traceback.format_exc()
output_file_dict[_constants.ERROR_FILE_NAME] = _error_models.ErrorDocument(
_error_models.ContainerError("SYSTEM:Unknown", exc_str, _error_models.ContainerError.Kind.RECOVERABLE,)
_error_models.ContainerError(
"SYSTEM:Unknown",
exc_str,
_error_models.ContainerError.Kind.RECOVERABLE,
)
)
_logging.error(exc_str)
_logging.error("!! End Error Captured by Flyte !!")
@@ -185,11 +189,13 @@ def _handle_annotated_task(task_def: PythonTask, inputs: str, output_prefix: str

if cloud_provider == _constants.CloudProvider.AWS:
file_access = _data_proxy.FileAccessProvider(
local_sandbox_dir=_sdk_config.LOCAL_SANDBOX.get(), remote_proxy=_s3proxy.AwsS3Proxy(raw_output_data_prefix),
local_sandbox_dir=_sdk_config.LOCAL_SANDBOX.get(),
remote_proxy=_s3proxy.AwsS3Proxy(raw_output_data_prefix),
)
elif cloud_provider == _constants.CloudProvider.GCP:
file_access = _data_proxy.FileAccessProvider(
local_sandbox_dir=_sdk_config.LOCAL_SANDBOX.get(), remote_proxy=_gcs_proxy.GCSProxy(raw_output_data_prefix),
local_sandbox_dir=_sdk_config.LOCAL_SANDBOX.get(),
remote_proxy=_gcs_proxy.GCSProxy(raw_output_data_prefix),
)
elif cloud_provider == _constants.CloudProvider.LOCAL:
# A fake remote using the local disk will automatically be created
@@ -353,7 +359,9 @@ def _pass_through():
@_click.option("--test", is_flag=True)
@_click.option("--resolver", required=False)
@_click.argument(
"resolver-args", type=_click.UNPROCESSED, nargs=-1,
"resolver-args",
type=_click.UNPROCESSED,
nargs=-1,
)
def execute_task_cmd(
task_module, task_name, inputs, output_prefix, raw_output_data_prefix, test, resolver, resolver_args
@@ -408,15 +416,29 @@ def fast_execute_task_cmd(additional_distribution, dest_dir, task_execute_cmd):
@_click.option("--test", is_flag=True)
@_click.option("--resolver", required=True)
@_click.argument(
"resolver-args", type=_click.UNPROCESSED, nargs=-1,
"resolver-args",
type=_click.UNPROCESSED,
nargs=-1,
)
def map_execute_task_cmd(
inputs, output_prefix, raw_output_data_prefix, max_concurrency, test, resolver, resolver_args,
inputs,
output_prefix,
raw_output_data_prefix,
max_concurrency,
test,
resolver,
resolver_args,
):
_click.echo(_utils.get_version_message())

_execute_map_task(
inputs, output_prefix, raw_output_data_prefix, max_concurrency, test, resolver, resolver_args,
inputs,
output_prefix,
raw_output_data_prefix,
max_concurrency,
test,
resolver,
resolver_args,
)


49 changes: 39 additions & 10 deletions flytekit/clients/friendly.py
@@ -316,7 +316,8 @@ def create_launch_plan(self, launch_plan_identifer, launch_plan_spec):
"""
super(SynchronousFlyteClient, self).create_launch_plan(
_launch_plan_pb2.LaunchPlanCreateRequest(
id=launch_plan_identifer.to_flyte_idl(), spec=launch_plan_spec.to_flyte_idl(),
id=launch_plan_identifer.to_flyte_idl(),
spec=launch_plan_spec.to_flyte_idl(),
)
)

@@ -506,7 +507,9 @@ def update_named_entity(self, resource_type, id, metadata):
"""
super(SynchronousFlyteClient, self).update_named_entity(
_common_pb2.NamedEntityUpdateRequest(
resource_type=resource_type, id=id.to_flyte_idl(), metadata=metadata.to_flyte_idl(),
resource_type=resource_type,
id=id.to_flyte_idl(),
metadata=metadata.to_flyte_idl(),
)
)

@@ -661,7 +664,12 @@ def get_node_execution_data(self, node_execution_identifier):
)

def list_node_executions(
self, workflow_execution_identifier, limit=100, token=None, filters=None, sort_by=None,
self,
workflow_execution_identifier,
limit=100,
token=None,
filters=None,
sort_by=None,
):
"""
TODO: Comment
@@ -689,7 +697,12 @@ def list_node_executions_for_task_paginated(
)

def list_node_executions_for_task_paginated(
self, task_execution_identifier, limit=100, token=None, filters=None, sort_by=None,
self,
task_execution_identifier,
limit=100,
token=None,
filters=None,
sort_by=None,
):
"""
This returns nodes spawned by a specific task execution. This is generally from things like dynamic tasks.
@@ -747,7 +760,12 @@ def get_task_execution_data(self, task_execution_identifier):
)

def list_task_executions_paginated(
self, node_execution_identifier, limit=100, token=None, filters=None, sort_by=None,
self,
node_execution_identifier,
limit=100,
token=None,
filters=None,
sort_by=None,
):
"""
:param flytekit.models.core.identifier.NodeExecutionIdentifier node_execution_identifier:
@@ -786,7 +804,9 @@ def register_project(self, project):
:rtype: flyteidl.admin.project_pb2.ProjectRegisterResponse
"""
super(SynchronousFlyteClient, self).register_project(
_project_pb2.ProjectRegisterRequest(project=project.to_flyte_idl(),)
_project_pb2.ProjectRegisterRequest(
project=project.to_flyte_idl(),
)
)

def update_project(self, project):
@@ -853,7 +873,9 @@ def update_project_domain_attributes(self, project, domain, matching_attributes)
super(SynchronousFlyteClient, self).update_project_domain_attributes(
_project_domain_attributes_pb2.ProjectDomainAttributesUpdateRequest(
attributes=_project_domain_attributes_pb2.ProjectDomainAttributes(
project=project, domain=domain, matching_attributes=matching_attributes.to_flyte_idl(),
project=project,
domain=domain,
matching_attributes=matching_attributes.to_flyte_idl(),
)
)
)
@@ -888,7 +910,9 @@ def get_project_domain_attributes(self, project, domain, resource_type):
"""
return super(SynchronousFlyteClient, self).get_project_domain_attributes(
_project_domain_attributes_pb2.ProjectDomainAttributesGetRequest(
project=project, domain=domain, resource_type=resource_type,
project=project,
domain=domain,
resource_type=resource_type,
)
)

@@ -903,7 +927,10 @@ def get_workflow_attributes(self, project, domain, workflow, resource_type):
"""
return super(SynchronousFlyteClient, self).get_workflow_attributes(
_workflow_attributes_pb2.WorkflowAttributesGetRequest(
project=project, domain=domain, workflow=workflow, resource_type=resource_type,
project=project,
domain=domain,
workflow=workflow,
resource_type=resource_type,
)
)

@@ -914,5 +941,7 @@ def list_matchable_attributes(self, resource_type):
:return:
"""
return super(SynchronousFlyteClient, self).list_matchable_attributes(
_matchable_resource_pb2.ListMatchableAttributesRequest(resource_type=resource_type,)
_matchable_resource_pb2.ListMatchableAttributesRequest(
resource_type=resource_type,
)
)
16 changes: 13 additions & 3 deletions flytekit/clients/helpers.py
@@ -1,5 +1,9 @@
def iterate_node_executions(
client, workflow_execution_identifier=None, task_execution_identifier=None, limit=None, filters=None,
client,
workflow_execution_identifier=None,
task_execution_identifier=None,
limit=None,
filters=None,
):
"""
This returns a generator for node executions.
@@ -25,7 +29,10 @@ def iterate_node_executions(
)
else:
node_execs, next_token = client.list_node_executions_for_task_paginated(
task_execution_identifier=task_execution_identifier, limit=num_to_fetch, token=token, filters=filters,
task_execution_identifier=task_execution_identifier,
limit=num_to_fetch,
token=token,
filters=filters,
)
for n in node_execs:
counter += 1
@@ -53,7 +60,10 @@ def iterate_task_executions(client, node_execution_identifier, limit=None, filte
counter = 0
while True:
task_execs, next_token = client.list_task_executions_paginated(
node_execution_identifier=node_execution_identifier, limit=num_to_fetch, token=token, filters=filters,
node_execution_identifier=node_execution_identifier,
limit=num_to_fetch,
token=token,
filters=filters,
)
for t in task_execs:
counter += 1
11 changes: 9 additions & 2 deletions flytekit/clients/raw.py
@@ -151,7 +151,9 @@ def __init__(self, url, insecure=False, credentials=None, options=None):
self._channel = _insecure_channel(url, options=list((options or {}).items()))
else:
self._channel = _secure_channel(
url, credentials or _ssl_channel_credentials(), options=list((options or {}).items()),
url,
credentials or _ssl_channel_credentials(),
options=list((options or {}).items()),
)
self._stub = _admin_service.AdminServiceStub(self._channel)
self._metadata = None
@@ -165,7 +167,12 @@ def url(self) -> str:
def set_access_token(self, access_token):
# Always set the header to lower-case regardless of what the config is. The grpc libraries that Admin uses
# to parse the metadata don't change the metadata, but they do automatically lower the key you're looking for.
self._metadata = [(_creds_config.AUTHORIZATION_METADATA_KEY.get().lower(), "Bearer {}".format(access_token),)]
self._metadata = [
(
_creds_config.AUTHORIZATION_METADATA_KEY.get().lower(),
"Bearer {}".format(access_token),
)
]

def force_auth_flow(self):
refresh_handler_fn = _get_refresh_handler(_creds_config.AUTH_MODE.get())