diff --git a/examples/quickstart_aws/pyproject.toml b/examples/quickstart_aws/pyproject.toml
index fed528d4a7a14..092e7f576cd64 100644
--- a/examples/quickstart_aws/pyproject.toml
+++ b/examples/quickstart_aws/pyproject.toml
@@ -1,3 +1,6 @@
 [build-system]
 requires = ["setuptools"]
 build-backend = "setuptools.build_meta"
+
+[tool.dagster]
+module_name = "quickstart_aws"
diff --git a/examples/quickstart_aws/quickstart_aws/__init__.py b/examples/quickstart_aws/quickstart_aws/__init__.py
index e465d1b3fece2..a22903e7adf79 100644
--- a/examples/quickstart_aws/quickstart_aws/__init__.py
+++ b/examples/quickstart_aws/quickstart_aws/__init__.py
@@ -1 +1,32 @@
-from .repository import quickstart_aws
+from dagster_aws.s3.io_manager import s3_pickle_io_manager
+from dagster_aws.s3.resources import s3_resource
+
+from dagster import (
+    Definitions,
+    ScheduleDefinition,
+    define_asset_job,
+    load_assets_from_package_module,
+)
+
+from . import assets
+
+daily_refresh_schedule = ScheduleDefinition(
+    job=define_asset_job(name="all_assets_job"), cron_schedule="0 0 * * *"
+)
+
+
+defs = Definitions(
+    assets=load_assets_from_package_module(assets),
+    # The AWS resources use boto under the hood, so if you are accessing your private
+    # buckets, you will need to provide the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
+    # environment variables or follow one of the other boto authentication methods.
+    # Read about using environment variables and secrets in Dagster:
+    # https://docs.dagster.io/guides/dagster/using-environment-variables-and-secrets
+    resources={
+        # With this I/O manager in place, your job runs will store data passed between assets
+        # on S3 in the location s3://<your-bucket>/dagster/storage/<asset-key>.
+        "io_manager": s3_pickle_io_manager.configured({"s3_bucket": {"env": "S3_BUCKET"}}),
+        "s3": s3_resource,
+    },
+    schedules=[daily_refresh_schedule],
+)
diff --git a/examples/quickstart_aws/quickstart_aws/repository.py b/examples/quickstart_aws/quickstart_aws/repository.py
deleted file mode 100644
index 85b3683c97173..0000000000000
--- a/examples/quickstart_aws/quickstart_aws/repository.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from dagster_aws.s3.io_manager import s3_pickle_io_manager
-from dagster_aws.s3.resources import s3_resource
-
-from dagster import (
-    ScheduleDefinition,
-    define_asset_job,
-    load_assets_from_package_module,
-    repository,
-    with_resources,
-)
-
-from . import assets
-
-daily_refresh_schedule = ScheduleDefinition(
-    job=define_asset_job(name="all_assets_job"), cron_schedule="0 0 * * *"
-)
-
-
-@repository
-def quickstart_aws():
-    return [
-        *with_resources(
-            load_assets_from_package_module(assets),
-            # The AWS resources use boto under the hood, so if you are accessing your private
-            # buckets, you will need to provide the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
-            # environment variables or follow one of the other boto authentication methods.
-            # Read about using environment variables and secrets in Dagster:
-            # https://docs.dagster.io/guides/dagster/using-environment-variables-and-secrets
-            resource_defs={
-                # With this I/O manager in place, your job runs will store data passed between assets
-                # on S3 in the location s3://<your-bucket>/dagster/storage/<asset-key>.
-                "io_manager": s3_pickle_io_manager.configured({"s3_bucket": {"env": "S3_BUCKET"}}),
-                "s3": s3_resource,
-            },
-        ),
-        daily_refresh_schedule,
-    ]
diff --git a/examples/quickstart_aws/quickstart_aws_tests/test_defs.py b/examples/quickstart_aws/quickstart_aws_tests/test_defs.py
new file mode 100644
index 0000000000000..51b8d4cd2f120
--- /dev/null
+++ b/examples/quickstart_aws/quickstart_aws_tests/test_defs.py
@@ -0,0 +1,5 @@
+from quickstart_aws import defs
+
+
+def test_def_can_load():
+    assert defs.get_job_def("all_assets_job")
diff --git a/examples/quickstart_aws/workspace.yaml b/examples/quickstart_aws/workspace.yaml
deleted file mode 100644
index 6991699b3d595..0000000000000
--- a/examples/quickstart_aws/workspace.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-load_from:
-  - python_package: quickstart_aws