
Commit

Rename method
Signed-off-by: Tsotne Tabidze <[email protected]>
Tsotne Tabidze committed Jul 15, 2021
1 parent c896f0b commit fa0c494
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion sdk/python/feast/infra/offline_stores/redshift.py
@@ -151,7 +151,7 @@ def to_arrow(self) -> pa.Table:

     def to_s3(self) -> str:
         """ Export dataset to S3 in Parquet format and return path """
-        aws_utils.unload_redshift_query_to_s3(
+        aws_utils.execute_redshift_query_and_unload_to_s3(
             self._redshift_client,
             self._config.offline_store.cluster_id,
             self._config.offline_store.database,
4 changes: 2 additions & 2 deletions sdk/python/feast/infra/utils/aws_utils.py
@@ -228,7 +228,7 @@ def delete_s3_directory(s3_resource, bucket: str, key: str):
         obj.delete()


-def unload_redshift_query_to_s3(
+def execute_redshift_query_and_unload_to_s3(
     redshift_data_client,
     cluster_id: str,
     database: str,
@@ -262,7 +262,7 @@ def unload_redshift_query_to_pa(
""" Unload Redshift Query results to S3 and get the results in PyArrow Table format """
bucket, key = get_bucket_and_key(s3_path)

unload_redshift_query_to_s3(
execute_redshift_query_and_unload_to_s3(
redshift_data_client, cluster_id, database, user, s3_path, iam_role, query
)

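For context, a minimal sketch of how calling code would invoke the helper under its new name. The parameter order (redshift_data_client, cluster_id, database, user, s3_path, iam_role, query) is taken from the call site in unload_redshift_query_to_pa above; the boto3 client setup and all concrete values are illustrative assumptions, not part of this commit.

import boto3

from feast.infra.utils import aws_utils

# Hypothetical values for illustration only; the commit changes nothing but the function name.
redshift_data_client = boto3.client("redshift-data")

aws_utils.execute_redshift_query_and_unload_to_s3(
    redshift_data_client,                       # Redshift Data API client
    "my-redshift-cluster",                      # cluster_id
    "dev",                                      # database
    "awsuser",                                  # user
    "s3://my-bucket/feast-unload",              # s3_path for the unloaded Parquet files
    "arn:aws:iam::123456789012:role/feast-s3",  # iam_role used for the UNLOAD to S3
    "SELECT * FROM driver_hourly_stats",        # query to execute and unload
)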

