Skip to content

Commit

Permalink
Merge pull request #573 from laughingman7743/pin_uv_tox
Browse files Browse the repository at this point in the history
Pin the version of uv and tox
  • Loading branch information
laughingman7743 authored Jan 12, 2025
2 parents b58ec77 + 1849787 commit 3ee151e
Show file tree
Hide file tree
Showing 7 changed files with 19 additions and 31 deletions.
4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,5 @@ docs:

.PHONY: tool
tool:
uv tool install ruff
uv tool install tox --with tox-uv --with tox-gh-actions
uv tool install ruff@0.9.1
uv tool install tox@4.23.2 --with tox-uv --with tox-gh-actions
6 changes: 3 additions & 3 deletions pyathena/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,9 +175,9 @@ def __init__(
self.profile_name = profile_name
self.config: Optional[Config] = config if config else Config()

assert (
self.s3_staging_dir or self.work_group
), "Required argument `s3_staging_dir` or `work_group` not found."
assert self.s3_staging_dir or self.work_group, (
"Required argument `s3_staging_dir` or `work_group` not found."
)

if session:
self._session = session
Expand Down
3 changes: 1 addition & 2 deletions pyathena/sqlalchemy/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -702,8 +702,7 @@ def _get_table_location_specification(
else:
if connect_opts:
raise exc.CompileError(
"`location` or `s3_staging_dir` parameter is required "
"in the connection string"
"`location` or `s3_staging_dir` parameter is required in the connection string"
)
raise exc.CompileError(
"The location of the table should be specified "
Expand Down
12 changes: 6 additions & 6 deletions tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,18 @@ def __init__(self):
self.region_name = os.getenv("AWS_DEFAULT_REGION")
assert self.region_name, "Required environment variable `AWS_DEFAULT_REGION` not found."
self.s3_staging_dir = os.getenv("AWS_ATHENA_S3_STAGING_DIR")
assert (
self.s3_staging_dir
), "Required environment variable `AWS_ATHENA_S3_STAGING_DIR` not found."
assert self.s3_staging_dir, (
"Required environment variable `AWS_ATHENA_S3_STAGING_DIR` not found."
)
self.s3_staging_bucket, self.s3_staging_key = self.s3_staging_dir.replace(
"s3://", ""
).split("/", 1)
self.work_group = os.getenv("AWS_ATHENA_WORKGROUP")
assert self.work_group, "Required environment variable `AWS_ATHENA_WORKGROUP` not found."
self.spark_work_group = os.getenv("AWS_ATHENA_SPARK_WORKGROUP")
assert (
self.spark_work_group
), "Required environment variable `AWS_ATHENA_SPARK_WORKGROUP` not found."
assert self.spark_work_group, (
"Required environment variable `AWS_ATHENA_SPARK_WORKGROUP` not found."
)
self.default_work_group = os.getenv("AWS_ATHENA_DEFAULT_WORKGROUP", "primary")
self.schema = "pyathena_test_" + "".join(
[random.choice(string.ascii_lowercase + string.digits) for _ in range(10)]
Expand Down
20 changes: 5 additions & 15 deletions tests/pyathena/filesystem/test_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,8 +253,7 @@ def test_ls_buckets(self, fs):

def test_ls_dirs(self, fs):
dir_ = (
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/"
f"filesystem/test_ls_dirs"
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/filesystem/test_ls_dirs"
)
for i in range(5):
fs.pipe(f"{dir_}/prefix/test_{i}", bytes(i))
Expand Down Expand Up @@ -362,10 +361,7 @@ def test_info_file(self, fs):

def test_find(self, fs):
        # TODO maxdepth and withdirs options
dir_ = (
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/"
f"filesystem/test_find"
)
dir_ = f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/filesystem/test_find"
for i in range(5):
fs.pipe(f"{dir_}/prefix/test_{i}", bytes(i))
fs.touch(f"{dir_}/prefix2")
Expand Down Expand Up @@ -393,10 +389,7 @@ def test_du(self):
pass

def test_glob(self, fs):
dir_ = (
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/"
f"filesystem/test_glob"
)
dir_ = f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/filesystem/test_glob"
path = f"{dir_}/nested/test_{uuid.uuid4()}"
fs.touch(path)

Expand Down Expand Up @@ -433,8 +426,7 @@ def test_exists_object(self, fs):

def test_rm_file(self, fs):
dir_ = (
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/"
f"filesystem/test_rm_rile"
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/filesystem/test_rm_rile"
)
file = f"{dir_}/{uuid.uuid4()}"
fs.touch(file)
Expand All @@ -444,9 +436,7 @@ def test_rm_file(self, fs):
assert not fs.exists(dir_)

def test_rm(self, fs):
dir_ = (
f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/" f"filesystem/test_rm"
)
dir_ = f"s3://{ENV.s3_staging_bucket}/{ENV.s3_staging_key}{ENV.schema}/filesystem/test_rm"
file = f"{dir_}/{uuid.uuid4()}"
fs.touch(file)
fs.rm(file)
Expand Down
3 changes: 1 addition & 2 deletions tests/pyathena/pandas/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -573,6 +573,5 @@ def test_to_sql_invalid_args(cursor):
partitions=["partition_key"],
)
assert str(exc_info.value) == (
"Partition key: `partition_key` contains None values, "
"no data will be written to the table."
"Partition key: `partition_key` contains None values, no data will be written to the table."
)
2 changes: 1 addition & 1 deletion tests/pyathena/test_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ def _create_table_metadata_parameters_parquet(self):
"parquet.compress": "SNAPPY",
"serde.param.serialization.format": "1",
"serde.serialization.lib": (
"org.apache.hadoop.hive.ql.io.parquet.serde." "ParquetHiveSerDe"
"org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
),
"transient_lastDdlTime": "1234567890",
}
Expand Down

0 comments on commit 3ee151e

Please sign in to comment.