Skip to content

Commit

Permalink
cleaned up imports
Browse files Browse the repository at this point in the history
  • Loading branch information
daniel-sanche committed Nov 12, 2024
1 parent e8d122e commit d489ad3
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 25 deletions.
1 change: 1 addition & 0 deletions google/cloud/bigtable/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@
CrossSync.add_mapping("_ReadRowsOperation", _ReadRowsOperationAsync)
CrossSync.add_mapping("_MutateRowsOperation", _MutateRowsOperationAsync)
CrossSync.add_mapping("ExecuteQueryIterator", ExecuteQueryIteratorAsync)
CrossSync.add_mapping("MutationsBatcher", MutationsBatcherAsync)


__version__: str = package_version.__version__
Expand Down
35 changes: 15 additions & 20 deletions google/cloud/bigtable/data/_async/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,30 +83,25 @@
from google.cloud.bigtable_v2.services.bigtable.transports import (
BigtableGrpcAsyncIOTransport as TransportType,
)
from google.cloud.bigtable.data._async.mutations_batcher import (
MutationsBatcherAsync,
_MB_SIZE,
)
from google.cloud.bigtable.data.execute_query._async.execute_query_iterator import (
ExecuteQueryIteratorAsync,
)

from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE
else:
from grpc import insecure_channel
from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport as TransportType # type: ignore
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import ( # noqa: F401
MutationsBatcher,
_MB_SIZE,
)
from google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator import ( # noqa: F401
ExecuteQueryIterator,
)


if TYPE_CHECKING:
from google.cloud.bigtable.data._helpers import RowKeySamples
from google.cloud.bigtable.data._helpers import ShardedQuery

if CrossSync.is_async:
from google.cloud.bigtable.data._async.mutations_batcher import (
MutationsBatcherAsync,
)
from google.cloud.bigtable.data.execute_query._async.execute_query_iterator import (
ExecuteQueryIteratorAsync,
)


__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.client"


Expand Down Expand Up @@ -372,8 +367,8 @@ async def _manage_channel(
await old_channel.close(grace_period)
else:
if grace_period:
self._is_closed.wait(grace_period)
old_channel.close()
self._is_closed.wait(grace_period) # type: ignore
old_channel.close() # type: ignore
# subtract the time spent waiting for the channel to be replaced
next_refresh = random.uniform(refresh_interval_min, refresh_interval_max)
next_sleep = max(next_refresh - (time.monotonic() - start_timestamp), 0)
Expand Down Expand Up @@ -421,7 +416,7 @@ async def _register_instance(
}
)
async def _remove_instance_registration(
self, instance_id: str, owner: TableAsync | ExecuteQueryIteratorAsync
self, instance_id: str, owner: TableAsync | "ExecuteQueryIteratorAsync"
) -> bool:
"""
Removes an instance from the client's registered instances, to prevent
Expand Down Expand Up @@ -585,7 +580,7 @@ async def execute_query(
"proto_format": {},
}

return ExecuteQueryIteratorAsync(
return CrossSync.ExecuteQueryIterator(
self,
instance_id,
app_profile_id,
Expand Down Expand Up @@ -1132,7 +1127,7 @@ def mutations_batcher(
batch_attempt_timeout: float | None | TABLE_DEFAULT = TABLE_DEFAULT.MUTATE_ROWS,
batch_retryable_errors: Sequence[type[Exception]]
| TABLE_DEFAULT = TABLE_DEFAULT.MUTATE_ROWS,
) -> MutationsBatcherAsync:
) -> "MutationsBatcherAsync":
"""
Returns a new mutations batcher instance.
Expand Down
4 changes: 1 addition & 3 deletions google/cloud/bigtable/data/_async/mutations_batcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,9 +176,7 @@ async def add_to_flow(self, mutations: RowMutationEntry | list[RowMutationEntry]
yield mutations[start_idx:end_idx]


@CrossSync.convert_class(
sync_name="MutationsBatcher", add_mapping_for_name="MutationsBatcher"
)
@CrossSync.convert_class(sync_name="MutationsBatcher")
class MutationsBatcherAsync:
"""
Allows users to send batches using context manager API:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,6 @@
if TYPE_CHECKING:
if CrossSync.is_async:
from google.cloud.bigtable.data import BigtableDataClientAsync as DataClientType
else:
from google.cloud.bigtable.data import BigtableDataClient as DataClientType

__CROSS_SYNC_OUTPUT__ = (
"google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator"
Expand Down

0 comments on commit d489ad3

Please sign in to comment.