feat: use DML batches in executemany() method #412

Merged · 25 commits · Aug 9, 2021
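
For context, a hedged usage sketch of the behavior this PR changes (the instance, database, table, and values below are illustrative, not taken from the PR): after this change, `executemany()` over an INSERT or UPDATE statement sends all parameter sets to Cloud Spanner as a single batch DML request instead of issuing one statement per parameter set.

```python
# Hedged usage sketch (illustrative table, IDs, and values).
# With this change, the three parameter tuples below are sent to
# Cloud Spanner as one batch DML request rather than three separate
# ExecuteSql round trips.
from google.cloud.spanner_dbapi import connect

connection = connect("instance-id", "database-id")
cursor = connection.cursor()

cursor.executemany(
    "INSERT INTO Singers (SingerId, FirstName) VALUES (%s, %s)",
    [(1, "Marc"), (2, "Elena"), (3, "Liu")],
)
connection.commit()
```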
50 changes: 34 additions & 16 deletions google/cloud/spanner_dbapi/connection.py
@@ -32,6 +32,8 @@
 from google.cloud.spanner_dbapi.version import DEFAULT_USER_AGENT
 from google.cloud.spanner_dbapi.version import PY_VERSION
 
+from google.rpc.code_pb2 import ABORTED
+
 
 AUTOCOMMIT_MODE_WARNING = "This method is non-operational in autocommit mode"
 MAX_INTERNAL_RETRIES = 50
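
The `ABORTED` symbol imported above is the numeric canonical gRPC status code from `google.rpc.code_pb2`, which is why the retry logic in the next hunk can compare it directly against `status.code`. A quick hedged check of the values involved:

```python
# Canonical gRPC status codes used by the retry logic below.
# ABORTED is what Spanner returns when a transaction should be retried.
from google.rpc.code_pb2 import ABORTED, OK

print(OK)       # 0
print(ABORTED)  # 10
```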
@@ -175,25 +177,41 @@ def _rerun_previous_statements(self):
         from the last transaction.
         """
         for statement in self._statements:
-            res_iter, retried_checksum = self.run_statement(statement, retried=True)
-            # executing all the completed statements
-            if statement != self._statements[-1]:
-                for res in res_iter:
-                    retried_checksum.consume_result(res)
-
-                _compare_checksums(statement.checksum, retried_checksum)
-            # executing the failed statement
-            else:
-                # streaming up to the failed result or
-                # to the end of the streaming iterator
-                while len(retried_checksum) < len(statement.checksum):
-                    try:
-                        res = next(iter(res_iter))
-                        retried_checksum.consume_result(res)
-                    except StopIteration:
-                        break
-
-                _compare_checksums(statement.checksum, retried_checksum)
+            if isinstance(statement, list):
+                statements, checksum = statement
+
+                transaction = self.transaction_checkout()
+
+                status, res = transaction.batch_update(statements)
+
+                if status.code == ABORTED:
+                    self.connection._transaction = None
+                    raise Aborted(status.details)
+
+                retried_checksum = ResultsChecksum()
+                retried_checksum.consume_result(res)
+                retried_checksum.consume_result(status.code)
+
+                _compare_checksums(checksum, retried_checksum)
+            else:
+                res_iter, retried_checksum = self.run_statement(statement, retried=True)
+                # executing all the completed statements
+                if statement != self._statements[-1]:
+                    for res in res_iter:
+                        retried_checksum.consume_result(res)
+
+                    _compare_checksums(statement.checksum, retried_checksum)
+                # executing the failed statement
+                else:
+                    # streaming up to the failed result or
+                    # to the end of the streaming iterator
+                    while len(retried_checksum) < len(statement.checksum):
+                        try:
+                            res = next(iter(res_iter))
+                            retried_checksum.consume_result(res)
+                        except StopIteration:
+                            break
+
+                    _compare_checksums(statement.checksum, retried_checksum)
 
     def transaction_checkout(self):
         """Get a Cloud Spanner transaction.
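
On retry, a remembered DML batch is re-run through `transaction.batch_update()` and its results are checksummed and compared against the checksum captured on the first attempt; a mismatch means the data changed between attempts, so the retry must not be applied silently. A conceptual sketch of that comparison (not the library's `ResultsChecksum` implementation, just the idea behind it):

```python
# Conceptual sketch only -- not the library's ResultsChecksum code.
# The retried batch must produce exactly the same results as the
# original attempt; otherwise retrying would hide a change made by
# another transaction.
import hashlib
import pickle


class SketchChecksum:
    """Order-sensitive digest of consumed results (illustrative)."""

    def __init__(self):
        self._digest = hashlib.sha256()
        self.count = 0

    def consume_result(self, result):
        # Fold each result (row counts, status codes) into the digest.
        self._digest.update(pickle.dumps(result))
        self.count += 1

    def digest(self):
        return self._digest.digest()


def compare_sketch_checksums(original, retried):
    # Mirrors the intent of _compare_checksums(): differing digests mean
    # the retried transaction saw different data, so abort the retry.
    if original.digest() != retried.digest():
        raise RuntimeError("Results of the retried transaction differ")
```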
58 changes: 55 additions & 3 deletions google/cloud/spanner_dbapi/cursor.py
@@ -41,6 +41,8 @@
 from google.cloud.spanner_dbapi.utils import PeekIterator
 from google.cloud.spanner_dbapi.utils import StreamedManyResultSets
 
+from google.rpc.code_pb2 import ABORTED, OK
+
 _UNSET_COUNT = -1
 
 ColumnDetails = namedtuple("column_details", ["null_ok", "spanner_type"])
@@ -156,6 +158,15 @@ def _do_execute_update(self, transaction, sql, params):
 
         return result
 
+    def _do_batch_update(self, transaction, statements, many_result_set):
+        status, res = transaction.batch_update(statements)
+        many_result_set.add_iter(res)
+
+        if status.code == ABORTED:
+            raise Aborted(status.details)
+        elif status.code != OK:
+            raise OperationalError(status.details)
+
     def execute(self, sql, args=None):
         """Prepares and executes a Spanner database operation.
 
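
`Transaction.batch_update()` returns a `(status, row_counts)` pair, which is why only `ABORTED` is translated into `Aborted` (so the transaction can be retried) while any other non-`OK` status surfaces as `OperationalError`. The `statements` argument is a sequence of `(sql, params, param_types)` tuples; a hedged sketch of its shape (the table and values are illustrative, and the `@a0`/`@a1` placeholder names are an assumption about what `sql_pyformat_args_to_spanner()` produces):

```python
# Illustrative shape of the `statements` list passed to batch_update().
# Each entry is (sql, params, param_types); the @a0/@a1 placeholder names
# are an assumption about sql_pyformat_args_to_spanner() output.
from google.cloud.spanner_v1 import param_types

statements = [
    (
        "INSERT INTO Singers (SingerId, FirstName) VALUES (@a0, @a1)",
        {"a0": 1, "a1": "Marc"},
        {"a0": param_types.INT64, "a1": param_types.STRING},
    ),
    (
        "INSERT INTO Singers (SingerId, FirstName) VALUES (@a0, @a1)",
        {"a0": 2, "a1": "Elena"},
        {"a0": param_types.INT64, "a1": param_types.STRING},
    ),
]
```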
@@ -258,9 +269,50 @@ def executemany(self, operation, seq_of_params):
 
         many_result_set = StreamedManyResultSets()
 
-        for params in seq_of_params:
-            self.execute(operation, params)
-            many_result_set.add_iter(self._itr)
+        if classification in (parse_utils.STMT_INSERT, parse_utils.STMT_UPDATING):
+            statements = []
+
+            for params in seq_of_params:
+                sql, params = parse_utils.sql_pyformat_args_to_spanner(
+                    operation, params
+                )
+                statements.append((sql, params, get_param_types(params)))
+
+            if self.connection.autocommit:
+                self.connection.database.run_in_transaction(
+                    self._do_batch_update, statements, many_result_set
+                )
+            else:
+                retried = False
+                while True:
+                    try:
+                        transaction = self.connection.transaction_checkout()
+
+                        res_checksum = ResultsChecksum()
+                        if not retried:
+                            self.connection._statements.append(
+                                (statements, res_checksum)
+                            )
+
+                        status, res = transaction.batch_update(statements)
+                        many_result_set.add_iter(res)
+                        res_checksum.consume_result(res)
+                        res_checksum.consume_result(status.code)
+
+                        if status.code == ABORTED:
+                            self.connection._transaction = None
+                            raise Aborted(status.details)
+                        elif status.code != OK:
+                            raise OperationalError(status.details)
+                        break
+                    except Aborted:
+                        self.connection.retry_transaction()
+                        retried = True
+
+        else:
+            for params in seq_of_params:
+                self.execute(operation, params)
+                many_result_set.add_iter(self._itr)
 
         self._result_set = many_result_set
         self._itr = many_result_set
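
Note the fall-through: only operations classified as inserts or updates take the new batch path; anything else (a SELECT, for example) keeps the old per-parameter-set `execute()` loop. A small hedged check of that classification (assuming the `parse_utils` helpers behave as their names suggest):

```python
# Hedged sketch: which statements take the new batch DML path.
# Assumes classify_stmt() and the STMT_* constants behave as named.
from google.cloud.spanner_dbapi import parse_utils

batched = parse_utils.classify_stmt("INSERT INTO Singers (SingerId) VALUES (%s)")
not_batched = parse_utils.classify_stmt("SELECT SingerId FROM Singers WHERE SingerId = %s")

assert batched in (parse_utils.STMT_INSERT, parse_utils.STMT_UPDATING)
assert not_batched not in (parse_utils.STMT_INSERT, parse_utils.STMT_UPDATING)
```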