feat: convert BIGNUMERIC values to decimal objects (#414)
Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
- [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea.
- [x] Ensure the tests and linter pass
- [x] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)

Towards #367 🦕
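
With this change, `BIGNUMERIC` columns round-trip the same way `NUMERIC` already does: cells read via `client.list_rows` come back as `decimal.Decimal` objects, and `decimal.Decimal` values passed to `client.insert_rows` are serialized for `BIGNUMERIC` columns. A rough usage sketch of the intended end-user behavior (the project, dataset, and table names below are placeholders):

```python
import decimal

from google.cloud import bigquery

client = bigquery.Client()

# Placeholder table with a STRING column "account" and a BIGNUMERIC column "balance".
accounts = client.get_table("my-project.my_dataset.accounts")

# Decimal values are converted to their exact string form for the insertAll payload.
errors = client.insert_rows(
    accounts,
    [("Savings", decimal.Decimal("3.141592653589793238462643383279502884"))],
)
assert not errors

# BIGNUMERIC cells come back as decimal.Decimal (or None for NULL).
for row in client.list_rows(accounts):
    balance = row["balance"]
    assert balance is None or isinstance(balance, decimal.Decimal)
```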
tswast authored Dec 4, 2020
1 parent 985a8cf commit d472d2d
Showing 2 changed files with 67 additions and 23 deletions.
2 changes: 2 additions & 0 deletions google/cloud/bigquery/_helpers.py
@@ -188,6 +188,7 @@ def _record_from_json(value, field):
     "FLOAT": _float_from_json,
     "FLOAT64": _float_from_json,
     "NUMERIC": _decimal_from_json,
+    "BIGNUMERIC": _decimal_from_json,
     "BOOLEAN": _bool_from_json,
     "BOOL": _bool_from_json,
     "STRING": _string_from_json,
@@ -347,6 +348,7 @@ def _time_to_json(value):
     "FLOAT": _float_to_json,
     "FLOAT64": _float_to_json,
     "NUMERIC": _decimal_to_json,
+    "BIGNUMERIC": _decimal_to_json,
     "BOOLEAN": _bool_to_json,
     "BOOL": _bool_to_json,
     "BYTES": _bytes_to_json,
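These mappings mean a `BIGNUMERIC` cell goes through the same converters as `NUMERIC`: the REST API represents the value as a decimal string (or `None` for NULL), and the client hands back a `decimal.Decimal`. A minimal sketch of that round trip, assuming the mapped helpers simply wrap `decimal.Decimal` / `str` for non-null values (the standalone function names below are hypothetical stand-ins, not the library's API):

```python
import decimal

def bignumeric_from_json(value):
    # Cells arrive from the REST response as strings; NULL arrives as None.
    return decimal.Decimal(value) if value is not None else None

def bignumeric_to_json(value):
    # Decimals are serialized back to their exact string representation for insertAll.
    return str(value) if isinstance(value, decimal.Decimal) else value

cell = "-123456789.987654321"
parsed = bignumeric_from_json(cell)        # Decimal('-123456789.987654321')
assert bignumeric_to_json(parsed) == cell  # round-trips with no precision loss
```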
88 changes: 65 additions & 23 deletions tests/unit/test_client.py
@@ -6290,38 +6290,43 @@ def test_insert_rows_w_numeric(self):
         creds = _make_credentials()
         http = object()
         client = self._make_one(project=project, credentials=creds, _http=http)
-        conn = client._connection = make_connection({})
         table_ref = DatasetReference(project, ds_id).table(table_id)
-        schema = [SchemaField("account", "STRING"), SchemaField("balance", "NUMERIC")]
-        insert_table = table.Table(table_ref, schema=schema)
         rows = [
             ("Savings", decimal.Decimal("23.47")),
             ("Checking", decimal.Decimal("1.98")),
             ("Mortgage", decimal.Decimal("-12345678909.87654321")),
         ]
+        schemas = [
+            [SchemaField("account", "STRING"), SchemaField("balance", "NUMERIC")],
+            [SchemaField("account", "STRING"), SchemaField("balance", "BIGNUMERIC")],
+        ]

-        with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))):
-            errors = client.insert_rows(insert_table, rows)
+        for schema in schemas:
+            conn = client._connection = make_connection({})

-        self.assertEqual(len(errors), 0)
-        rows_json = [
-            {"account": "Savings", "balance": "23.47"},
-            {"account": "Checking", "balance": "1.98"},
-            {"account": "Mortgage", "balance": "-12345678909.87654321"},
-        ]
-        sent = {
-            "rows": [
-                {"json": row, "insertId": str(i)} for i, row in enumerate(rows_json)
+            insert_table = table.Table(table_ref, schema=schema)
+            with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))):
+                errors = client.insert_rows(insert_table, rows)
+
+            self.assertEqual(len(errors), 0)
+            rows_json = [
+                {"account": "Savings", "balance": "23.47"},
+                {"account": "Checking", "balance": "1.98"},
+                {"account": "Mortgage", "balance": "-12345678909.87654321"},
             ]
-        }
-        conn.api_request.assert_called_once_with(
-            method="POST",
-            path="/projects/{}/datasets/{}/tables/{}/insertAll".format(
-                project, ds_id, table_id
-            ),
-            data=sent,
-            timeout=None,
-        )
+            sent = {
+                "rows": [
+                    {"json": row, "insertId": str(i)} for i, row in enumerate(rows_json)
+                ]
+            }
+            conn.api_request.assert_called_once_with(
+                method="POST",
+                path="/projects/{}/datasets/{}/tables/{}/insertAll".format(
+                    project, ds_id, table_id
+                ),
+                data=sent,
+                timeout=None,
+            )

     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_insert_rows_from_dataframe(self):
@@ -6915,6 +6920,43 @@ def test_list_rows_query_params(self):
             test[1]["formatOptions.useInt64Timestamp"] = True
             self.assertEqual(req[1]["query_params"], test[1], "for kwargs %s" % test[0])

+    def test_list_rows_w_numeric(self):
+        from google.cloud.bigquery.schema import SchemaField
+        from google.cloud.bigquery.table import Table
+
+        resource = {
+            "totalRows": 3,
+            "rows": [
+                {"f": [{"v": "-1.23456789"}, {"v": "-123456789.987654321"}]},
+                {"f": [{"v": None}, {"v": "3.141592653589793238462643383279502884"}]},
+                {"f": [{"v": "2718281828459045235360287471.352662497"}, {"v": None}]},
+            ],
+        }
+        creds = _make_credentials()
+        http = object()
+        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
+        client._connection = make_connection(resource)
+        schema = [
+            SchemaField("num", "NUMERIC"),
+            SchemaField("bignum", "BIGNUMERIC"),
+        ]
+        table = Table(self.TABLE_REF, schema=schema)
+
+        iterator = client.list_rows(table)
+        rows = list(iterator)
+
+        self.assertEqual(len(rows), 3)
+        self.assertEqual(rows[0]["num"], decimal.Decimal("-1.23456789"))
+        self.assertEqual(rows[0]["bignum"], decimal.Decimal("-123456789.987654321"))
+        self.assertIsNone(rows[1]["num"])
+        self.assertEqual(
+            rows[1]["bignum"], decimal.Decimal("3.141592653589793238462643383279502884")
+        )
+        self.assertEqual(
+            rows[2]["num"], decimal.Decimal("2718281828459045235360287471.352662497")
+        )
+        self.assertIsNone(rows[2]["bignum"])
+
     def test_list_rows_repeated_fields(self):
         from google.cloud.bigquery.schema import SchemaField

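The long sample values in the new test are exactly why these columns are surfaced as `decimal.Decimal` rather than `float`: a Python `float` only carries about 15–17 significant digits, while `Decimal` preserves the full BIGNUMERIC value. A quick illustration:

```python
import decimal

cell = "3.141592653589793238462643383279502884"  # sample BIGNUMERIC cell from the new test

# float silently rounds away everything past double precision.
print(float(cell))            # 3.141592653589793

# Decimal keeps the value exactly as BigQuery returned it.
print(decimal.Decimal(cell))  # 3.141592653589793238462643383279502884
```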
