diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 453b540c..773c1dfd 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61
-# created: 2023-11-08T19:46:45.022803742Z
+ digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c
+# created: 2023-11-29T14:54:29.548172703Z
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 0de0f691..17369fa2 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python: ['3.7', '3.8', '3.9', '3.10', '3.11']
+ python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
steps:
- name: Checkout
uses: actions/checkout@v3
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 8957e211..e5c1ffca 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -93,30 +93,30 @@ colorlog==6.7.0 \
# via
# gcp-docuploader
# nox
-cryptography==41.0.5 \
- --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \
- --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \
- --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \
- --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \
- --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \
- --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \
- --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \
- --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \
- --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \
- --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \
- --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \
- --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \
- --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \
- --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \
- --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \
- --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \
- --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \
- --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \
- --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \
- --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \
- --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \
- --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \
- --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723
+cryptography==41.0.6 \
+ --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
+ --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
+ --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
+ --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
+ --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
+ --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
+ --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
+ --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
+ --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
+ --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
+ --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
+ --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
+ --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
+ --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
+ --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
+ --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
+ --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
+ --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
+ --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
+ --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
+ --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
+ --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
+ --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
# via
# gcp-releasetool
# secretstorage
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 00000000..17c144c4
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-pandas/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-pandas/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 00000000..98efde4d
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-bigquery-pandas/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index b93f2240..487deabb 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.11 -- -k <name of test>
+ $ nox -s unit-3.12 -- -k <name of test>
.. note::
@@ -143,12 +143,12 @@ Running System Tests
$ nox -s system
# Run a single system test
- $ nox -s system-3.11 -- -k <name of system test>
+ $ nox -s system-3.12 -- -k <name of system test>
.. note::
- System tests are only configured to run under Python 3.7, 3.8, 3.9, 3.10 and 3.11.
+ System tests are only configured to run under Python 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12.
For expediency, we do not run them in older versions of Python 3.
This alone will not run the tests. You'll need to change some local
@@ -226,12 +226,14 @@ We support:
- `Python 3.9`_
- `Python 3.10`_
- `Python 3.11`_
+- `Python 3.12`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
+.. _Python 3.12: https://docs.python.org/3.12/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/docs/reading.rst b/docs/reading.rst
index c5e814bf..6361280a 100644
--- a/docs/reading.rst
+++ b/docs/reading.rst
@@ -28,7 +28,7 @@ destination DataFrame as well as a preferred column order as follows:
'SELECT * FROM `test_dataset.test_table`',
project_id=projectid,
index_col='index_column_name',
- col_order=['col1', 'col2', 'col3'])
+             columns=['col1', 'col2', 'col3'])
Querying with legacy SQL syntax
-------------------------------
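
A short usage sketch of the renamed parameter as it now reads in the docs; the project and table names below are placeholders, not values from this change:

    import pandas_gbq

    # Preferred spelling going forward.
    df = pandas_gbq.read_gbq(
        "SELECT * FROM `test_dataset.test_table`",
        project_id="my-project",
        index_col="index_column_name",
        columns=["col1", "col2", "col3"],
    )

    # col_order is still accepted as a keyword-only alias, but passing both
    # columns and col_order raises ValueError.
    df = pandas_gbq.read_gbq(
        "SELECT * FROM `test_dataset.test_table`",
        project_id="my-project",
        col_order=["col1", "col2", "col3"],
    )
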
diff --git a/noxfile.py b/noxfile.py
index 051f5da4..a0d5a94c 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -32,7 +32,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
-UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
@@ -57,7 +57,7 @@
UNIT_TEST_PYTHON_VERSIONS[-1],
]
-SYSTEM_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
SYSTEM_TEST_STANDARD_DEPENDENCIES = [
"mock",
"pytest",
diff --git a/owlbot.py b/owlbot.py
index e648334d..e6c59be4 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -35,8 +35,8 @@
}
extras = ["tqdm"]
templated_files = common.py_library(
- unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11"],
- system_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11"],
+ unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"],
+ system_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"],
cov_level=96,
unit_test_external_dependencies=["freezegun"],
unit_test_extras=extras,
diff --git a/pandas_gbq/features.py b/pandas_gbq/features.py
index ad20c640..4b70a14a 100644
--- a/pandas_gbq/features.py
+++ b/pandas_gbq/features.py
@@ -23,15 +23,15 @@ def __init__(self):
@property
def bigquery_installed_version(self):
import google.cloud.bigquery
- import pkg_resources
+ import packaging.version
if self._bigquery_installed_version is not None:
return self._bigquery_installed_version
- self._bigquery_installed_version = pkg_resources.parse_version(
+ self._bigquery_installed_version = packaging.version.parse(
google.cloud.bigquery.__version__
)
- bigquery_minimum_version = pkg_resources.parse_version(BIGQUERY_MINIMUM_VERSION)
+ bigquery_minimum_version = packaging.version.parse(BIGQUERY_MINIMUM_VERSION)
if self._bigquery_installed_version < bigquery_minimum_version:
raise ImportError(
@@ -45,68 +45,68 @@ def bigquery_installed_version(self):
@property
def bigquery_has_accurate_timestamp(self):
- import pkg_resources
+ import packaging.version
- min_version = pkg_resources.parse_version(BIGQUERY_ACCURATE_TIMESTAMP_VERSION)
+ min_version = packaging.version.parse(BIGQUERY_ACCURATE_TIMESTAMP_VERSION)
return self.bigquery_installed_version >= min_version
@property
def bigquery_has_bignumeric(self):
- import pkg_resources
+ import packaging.version
- min_version = pkg_resources.parse_version(BIGQUERY_SUPPORTS_BIGNUMERIC_VERSION)
+ min_version = packaging.version.parse(BIGQUERY_SUPPORTS_BIGNUMERIC_VERSION)
return self.bigquery_installed_version >= min_version
@property
def bigquery_has_from_dataframe_with_csv(self):
- import pkg_resources
+ import packaging.version
- bigquery_from_dataframe_version = pkg_resources.parse_version(
+ bigquery_from_dataframe_version = packaging.version.parse(
BIGQUERY_FROM_DATAFRAME_CSV_VERSION
)
return self.bigquery_installed_version >= bigquery_from_dataframe_version
@property
def bigquery_needs_date_as_object(self):
- import pkg_resources
+ import packaging.version
- max_version = pkg_resources.parse_version(BIGQUERY_NO_DATE_AS_OBJECT_VERSION)
+ max_version = packaging.version.parse(BIGQUERY_NO_DATE_AS_OBJECT_VERSION)
return self.bigquery_installed_version < max_version
@property
def pandas_installed_version(self):
import pandas
- import pkg_resources
+ import packaging.version
if self._pandas_installed_version is not None:
return self._pandas_installed_version
- self._pandas_installed_version = pkg_resources.parse_version(pandas.__version__)
+ self._pandas_installed_version = packaging.version.parse(pandas.__version__)
return self._pandas_installed_version
@property
def pandas_has_deprecated_verbose(self):
- import pkg_resources
+ import packaging.version
# Add check for Pandas version before showing deprecation warning.
# https://github.com/pydata/pandas-gbq/issues/157
- pandas_verbosity_deprecation = pkg_resources.parse_version(
+ pandas_verbosity_deprecation = packaging.version.parse(
PANDAS_VERBOSITY_DEPRECATION_VERSION
)
return self.pandas_installed_version >= pandas_verbosity_deprecation
@property
def pandas_has_boolean_dtype(self):
- import pkg_resources
+ import packaging.version
- desired_version = pkg_resources.parse_version(PANDAS_BOOLEAN_DTYPE_VERSION)
+ desired_version = packaging.version.parse(PANDAS_BOOLEAN_DTYPE_VERSION)
return self.pandas_installed_version >= desired_version
@property
def pandas_has_parquet_with_lossless_timestamp(self):
- import pkg_resources
+ import packaging.version
- desired_version = pkg_resources.parse_version(
+ desired_version = packaging.version.parse(
PANDAS_PARQUET_LOSSLESS_TIMESTAMP_VERSION
)
return self.pandas_installed_version >= desired_version
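
The pkg_resources.parse_version to packaging.version.parse migration is behavior-preserving for these feature checks; a small sketch of the comparison semantics they rely on (the version strings are illustrative):

    import packaging.version

    installed = packaging.version.parse("3.4.0")
    minimum = packaging.version.parse("3.3.5")

    # Version objects compare numerically per release segment, so "3.10.0"
    # sorts above "3.4.0" even though it would sort below it as a plain string.
    assert installed >= minimum
    assert packaging.version.parse("3.10.0") > packaging.version.parse("3.4.0")
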
diff --git a/pandas_gbq/gbq.py b/pandas_gbq/gbq.py
index 3d43884a..dbb9e5b5 100644
--- a/pandas_gbq/gbq.py
+++ b/pandas_gbq/gbq.py
@@ -35,9 +35,9 @@
def _test_google_api_imports():
try:
- import pkg_resources # noqa
+ import packaging # noqa
except ImportError as ex: # pragma: NO COVER
- raise ImportError("pandas-gbq requires setuptools") from ex
+        raise ImportError("pandas-gbq requires packaging") from ex
try:
import db_dtypes # noqa
@@ -734,7 +734,7 @@ def read_gbq(
query_or_table,
project_id=None,
index_col=None,
- col_order=None,
+ columns=None,
reauth=False,
auth_local_webserver=True,
dialect=None,
@@ -750,6 +750,8 @@ def read_gbq(
auth_redirect_uri=None,
client_id=None,
client_secret=None,
+ *,
+ col_order=None,
):
r"""Load data from Google BigQuery using google-cloud-python
@@ -773,7 +775,7 @@ def read_gbq(
the environment.
index_col : str, optional
Name of result column to use for index in results DataFrame.
- col_order : list(str), optional
+ columns : list(str), optional
List of BigQuery column names in the desired order for results
DataFrame.
reauth : boolean, default False
@@ -888,6 +890,8 @@ def read_gbq(
client_secret : str
The Client Secret associated with the Client ID for the Google Cloud Project
the user is attempting to connect to.
+ col_order : list(str), optional
+ Alias for columns, retained for backwards compatibility.
Returns
-------
@@ -966,10 +970,19 @@ def read_gbq(
'Index column "{0}" does not exist in DataFrame.'.format(index_col)
)
+    # col_order is retained as an alias for columns; raise an error if both are provided
+ if col_order and not columns:
+ columns = col_order
+ elif col_order and columns:
+ raise ValueError(
+ "Must specify either columns (preferred) or col_order, not both"
+ )
+
# Change the order of columns in the DataFrame based on provided list
- if col_order is not None:
- if sorted(col_order) == sorted(final_df.columns):
- final_df = final_df[col_order]
+    # TODO(kiraksi): allow columns to be a subset of all columns in the table, in a follow-up PR
+ if columns is not None:
+ if sorted(columns) == sorted(final_df.columns):
+ final_df = final_df[columns]
else:
raise InvalidColumnOrder("Column order does not match this DataFrame.")
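
The alias handling above, restated as a standalone sketch; _resolve_columns is an illustrative helper, not part of pandas-gbq:

    from typing import Optional, Sequence

    def _resolve_columns(
        columns: Optional[Sequence[str]] = None,
        col_order: Optional[Sequence[str]] = None,
    ) -> Optional[Sequence[str]]:
        # col_order survives only as a backwards-compatible alias for columns.
        if columns and col_order:
            raise ValueError(
                "Must specify either columns (preferred) or col_order, not both"
            )
        return columns if columns else col_order

    assert _resolve_columns(columns=["b", "a"]) == ["b", "a"]
    assert _resolve_columns(col_order=["b", "a"]) == ["b", "a"]
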
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 1224cbe2..3b713594 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/setup.py b/setup.py
index d896164a..d0b16c2e 100644
--- a/setup.py
+++ b/setup.py
@@ -41,6 +41,7 @@
# indefinitely. https://github.com/pydata/pandas-gbq/issues/343
"google-cloud-bigquery >=3.3.5,<4.0.0dev,!=2.4.*",
"google-cloud-bigquery-storage >=2.16.2,<3.0.0dev",
+ "packaging >=20.0.0",
]
extras = {
"tqdm": "tqdm>=4.23.0",
@@ -90,6 +91,7 @@
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Operating System :: OS Independent",
"Topic :: Internet",
"Topic :: Scientific/Engineering",
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index dc26cdee..2a4141bd 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -17,3 +17,4 @@ pyarrow==3.0.0
pydata-google-auth==1.5.0
tqdm==4.23.0
protobuf==3.19.5
+packaging==20.0.0
diff --git a/tests/system/test_gbq.py b/tests/system/test_gbq.py
index 9aac2357..7afa4ae9 100644
--- a/tests/system/test_gbq.py
+++ b/tests/system/test_gbq.py
@@ -14,19 +14,16 @@
import pandas.testing as tm
from pandas import DataFrame
-try:
- import pkg_resources # noqa
-except ImportError:
- raise ImportError("Could not import pkg_resources (setuptools).")
-import pytest
+import packaging.version
 import pytz
+import pytest
from pandas_gbq import gbq
import pandas_gbq.schema
TABLE_ID = "new_test"
-PANDAS_VERSION = pkg_resources.parse_version(pandas.__version__)
+PANDAS_VERSION = packaging.version.parse(pandas.__version__)
def test_imports():
@@ -600,6 +597,40 @@ def test_tokyo(self, tokyo_dataset, tokyo_table, project_id):
)
assert df["max_year"][0] >= 2000
+ def test_columns_as_alias(self, project_id):
+ query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
+ columns = ["string_2", "string_1", "string_3"]
+
+ df = gbq.read_gbq(
+ query,
+ project_id=project_id,
+ columns=columns,
+ credentials=self.credentials,
+ dialect="standard",
+ )
+
+ expected = DataFrame({"string_1": ["a"], "string_2": ["b"], "string_3": ["c"]})[
+ columns
+ ]
+
+        # Verify that the returned DataFrame matches the expected column order
+ tm.assert_frame_equal(df, expected)
+
+ def test_columns_and_col_order_raises_error(self, project_id):
+ query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
+ columns = ["string_2", "string_1"]
+ col_order = ["string_3", "string_1", "string_2"]
+
+ with pytest.raises(ValueError):
+ gbq.read_gbq(
+ query,
+ project_id=project_id,
+ columns=columns,
+ col_order=col_order,
+ credentials=self.credentials,
+ dialect="standard",
+ )
+
class TestToGBQIntegration(object):
@pytest.fixture(autouse=True, scope="function")