Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: sync v3 with main branch #1088

Merged
merged 15 commits into from
Dec 16, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 36 additions & 38 deletions google/cloud/bigquery/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from __future__ import absolute_import

import copy
from typing import Dict, Any

import google.cloud._helpers # type: ignore

Expand Down Expand Up @@ -77,28 +78,29 @@ def _get_routine_reference(self, routine_id):
class AccessEntry(object):
"""Represents grant of an access role to an entity.

An entry must have exactly one of the allowed :attr:`ENTITY_TYPES`. If
anything but ``view`` or ``routine`` are set, a ``role`` is also required.
``role`` is omitted for ``view`` and ``routine``, because they are always
read-only.
An entry must have exactly one of the allowed
:class:`google.cloud.bigquery.enums.EntityTypes`. If anything but ``view``, ``routine``,
or ``dataset`` are set, a ``role`` is also required. ``role`` is omitted for ``view``,
    ``routine``, or ``dataset``, because they are always read-only.

See https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets.

Args:
role (str):
Role granted to the entity. The following string values are
supported: `'READER'`, `'WRITER'`, `'OWNER'`. It may also be
:data:`None` if the ``entity_type`` is ``view`` or ``routine``.
:data:`None` if the ``entity_type`` is ``view``, ``routine``, or ``dataset``.

entity_type (str):
Type of entity being granted the role. One of :attr:`ENTITY_TYPES`.
Type of entity being granted the role. See
:class:`google.cloud.bigquery.enums.EntityTypes` for supported types.

entity_id (Union[str, Dict[str, str]]):
If the ``entity_type`` is not 'view' or 'routine', the ``entity_id``
is the ``str`` ID of the entity being granted the role. If the
``entity_type`` is 'view' or 'routine', the ``entity_id`` is a ``dict``
representing the view or routine from a different dataset to grant
access to in the following format for views::
If the ``entity_type`` is not 'view', 'routine', or 'dataset', the
``entity_id`` is the ``str`` ID of the entity being granted the role. If
the ``entity_type`` is 'view' or 'routine', the ``entity_id`` is a ``dict``
representing the view or routine from a different dataset to grant access
to in the following format for views::

{
'projectId': string,
Expand All @@ -114,11 +116,22 @@ class AccessEntry(object):
'routineId': string
}

If the ``entity_type`` is 'dataset', the ``entity_id`` is a ``dict`` that includes
a 'dataset' field with a ``dict`` representing the dataset and a 'target_types'
field with a ``str`` value of the dataset's resource type::

{
'dataset': {
'projectId': string,
'datasetId': string,
},
            'target_types': 'VIEWS'
}

Raises:
ValueError:
If the ``entity_type`` is not among :attr:`ENTITY_TYPES`, or if a
``view`` or a ``routine`` has ``role`` set, or a non ``view`` and
non ``routine`` **does not** have a ``role`` set.
If a ``view``, ``routine``, or ``dataset`` has ``role`` set, or a non ``view``,
non ``routine``, and non ``dataset`` **does not** have a ``role`` set.

Examples:
>>> entry = AccessEntry('OWNER', 'userByEmail', '[email protected]')
Expand All @@ -131,27 +144,9 @@ class AccessEntry(object):
>>> entry = AccessEntry(None, 'view', view)
"""

ENTITY_TYPES = frozenset(
[
"userByEmail",
"groupByEmail",
"domain",
"specialGroup",
"view",
"iamMember",
"routine",
]
)
"""Allowed entity types."""

def __init__(self, role, entity_type, entity_id) -> None:
if entity_type not in self.ENTITY_TYPES:
message = "Entity type %r not among: %s" % (
entity_type,
", ".join(self.ENTITY_TYPES),
)
raise ValueError(message)
if entity_type in ("view", "routine"):
def __init__(self, role=None, entity_type=None, entity_id=None) -> None:
self._properties: Dict[str, Any] = {}
if entity_type in ("view", "routine", "dataset"):
if role is not None:
raise ValueError(
"Role must be None for a %r. Received "
Expand All @@ -162,7 +157,6 @@ def __init__(self, role, entity_type, entity_id) -> None:
raise ValueError(
"Role must be set for entity " "type %r" % (entity_type,)
)

self._role = role
self._entity_type = entity_type
self._entity_id = entity_id
Expand Down Expand Up @@ -214,7 +208,8 @@ def to_api_repr(self):
Returns:
Dict[str, object]: Access entry represented as an API resource
"""
resource = {self._entity_type: self._entity_id}
resource = copy.deepcopy(self._properties)
resource[self._entity_type] = self._entity_id
if self._role is not None:
resource["role"] = self._role
return resource
Expand All @@ -241,7 +236,10 @@ def from_api_repr(cls, resource: dict) -> "AccessEntry":
entity_type, entity_id = entry.popitem()
if len(entry) != 0:
raise ValueError("Entry has unexpected keys remaining.", entry)
return cls(role, entity_type, entity_id)

config = cls(role, entity_type, entity_id)
config._properties = copy.deepcopy(resource)
return config


class DatasetReference(object):
Expand Down
13 changes: 13 additions & 0 deletions google/cloud/bigquery/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,19 @@ def _generate_next_value_(name, start, count, last_values):
STRUCT = enum.auto()


class EntityTypes(str, enum.Enum):
    """Enum of allowed entity type names in AccessEntry.

    Values are the JSON field names used by the BigQuery REST API when an
    access entry is serialized (e.g. ``{"userByEmail": "..."}``). Because the
    enum also subclasses :class:`str`, members compare equal to — and can be
    used anywhere in place of — their plain string values.
    """

    USER_BY_EMAIL = "userByEmail"  # grant to a single user account
    GROUP_BY_EMAIL = "groupByEmail"  # grant to a Google group
    DOMAIN = "domain"  # grant to every user in a domain
    DATASET = "dataset"  # authorized dataset (role must be None)
    SPECIAL_GROUP = "specialGroup"  # e.g. 'projectOwners', 'allAuthenticatedUsers'
    VIEW = "view"  # authorized view (role must be None)
    IAM_MEMBER = "iamMember"  # raw IAM member string
    ROUTINE = "routine"  # authorized routine/UDF (role must be None)


# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
class SqlTypeNames(str, enum.Enum):
Expand Down
55 changes: 28 additions & 27 deletions samples/geography/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,50 +1,51 @@
attrs==21.2.0
cachetools==4.2.4
certifi==2021.5.30
cffi==1.14.6
charset-normalizer==2.0.6
click==8.0.1
certifi==2021.10.8
cffi==1.15.0
charset-normalizer==2.0.9
click==8.0.3
click-plugins==1.1.1
cligj==0.7.2
dataclasses==0.6; python_version < '3.7'
db-dtypes==0.3.0
Fiona==1.8.20
geojson==2.5.0
geopandas==0.9.0
google-api-core==2.0.1
google-auth==2.2.1
google-cloud-bigquery==2.27.1
google-cloud-bigquery-storage==2.9.0
google-cloud-core==2.0.0
google-crc32c==1.2.0
google-resumable-media==2.0.3
googleapis-common-protos==1.53.0
grpcio==1.41.0
idna==3.2
importlib-metadata==4.8.1
libcst==0.3.21
geopandas==0.9.0; python_version < '3.7'
geopandas==0.10.2; python_version >= '3.7'
google-api-core==2.3.0
google-auth==2.3.3
google-cloud-bigquery==2.31.0
google-cloud-bigquery-storage==2.10.1
google-cloud-core==2.2.1
google-crc32c==1.3.0
google-resumable-media==2.1.0
googleapis-common-protos==1.54.0
grpcio==1.42.0
idna==3.3
importlib-metadata==4.8.2
libcst==0.3.23
munch==2.5.0
mypy-extensions==0.4.3
packaging==21.0
packaging==21.3
pandas==1.1.5; python_version < '3.7'
pandas==1.3.4; python_version >= '3.7'
proto-plus==1.19.2
protobuf==3.18.0
pyarrow==6.0.0
proto-plus==1.19.8
protobuf==3.19.1
pyarrow==6.0.1
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pyparsing==2.4.7
pycparser==2.21
pyparsing==3.0.6
pyproj==3.0.1; python_version < "3.7"
pyproj==3.1.0; python_version > "3.6"
python-dateutil==2.8.2
pytz==2021.1
PyYAML==5.4.1
pytz==2021.3
PyYAML==6.0
requests==2.26.0
rsa==4.7.2
rsa==4.8
Shapely==1.8.0
six==1.16.0
typing-extensions==3.10.0.2
typing-extensions==4.0.1
typing-inspect==0.7.1
urllib3==1.26.7
zipp==3.6.0
2 changes: 1 addition & 1 deletion samples/magics/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
google-cloud-testutils==1.1.0
google-cloud-testutils==1.3.1
pytest==6.2.5
mock==4.0.3
10 changes: 5 additions & 5 deletions samples/magics/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
db-dtypes==0.3.0
google-cloud-bigquery-storage==2.9.0
db-dtypes==0.3.1
google-cloud-bigquery-storage==2.10.1
google-auth-oauthlib==0.4.6
grpcio==1.41.0
grpcio==1.42.0
ipython==7.16.1; python_version < '3.7'
ipython==7.29.0; python_version >= '3.7'
matplotlib==3.3.4; python_version < '3.7'
matplotlib==3.5.0rc1; python_version >= '3.7'
pandas==1.1.5; python_version < '3.7'
pandas==1.3.4; python_version >= '3.7'
pyarrow==6.0.0
pytz==2021.1
pyarrow==6.0.1
pytz==2021.3
typing-extensions==3.10.0.2
5 changes: 3 additions & 2 deletions samples/snippets/authorized_view_tutorial.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def run_authorized_view_tutorial(
# Create a source dataset
# [START bigquery_avt_create_source_dataset]
from google.cloud import bigquery
from google.cloud.bigquery.enums import EntityTypes

client = bigquery.Client()
source_dataset_id = "github_source_data"
Expand Down Expand Up @@ -113,7 +114,7 @@ def run_authorized_view_tutorial(
# analyst_group_email = '[email protected]'
access_entries = shared_dataset.access_entries
access_entries.append(
bigquery.AccessEntry("READER", "groupByEmail", analyst_group_email)
bigquery.AccessEntry("READER", EntityTypes.GROUP_BY_EMAIL, analyst_group_email)
)
shared_dataset.access_entries = access_entries
shared_dataset = client.update_dataset(
Expand All @@ -125,7 +126,7 @@ def run_authorized_view_tutorial(
# [START bigquery_avt_source_dataset_access]
access_entries = source_dataset.access_entries
access_entries.append(
bigquery.AccessEntry(None, "view", view.reference.to_api_repr())
bigquery.AccessEntry(None, EntityTypes.VIEW, view.reference.to_api_repr())
)
source_dataset.access_entries = access_entries
source_dataset = client.update_dataset(
Expand Down
2 changes: 1 addition & 1 deletion samples/snippets/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
google-cloud-testutils==1.1.0
google-cloud-testutils==1.3.1
pytest==6.2.5
mock==4.0.3
10 changes: 5 additions & 5 deletions samples/snippets/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
db-dtypes==0.3.0
google-cloud-bigquery-storage==2.9.0
db-dtypes==0.3.1
google-cloud-bigquery-storage==2.10.1
google-auth-oauthlib==0.4.6
grpcio==1.41.0
grpcio==1.42.0
ipython==7.16.1; python_version < '3.7'
ipython==7.29.0; python_version >= '3.7'
matplotlib==3.3.4; python_version < '3.7'
matplotlib==3.4.1; python_version >= '3.7'
pandas==1.1.5; python_version < '3.7'
pandas==1.3.4; python_version >= '3.7'
pyarrow==6.0.0
pytz==2021.1
pyarrow==6.0.1
pytz==2021.3
typing-extensions==3.10.0.2
4 changes: 3 additions & 1 deletion samples/snippets/update_dataset_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ def update_dataset_access(dataset_id: str, entity_id: str) -> None:
# of the entity, such as a view's table reference.
entity_id = "[email protected]"

from google.cloud.bigquery.enums import EntityTypes

# TODO(developer): Set entity_type to the type of entity you are granting access to.
# Common types include:
#
Expand All @@ -37,7 +39,7 @@ def update_dataset_access(dataset_id: str, entity_id: str) -> None:
#
# For a complete reference, see the REST API reference documentation:
# https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#Dataset.FIELDS.access
entity_type = "groupByEmail"
entity_type = EntityTypes.GROUP_BY_EMAIL

# TODO(developer): Set role to a one of the "Basic roles for datasets"
# described here:
Expand Down
4 changes: 3 additions & 1 deletion samples/tests/test_copy_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,11 @@

import typing

import pytest

from .. import copy_table

if typing.TYPE_CHECKING:
import pytest
from google.cloud import bigquery


Expand All @@ -27,6 +28,7 @@ def test_copy_table(
random_table_id: str,
client: "bigquery.Client",
) -> None:
pytest.skip("b/210907595: copy fails for shakespeare table")

copy_table.copy_table(table_with_data_id, random_table_id)
out, err = capsys.readouterr()
Expand Down
6 changes: 2 additions & 4 deletions samples/tests/test_copy_table_cmek.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import typing
import pytest

from .. import copy_table_cmek

if typing.TYPE_CHECKING:
import pytest


def test_copy_table_cmek(
capsys: "pytest.CaptureFixture[str]",
random_table_id: str,
table_with_data_id: str,
kms_key_name: str,
) -> None:
pytest.skip("b/210907595: copy fails for shakespeare table")

copy_table_cmek.copy_table_cmek(random_table_id, table_with_data_id, kms_key_name)
out, err = capsys.readouterr()
Expand Down
2 changes: 2 additions & 0 deletions tests/system/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -1137,6 +1137,8 @@ def test_extract_table(self):
self.assertIn("Bharney Rhubble", got)

def test_copy_table(self):
pytest.skip("b/210907595: copy fails for shakespeare table")

# If we create a new table to copy from, the test won't work
# because the new rows will be stored in the streaming buffer,
# and copy jobs don't read the streaming buffer.
Expand Down
Loading