fix: type annotations include Optional when None is accepted (#1554)
Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
- [ ] Make sure to open an issue as a [bug/issue](https://togithub.com/googleapis/python-bigquery/issues/new/choose) before writing your code!  That way we can discuss the change, evaluate designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)

Fixes #1545 🦕
tswast authored Jun 22, 2023
1 parent a5d86a3 commit 6c1ab80
Showing 8 changed files with 86 additions and 73 deletions.
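The fix repeated across all eight files is the same one-liner: parameters that default to `None` were annotated with a bare type such as `str`, relying on the "implicit Optional" behavior from PEP 484 that type checkers no longer honor by default (mypy 0.990 and later ship with `no_implicit_optional` enabled). A minimal before/after sketch, with hypothetical function names:

```python
from typing import Optional


# Before: the annotation claims `str`, but the default is None.
# mypy now flags this ("Incompatible default for argument") because
# the declared type does not admit the default value.
def list_widgets_old(page_token: str = None):
    ...


# After: the annotation states explicitly that None is accepted.
def list_widgets_new(page_token: Optional[str] = None):
    ...
```

Both definitions are identical at runtime; the change only makes the accepted types visible to static analysis, so strict-mode users of the library stop seeing spurious errors.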
4 changes: 2 additions & 2 deletions google/cloud/bigquery/_job_helpers.py
@@ -64,7 +64,7 @@ def query_jobs_insert(
job_config: Optional[job.QueryJobConfig],
job_id: Optional[str],
job_id_prefix: Optional[str],
-location: str,
+location: Optional[str],
project: str,
retry: retries.Retry,
timeout: Optional[float],
@@ -215,7 +215,7 @@ def query_jobs_query(
client: "Client",
query: str,
job_config: Optional[job.QueryJobConfig],
-location: str,
+location: Optional[str],
project: str,
retry: retries.Retry,
timeout: Optional[float],
108 changes: 55 additions & 53 deletions google/cloud/bigquery/client.py
@@ -307,7 +307,7 @@ def close(self):

def get_service_account_email(
self,
-project: str = None,
+project: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> str:
@@ -355,7 +355,7 @@ def get_service_account_email(
def list_projects(
self,
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: Optional[int] = None,
@@ -417,11 +417,11 @@ def api_request(*args, **kwargs):

def list_datasets(
self,
-project: str = None,
+project: Optional[str] = None,
include_all: bool = False,
-filter: str = None,
+filter: Optional[str] = None,
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: Optional[int] = None,
@@ -498,7 +498,9 @@ def api_request(*args, **kwargs):
page_size=page_size,
)

-def dataset(self, dataset_id: str, project: str = None) -> DatasetReference:
+def dataset(
+    self, dataset_id: str, project: Optional[str] = None
+) -> DatasetReference:
"""Deprecated: Construct a reference to a dataset.
.. deprecated:: 1.24.0
@@ -890,7 +892,7 @@ def set_iam_policy(
self,
table: Union[Table, TableReference, TableListItem, str],
policy: Policy,
-updateMask: str = None,
+updateMask: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> Policy:
@@ -1350,7 +1352,7 @@ def list_models(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: Optional[int] = None,
@@ -1427,7 +1429,7 @@ def list_routines(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: Optional[int] = None,
@@ -1504,7 +1506,7 @@ def list_tables(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: Optional[int] = None,
@@ -1862,9 +1864,9 @@ def _get_query_results(
self,
job_id: str,
retry: retries.Retry,
-project: str = None,
+project: Optional[str] = None,
timeout_ms: Optional[int] = None,
-location: str = None,
+location: Optional[str] = None,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> _QueryResults:
"""Get the query results object for a query job.
@@ -2039,8 +2041,8 @@ def create_job(
def get_job(
self,
job_id: Union[str, job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob],
-project: str = None,
-location: str = None,
+project: Optional[str] = None,
+location: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob, job.UnknownJob]:
@@ -2103,8 +2105,8 @@ def get_job(
def cancel_job(
self,
job_id: str,
-project: str = None,
-location: str = None,
+project: Optional[str] = None,
+location: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob]:
@@ -2181,12 +2183,12 @@ def cancel_job(

def list_jobs(
self,
-project: str = None,
+project: Optional[str] = None,
parent_job: Optional[Union[QueryJob, str]] = None,
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
all_users: bool = None,
-state_filter: str = None,
+state_filter: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
min_creation_time: datetime.datetime = None,
@@ -2297,11 +2299,11 @@ def load_table_from_uri(
self,
source_uris: Union[str, Sequence[str]],
destination: Union[Table, TableReference, TableListItem, str],
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
-job_config: LoadJobConfig = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
+job_config: Optional[LoadJobConfig] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> job.LoadJob:
@@ -2386,11 +2388,11 @@ def load_table_from_file(
rewind: bool = False,
size: Optional[int] = None,
num_retries: int = _DEFAULT_NUM_RETRIES,
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
-job_config: LoadJobConfig = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
+job_config: Optional[LoadJobConfig] = None,
timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
) -> job.LoadJob:
"""Upload the contents of this table from a file-like object.
@@ -2494,11 +2496,11 @@ def load_table_from_dataframe(
dataframe: "pandas.DataFrame",
destination: Union[Table, TableReference, str],
num_retries: int = _DEFAULT_NUM_RETRIES,
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
-job_config: LoadJobConfig = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
+job_config: Optional[LoadJobConfig] = None,
parquet_compression: str = "snappy",
timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
) -> job.LoadJob:
@@ -2751,11 +2753,11 @@ def load_table_from_json(
json_rows: Iterable[Dict[str, Any]],
destination: Union[Table, TableReference, TableListItem, str],
num_retries: int = _DEFAULT_NUM_RETRIES,
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
-job_config: LoadJobConfig = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
+job_config: Optional[LoadJobConfig] = None,
timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
) -> job.LoadJob:
"""Upload the contents of a table from a JSON string or dict.
@@ -3064,10 +3066,10 @@ def copy_table(
Sequence[Union[Table, TableReference, TableListItem, str]],
],
destination: Union[Table, TableReference, TableListItem, str],
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
job_config: CopyJobConfig = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3170,10 +3172,10 @@ def extract_table(
self,
source: Union[Table, TableReference, TableListItem, Model, ModelReference, str],
destination_uris: Union[str, Sequence[str]],
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
job_config: ExtractJobConfig = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3270,10 +3272,10 @@ def query(
self,
query: str,
job_config: QueryJobConfig = None,
-job_id: str = None,
-job_id_prefix: str = None,
-location: str = None,
-project: str = None,
+job_id: Optional[str] = None,
+job_id_prefix: Optional[str] = None,
+location: Optional[str] = None,
+project: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
job_retry: retries.Retry = DEFAULT_JOB_RETRY,
@@ -3563,7 +3565,7 @@ def insert_rows_json(
] = AutoRowIDs.GENERATE_UUID,
skip_invalid_rows: bool = None,
ignore_unknown_values: bool = None,
-template_suffix: str = None,
+template_suffix: Optional[str] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> Sequence[dict]:
@@ -3755,7 +3757,7 @@ def list_rows(
table: Union[Table, TableListItem, TableReference, str],
selected_fields: Sequence[SchemaField] = None,
max_results: Optional[int] = None,
-page_token: str = None,
+page_token: Optional[str] = None,
start_index: Optional[int] = None,
page_size: Optional[int] = None,
retry: retries.Retry = DEFAULT_RETRY,
2 changes: 1 addition & 1 deletion google/cloud/bigquery/dataset.py
@@ -139,7 +139,7 @@ def from_api_repr(cls, resource: dict) -> "DatasetReference":

@classmethod
def from_string(
-cls, dataset_id: str, default_project: str = None
+cls, dataset_id: str, default_project: Optional[str] = None
) -> "DatasetReference":
"""Construct a dataset reference from dataset ID string.
21 changes: 16 additions & 5 deletions google/cloud/bigquery/job/base.py
@@ -703,7 +703,10 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
self._set_properties(api_response)

def exists(
-self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+self,
+client=None,
+retry: "retries.Retry" = DEFAULT_RETRY,
+timeout: Optional[float] = None,
) -> bool:
"""API call: test for the existence of the job via a GET request
@@ -748,7 +751,10 @@ def exists(
return True

def reload(
-self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+self,
+client=None,
+retry: "retries.Retry" = DEFAULT_RETRY,
+timeout: Optional[float] = None,
):
"""API call: refresh job properties via a GET request.
@@ -785,7 +791,10 @@ def reload(
self._set_properties(api_response)

def cancel(
-self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+self,
+client=None,
+retry: "retries.Retry" = DEFAULT_RETRY,
+timeout: Optional[float] = None,
) -> bool:
"""API call: cancel job via a POST request
@@ -855,7 +864,7 @@ def _set_future_result(self):
def done(
self,
retry: "retries.Retry" = DEFAULT_RETRY,
-timeout: float = None,
+timeout: Optional[float] = None,
reload: bool = True,
) -> bool:
"""Checks if the job is complete.
@@ -881,7 +890,9 @@ def done(
return self.state == _DONE_STATE

def result( # type: ignore # (signature complaint)
-self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+self,
+retry: "retries.Retry" = DEFAULT_RETRY,
+timeout: Optional[float] = None,
) -> "_AsyncJob":
"""Start the job and wait for it to complete and get the result.
10 changes: 5 additions & 5 deletions google/cloud/bigquery/job/query.py
@@ -1317,7 +1317,7 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
raise

def _reload_query_results(
-self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: Optional[float] = None
):
"""Refresh the cached query results.
@@ -1405,7 +1405,7 @@ def result( # type: ignore # (complaints about the overloaded signature)
page_size: Optional[int] = None,
max_results: Optional[int] = None,
retry: "retries.Retry" = DEFAULT_RETRY,
-timeout: float = None,
+timeout: Optional[float] = None,
start_index: Optional[int] = None,
job_retry: "retries.Retry" = DEFAULT_JOB_RETRY,
) -> Union["RowIterator", _EmptyRowIterator]:
@@ -1557,7 +1557,7 @@ def do_get_result():
# that should only exist here in the QueryJob method.
def to_arrow(
self,
-progress_bar_type: str = None,
+progress_bar_type: Optional[str] = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
create_bqstorage_client: bool = True,
max_results: Optional[int] = None,
@@ -1634,7 +1634,7 @@ def to_dataframe(
self,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Dict[str, Any] = None,
-progress_bar_type: str = None,
+progress_bar_type: Optional[str] = None,
create_bqstorage_client: bool = True,
max_results: Optional[int] = None,
geography_as_object: bool = False,
@@ -1820,7 +1820,7 @@ def to_geodataframe(
self,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Dict[str, Any] = None,
-progress_bar_type: str = None,
+progress_bar_type: Optional[str] = None,
create_bqstorage_client: bool = True,
max_results: Optional[int] = None,
geography_column: Optional[str] = None,
2 changes: 1 addition & 1 deletion google/cloud/bigquery/routine/routine.py
@@ -537,7 +537,7 @@ def from_api_repr(cls, resource: dict) -> "RoutineReference":

@classmethod
def from_string(
-cls, routine_id: str, default_project: str = None
+cls, routine_id: str, default_project: Optional[str] = None
) -> "RoutineReference":
"""Factory: construct a routine reference from routine ID string.
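For callers, the visible effect is that explicitly passing `None` for these parameters now type-checks. A short sketch (assuming default application credentials and a hypothetical job ID):

```python
from google.cloud import bigquery

client = bigquery.Client()

# Before this commit, `project` and `location` were annotated as plain
# `str`, so an explicit None was an "incompatible type" error under
# mypy even though the library handled None at runtime (falling back
# to the client's defaults). These calls now check cleanly.
job = client.get_job("my-job-id", project=None, location=None)
print(job.state)
```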
