diff --git a/google/cloud/bigquery/_job_helpers.py b/google/cloud/bigquery/_job_helpers.py
index 57846b190..09daaa2a2 100644
--- a/google/cloud/bigquery/_job_helpers.py
+++ b/google/cloud/bigquery/_job_helpers.py
@@ -64,7 +64,7 @@ def query_jobs_insert(
     job_config: Optional[job.QueryJobConfig],
     job_id: Optional[str],
     job_id_prefix: Optional[str],
-    location: str,
+    location: Optional[str],
     project: str,
     retry: retries.Retry,
     timeout: Optional[float],
@@ -215,7 +215,7 @@ def query_jobs_query(
     client: "Client",
     query: str,
     job_config: Optional[job.QueryJobConfig],
-    location: str,
+    location: Optional[str],
     project: str,
     retry: retries.Retry,
     timeout: Optional[float],
diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py
index d81816d41..5a929fea4 100644
--- a/google/cloud/bigquery/client.py
+++ b/google/cloud/bigquery/client.py
@@ -307,7 +307,7 @@ def close(self):
 
     def get_service_account_email(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> str:
@@ -355,7 +355,7 @@ def get_service_account_email(
     def list_projects(
         self,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -417,11 +417,11 @@ def api_request(*args, **kwargs):
 
     def list_datasets(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         include_all: bool = False,
-        filter: str = None,
+        filter: Optional[str] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -498,7 +498,9 @@ def api_request(*args, **kwargs):
             page_size=page_size,
         )
 
-    def dataset(self, dataset_id: str, project: str = None) -> DatasetReference:
+    def dataset(
+        self, dataset_id: str, project: Optional[str] = None
+    ) -> DatasetReference:
         """Deprecated: Construct a reference to a dataset.
 
         .. deprecated:: 1.24.0
@@ -890,7 +892,7 @@ def set_iam_policy(
         self,
         table: Union[Table, TableReference, TableListItem, str],
         policy: Policy,
-        updateMask: str = None,
+        updateMask: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Policy:
@@ -1350,7 +1352,7 @@ def list_models(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1427,7 +1429,7 @@ def list_routines(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1504,7 +1506,7 @@ def list_tables(
         self,
         dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         page_size: Optional[int] = None,
@@ -1862,9 +1864,9 @@ def _get_query_results(
         self,
         job_id: str,
         retry: retries.Retry,
-        project: str = None,
+        project: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-        location: str = None,
+        location: Optional[str] = None,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> _QueryResults:
         """Get the query results object for a query job.
@@ -2039,8 +2041,8 @@ def create_job(
     def get_job(
         self,
         job_id: Union[str, job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob],
-        project: str = None,
-        location: str = None,
+        project: Optional[str] = None,
+        location: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob, job.UnknownJob]:
@@ -2103,8 +2105,8 @@ def get_job(
     def cancel_job(
         self,
         job_id: str,
-        project: str = None,
-        location: str = None,
+        project: Optional[str] = None,
+        location: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Union[job.LoadJob, job.CopyJob, job.ExtractJob, job.QueryJob]:
@@ -2181,12 +2183,12 @@ def cancel_job(
 
     def list_jobs(
         self,
-        project: str = None,
+        project: Optional[str] = None,
         parent_job: Optional[Union[QueryJob, str]] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         all_users: bool = None,
-        state_filter: str = None,
+        state_filter: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         min_creation_time: datetime.datetime = None,
@@ -2297,11 +2299,11 @@ def load_table_from_uri(
         self,
         source_uris: Union[str, Sequence[str]],
         destination: Union[Table, TableReference, TableListItem, str],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
@@ -2386,11 +2388,11 @@ def load_table_from_file(
         rewind: bool = False,
         size: Optional[int] = None,
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
         """Upload the contents of this table from a file-like object.
@@ -2494,11 +2496,11 @@ def load_table_from_dataframe(
         dataframe: "pandas.DataFrame",
         destination: Union[Table, TableReference, str],
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         parquet_compression: str = "snappy",
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
@@ -2751,11 +2753,11 @@ def load_table_from_json(
         json_rows: Iterable[Dict[str, Any]],
         destination: Union[Table, TableReference, TableListItem, str],
         num_retries: int = _DEFAULT_NUM_RETRIES,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
-        job_config: LoadJobConfig = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
+        job_config: Optional[LoadJobConfig] = None,
         timeout: ResumableTimeoutType = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
         """Upload the contents of a table from a JSON string or dict.
@@ -3064,10 +3066,10 @@ def copy_table(
             Sequence[Union[Table, TableReference, TableListItem, str]],
         ],
         destination: Union[Table, TableReference, TableListItem, str],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         job_config: CopyJobConfig = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3170,10 +3172,10 @@ def extract_table(
         self,
         source: Union[Table, TableReference, TableListItem, Model, ModelReference, str],
         destination_uris: Union[str, Sequence[str]],
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         job_config: ExtractJobConfig = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
@@ -3270,10 +3272,10 @@ def query(
         self,
         query: str,
         job_config: QueryJobConfig = None,
-        job_id: str = None,
-        job_id_prefix: str = None,
-        location: str = None,
-        project: str = None,
+        job_id: Optional[str] = None,
+        job_id_prefix: Optional[str] = None,
+        location: Optional[str] = None,
+        project: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
         job_retry: retries.Retry = DEFAULT_JOB_RETRY,
@@ -3563,7 +3565,7 @@ def insert_rows_json(
         ] = AutoRowIDs.GENERATE_UUID,
         skip_invalid_rows: bool = None,
         ignore_unknown_values: bool = None,
-        template_suffix: str = None,
+        template_suffix: Optional[str] = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: TimeoutType = DEFAULT_TIMEOUT,
     ) -> Sequence[dict]:
@@ -3755,7 +3757,7 @@ def list_rows(
         table: Union[Table, TableListItem, TableReference, str],
         selected_fields: Sequence[SchemaField] = None,
         max_results: Optional[int] = None,
-        page_token: str = None,
+        page_token: Optional[str] = None,
         start_index: Optional[int] = None,
         page_size: Optional[int] = None,
         retry: retries.Retry = DEFAULT_RETRY,
diff --git a/google/cloud/bigquery/dataset.py b/google/cloud/bigquery/dataset.py
index 0edd29359..513c32d9c 100644
--- a/google/cloud/bigquery/dataset.py
+++ b/google/cloud/bigquery/dataset.py
@@ -139,7 +139,7 @@ def from_api_repr(cls, resource: dict) -> "DatasetReference":
 
     @classmethod
     def from_string(
-        cls, dataset_id: str, default_project: str = None
+        cls, dataset_id: str, default_project: Optional[str] = None
     ) -> "DatasetReference":
         """Construct a dataset reference from dataset ID string.
 
diff --git a/google/cloud/bigquery/job/base.py b/google/cloud/bigquery/job/base.py
index 4073e0137..a6267be41 100644
--- a/google/cloud/bigquery/job/base.py
+++ b/google/cloud/bigquery/job/base.py
@@ -703,7 +703,10 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
         self._set_properties(api_response)
 
     def exists(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> bool:
         """API call: test for the existence of the job via a GET request
 
@@ -748,7 +751,10 @@ def exists(
         return True
 
     def reload(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ):
         """API call: refresh job properties via a GET request.
 
@@ -785,7 +791,10 @@ def reload(
         self._set_properties(api_response)
 
     def cancel(
-        self, client=None, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        client=None,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> bool:
         """API call: cancel job via a POST request
 
@@ -855,7 +864,7 @@ def _set_future_result(self):
     def done(
         self,
         retry: "retries.Retry" = DEFAULT_RETRY,
-        timeout: float = None,
+        timeout: Optional[float] = None,
         reload: bool = True,
     ) -> bool:
         """Checks if the job is complete.
@@ -881,7 +890,9 @@ def done(
         return self.state == _DONE_STATE
 
     def result(  # type: ignore  # (signature complaint)
-        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self,
+        retry: "retries.Retry" = DEFAULT_RETRY,
+        timeout: Optional[float] = None,
     ) -> "_AsyncJob":
         """Start the job and wait for it to complete and get the result.
 
diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index 315d8201c..7dddc8278 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -1317,7 +1317,7 @@ def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
             raise
 
     def _reload_query_results(
-        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: float = None
+        self, retry: "retries.Retry" = DEFAULT_RETRY, timeout: Optional[float] = None
     ):
         """Refresh the cached query results.
 
@@ -1405,7 +1405,7 @@ def result(  # type: ignore  # (complaints about the overloaded signature)
         page_size: Optional[int] = None,
         max_results: Optional[int] = None,
         retry: "retries.Retry" = DEFAULT_RETRY,
-        timeout: float = None,
+        timeout: Optional[float] = None,
         start_index: Optional[int] = None,
         job_retry: "retries.Retry" = DEFAULT_JOB_RETRY,
     ) -> Union["RowIterator", _EmptyRowIterator]:
@@ -1557,7 +1557,7 @@ def do_get_result():
     # that should only exist here in the QueryJob method.
     def to_arrow(
         self,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
@@ -1634,7 +1634,7 @@ def to_dataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
         geography_as_object: bool = False,
@@ -1820,7 +1820,7 @@ def to_geodataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
         geography_column: Optional[str] = None,
diff --git a/google/cloud/bigquery/routine/routine.py b/google/cloud/bigquery/routine/routine.py
index 36ed03728..ef33d507e 100644
--- a/google/cloud/bigquery/routine/routine.py
+++ b/google/cloud/bigquery/routine/routine.py
@@ -537,7 +537,7 @@ def from_api_repr(cls, resource: dict) -> "RoutineReference":
 
     @classmethod
     def from_string(
-        cls, routine_id: str, default_project: str = None
+        cls, routine_id: str, default_project: Optional[str] = None
     ) -> "RoutineReference":
         """Factory: construct a routine reference from routine ID string.
 
diff --git a/google/cloud/bigquery/schema.py b/google/cloud/bigquery/schema.py
index ebf34e4cd..20a1bc92f 100644
--- a/google/cloud/bigquery/schema.py
+++ b/google/cloud/bigquery/schema.py
@@ -16,7 +16,7 @@
 
 import collections
 import enum
-from typing import Any, Dict, Iterable, Union
+from typing import Any, Dict, Iterable, Optional, Union
 
 from google.cloud.bigquery import standard_sql
 from google.cloud.bigquery.enums import StandardSqlTypeNames
@@ -124,7 +124,7 @@ def __init__(
         name: str,
         field_type: str,
         mode: str = "NULLABLE",
-        default_value_expression: str = None,
+        default_value_expression: Optional[str] = None,
         description: Union[str, _DefaultSentinel] = _DEFAULT_VALUE,
         fields: Iterable["SchemaField"] = (),
         policy_tags: Union["PolicyTagList", None, _DefaultSentinel] = _DEFAULT_VALUE,
diff --git a/google/cloud/bigquery/table.py b/google/cloud/bigquery/table.py
index bf4a90317..462447d51 100644
--- a/google/cloud/bigquery/table.py
+++ b/google/cloud/bigquery/table.py
@@ -228,7 +228,7 @@ def __init__(self, dataset_ref: "DatasetReference", table_id: str):
 
     @classmethod
     def from_string(
-        cls, table_id: str, default_project: str = None
+        cls, table_id: str, default_project: Optional[str] = None
     ) -> "TableReference":
         """Construct a table reference from table ID string.
 
@@ -1745,7 +1745,7 @@ def to_arrow_iterable(
     # changes to job.QueryJob.to_arrow()
     def to_arrow(
         self,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
     ) -> "pyarrow.Table":
@@ -1932,7 +1932,7 @@ def to_dataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         geography_as_object: bool = False,
         bool_dtype: Union[Any, None] = DefaultPandasDTypes.BOOL_DTYPE,
@@ -2230,7 +2230,7 @@ def to_geodataframe(
         self,
         bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
-        progress_bar_type: str = None,
+        progress_bar_type: Optional[str] = None,
         create_bqstorage_client: bool = True,
         geography_column: Optional[str] = None,
     ) -> "geopandas.GeoDataFrame":
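
Every hunk above makes the same mechanical change: a parameter defaulting to None now spells out Optional[...] in its annotation. PEP 484 originally allowed a None default to imply an Optional type, but that implicit behavior is deprecated, and recent mypy releases reject it by default (the no_implicit_optional setting). A minimal sketch of the before/after, using a hypothetical locate_job function rather than any method from this library:

from typing import Optional

# Before (implicit Optional): type checkers such as mypy now flag this
# by default, roughly: 'Incompatible default for argument "project"
# (default has type "None", argument has type "str")'.
#
#     def locate_job(job_id: str, project: str = None) -> str: ...

# After (explicit Optional), mirroring the pattern applied in every hunk
# above. locate_job is a hypothetical stand-in, not a library method.
def locate_job(job_id: str, project: Optional[str] = None) -> str:
    # Fall back to a placeholder project when the caller passes nothing.
    return f"{project or 'default-project'}:{job_id}"

print(locate_job("abc123"))                 # default-project:abc123
print(locate_job("abc123", "my-project"))   # my-project:abc123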