diff --git a/docs/dbacademy.clients.airtable.html b/docs/dbacademy.clients.airtable.html
index e4be2b47..52ffe260 100644
--- a/docs/dbacademy.clients.airtable.html
+++ b/docs/dbacademy.clients.airtable.html
@@ -39,12 +39,12 @@ class AirTable(builtins.object)
-AirTable(*, access_token: str, base_id: str, table_id: str, error_handler: dbacademy.clients.airtable.ErrorHandler = <dbacademy.clients.airtable.ErrorHandler object at 0x000001A4200671F0>)
+AirTable(*, access_token: str, base_id: str, table_id: str, error_handler: dbacademy.clients.ClientErrorHandler = <dbacademy.clients.ClientErrorHandler object at 0x000001D66BC40E80>)
 

 
  Methods defined here:
-__init__(self, *, access_token: str, base_id: str, table_id: str, error_handler: dbacademy.clients.airtable.ErrorHandler = <dbacademy.clients.airtable.ErrorHandler object at 0x000001A4200671F0>)
-    Initialize self.  See help(type(self)) for accurate signature.
+__init__(self, *, access_token: str, base_id: str, table_id: str, error_handler: dbacademy.clients.ClientErrorHandler = <dbacademy.clients.ClientErrorHandler object at 0x000001D66BC40E80>)
+    Initialize self.  See help(type(self)) for accurate signature.
assert_response(self, response: requests.models.Response, message: str) -> None
@@ -70,6 +70,8 @@
Data and other attributes defined here:
+ClientErrorHandler = <class 'dbacademy.clients.ClientErrorHandler'>
+
Response = <class 'requests.models.Response'>
The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
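The airtable.html hunks above record a single substantive change: AirTable's `error_handler` is now typed and defaulted with the shared `dbacademy.clients.ClientErrorHandler` instead of the module-local `dbacademy.clients.airtable.ErrorHandler` (the differing hex addresses are just artifacts of regenerating the docs). A minimal before/after construction sketch based on the keyword-only signature shown above; the token and IDs are placeholders:

```python
from dbacademy.clients import ClientErrorHandler      # new home of the handler
from dbacademy.clients.airtable import AirTable

# Previously: from dbacademy.clients.airtable import ErrorHandler

airtable = AirTable(
    access_token="pat-XXXX",             # placeholder Airtable access token
    base_id="appXXXXXXXXXXXXXX",         # placeholder base id
    table_id="tblXXXXXXXXXXXXXX",        # placeholder table id
    error_handler=ClientErrorHandler(),  # optional; this is also the documented default
)
```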
diff --git a/docs/dbacademy.clients.html b/docs/dbacademy.clients.html
index 1bf360d3..b4a72120 100644
--- a/docs/dbacademy.clients.html
+++ b/docs/dbacademy.clients.html
@@ -22,5 +22,45 @@ github (package)
google (package)
slack (package)
-
+
+Classes
+    builtins.object
+        ClientErrorHandler
+
+class ClientErrorHandler(builtins.object)
+    Methods defined here:
+        on_error(self, *messages: str)
+
+    Data descriptors defined here:
+        __dict__
+            dictionary for instance variables (if defined)
+        __weakref__
+            list of weak references to the object (if defined)
+
+Data
+    __all__ = ['ClientErrorHandler']
\ No newline at end of file
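The new `dbacademy.clients` page above documents `ClientErrorHandler` with a single hook, `on_error(self, *messages: str)`, plus the default data descriptors. A sketch of a custom handler, assuming the class is intended to be subclassed and that overriding `on_error` is sufficient (neither is stated in the generated docs):

```python
from dbacademy.clients import ClientErrorHandler


class RaisingErrorHandler(ClientErrorHandler):
    """Hypothetical handler that turns reported errors into exceptions."""

    def on_error(self, *messages: str):
        # Join whatever messages the client reports and fail loudly.
        raise AssertionError(" | ".join(messages))
```

Any client that accepts an `error_handler`, such as `AirTable` above, could then be handed a `RaisingErrorHandler()` in place of the shared default.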
diff --git a/docs/dbacademy.dbrest.instance_pools.html b/docs/dbacademy.dbrest.instance_pools.html
index a696af11..a87ea666 100644
--- a/docs/dbacademy.dbrest.instance_pools.html
+++ b/docs/dbacademy.dbrest.instance_pools.html
@@ -45,7 +45,7 @@ Methods defined here:

__init__(self, client: dbacademy.dbrest.client.DBAcademyRestClient)
Initialize self.  See help(type(self)) for accurate signature.
-create(self, name: str, definition: dict, tags: <function InstancePoolsClient.list at 0x000001A42027DAF0> = None)
+create(self, name: str, definition: dict, tags: <function InstancePoolsClient.list at 0x000001D66CEB7AF0> = None)
create_or_update(self, instance_pool_name: str, idle_instance_autotermination_minutes: int, min_idle_instances: int = 0, max_capacity: int = None, node_type_id: str = None, preloaded_spark_version: str = None, tags: dict = None)
diff --git a/docs/dbacademy.dbrest.pipelines.html b/docs/dbacademy.dbrest.pipelines.html
index f0da69cf..f643e238 100644
--- a/docs/dbacademy.dbrest.pipelines.html
+++ b/docs/dbacademy.dbrest.pipelines.html
@@ -53,11 +53,11 @@ Methods defined here:
__init__(self, client: dbacademy.dbrest.client.DBAcademyRestClient)
Initialize self.  See help(type(self)) for accurate signature.
-create(self, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001A42025FAF0> = None, libraries: <function PipelinesClient.list at 0x000001A42025FAF0> = None, clusters: <function PipelinesClient.list at 0x000001A42025FAF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)
+create(self, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, libraries: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, clusters: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)
create_from_dict(self, params: dict)
-create_or_update(self, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001A42025FAF0> = None, libraries: <function PipelinesClient.list at 0x000001A42025FAF0> = None, clusters: <function PipelinesClient.list at 0x000001A42025FAF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True, pipeline_id: Optional[str] = None)
+create_or_update(self, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, libraries: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, clusters: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True, pipeline_id: Optional[str] = None)
delete_by_id(self, pipeline_id)
@@ -75,7 +75,7 @@
start_by_name(self, name: str)
-update(self, pipeline_id: str, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001A42025FAF0> = None, libraries: <function PipelinesClient.list at 0x000001A42025FAF0> = None, clusters: <function PipelinesClient.list at 0x000001A42025FAF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)
+update(self, pipeline_id: str, name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, libraries: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, clusters: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)
update_from_dict(self, pipeline_id: str, params: dict)
@@ -83,7 +83,7 @@ Static methods defined here:
existing_to_create(pipeline)
-to_dict(name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001A42025FAF0> = None, libraries: <function PipelinesClient.list at 0x000001A42025FAF0> = None, clusters: <function PipelinesClient.list at 0x000001A42025FAF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)
+to_dict(name: str, storage: str, target: str, continuous: bool = False, development: bool = True, configuration: dict = None, notebooks: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, libraries: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, clusters: <function PipelinesClient.list at 0x000001D66CFC0AF0> = None, min_workers: int = 0, max_workers: int = 0, photon: bool = True)

Methods inherited from dbacademy.rest.common.ApiContainer:
diff --git a/docs/dbacademy.dbrest.sql.endpoints.html b/docs/dbacademy.dbrest.sql.endpoints.html
index aa148493..ff27fd9a 100644
--- a/docs/dbacademy.dbrest.sql.endpoints.html
+++ b/docs/dbacademy.dbrest.sql.endpoints.html
@@ -51,7 +51,7 @@
create_user_endpoint(self, user, naming_template: str, naming_params: dict, cluster_size: str, enable_serverless_compute: bool, min_num_clusters: int, max_num_clusters: int, auto_stop_mins: int, enable_photon: bool, spot_instance_policy: str, channel: str, tags: dict)
-create_user_endpoints(self, naming_template: str, naming_params: dict, cluster_size: str, enable_serverless_compute: bool, min_num_clusters: int = 1, max_num_clusters: int = 1, auto_stop_mins: int = 120, enable_photon: bool = True, spot_instance_policy: str = 'RELIABILITY_OPTIMIZED', channel: str = 'CHANNEL_NAME_CURRENT', tags: dict = None, users: <function SqlWarehousesClient.list at 0x000001A4202E50D0> = None)
-    Creates one SQL endpoint per user in the current workspace. The list of users can be limited to a subset of users with the "users" parameter.
+create_user_endpoints(self, naming_template: str, naming_params: dict, cluster_size: str, enable_serverless_compute: bool, min_num_clusters: int = 1, max_num_clusters: int = 1, auto_stop_mins: int = 120, enable_photon: bool = True, spot_instance_policy: str = 'RELIABILITY_OPTIMIZED', channel: str = 'CHANNEL_NAME_CURRENT', tags: dict = None, users: <function SqlWarehousesClient.list at 0x000001D66CEDC0D0> = None)
+    Creates one SQL endpoint per user in the current workspace. The list of users can be limited to a subset of users with the "users" parameter.
Parameters: 
    naming_template (str): The template used to name each user's endpoint.
    naming_params (str): The parameters used in completing the template.
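As a worked illustration of the template/parameter pairing described above, a hedged call sketch; the attribute path `client.sql.warehouses`, the `{da_name}` placeholder name, and the warehouse size are assumptions for illustration, not taken from this diff:

```python
# Sketch only: assumes `client` is an already-constructed
# dbacademy.dbrest.client.DBAcademyRestClient and that this SQL
# endpoints/warehouses client is reachable as `client.sql.warehouses`.
client.sql.warehouses.create_user_endpoints(
    naming_template="da-{da_name}-warehouse",     # assumed placeholder syntax
    naming_params={"da_name": "example-course"},  # values substituted into the template
    cluster_size="2X-Small",                      # assumed warehouse size
    enable_serverless_compute=False,
    tags={"dbacademy.source": "example"},
)
```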
@@ -72,7 +72,7 @@
delete_user_endpoint(self, user, naming_template: str, naming_params: dict)
-delete_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001A4202E50D0> = None)
+delete_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001D66CEDC0D0> = None)
edit(self, endpoint_id: str, name: str = None, cluster_size: str = None, enable_serverless_compute: bool = None, min_num_clusters: int = None, max_num_clusters: int = None, auto_stop_mins: int = None, enable_photon: bool = None, spot_instance_policy: str = None, channel: str = None, tags: dict = None)
@@ -86,13 +86,13 @@
start_user_endpoint(self, user, naming_template: str, naming_params: dict)
-start_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001A4202E50D0> = None)
+start_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001D66CEDC0D0> = None)
stop(self, endpoint_id)
stop_user_endpoint(self, user, naming_template: str, naming_params: dict)
-stop_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001A4202E50D0> = None)
+stop_user_endpoints(self, naming_template: str, naming_params: dict, users: <function SqlWarehousesClient.list at 0x000001D66CEDC0D0> = None)
update(self, endpoint_id: str, name: str = None, cluster_size: str = None, enable_serverless_compute: bool = None, min_num_clusters: int = None, max_num_clusters: int = None, auto_stop_mins: int = None, enable_photon: bool = None, spot_instance_policy: str = None, channel: str = None, tags: dict = None)
# TODO doug.bateman@databricks.com: Potential bugs.
# noinspection PyUnresolvedReferences
diff --git a/docs/dbacademy.dougrest.runs.html b/docs/dbacademy.dougrest.runs.html
index b0788ef5..6b33fd59 100644
--- a/docs/dbacademy.dougrest.runs.html
+++ b/docs/dbacademy.dougrest.runs.html
@@ -47,11 +47,11 @@
cancel(self, run: Union[int, dict], *, if_not_exists: str = 'error') -> dict
-cancel_all(self, job_id: int = None) -> <function Runs.list at 0x000001A42039DA60>
+cancel_all(self, job_id: int = None) -> <function Runs.list at 0x000001D66CFF5A60>
delete(self, run: Union[int, dict], *, if_not_exists: str = 'error') -> dict
-delete_all(self, job_id: int = None) -> <function Runs.list at 0x000001A42039DA60>
+delete_all(self, job_id: int = None) -> <function Runs.list at 0x000001D66CFF5A60>
get(self, run: Union[int, dict], *, if_not_exists: str = 'error') -> dict
# TODO Remove unused parameter
# noinspection PyUnusedLocal
diff --git a/docs/dbacademy.dougrest.workspace.html b/docs/dbacademy.dougrest.workspace.html
index 4a7537e6..d4b15678 100644
--- a/docs/dbacademy.dougrest.workspace.html
+++ b/docs/dbacademy.dougrest.workspace.html
@@ -69,13 +69,13 @@
is_empty(self, workspace_path)
-list(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001A42014BAF0>)
+list(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001D66CDBAAF0>)
-list_names(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001A42014BC10>)
+list_names(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001D66CDBAC10>)
mkdirs(self, workspace_path)
-walk(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001A42014BD30>)
-    Recursively list files into an iterator.  Sorting within a directory is done by the provided sort_key.
+walk(self, workspace_path, sort_key=<function Workspace.<lambda> at 0x000001D66CDBAD30>)
+    Recursively list files into an iterator.  Sorting within a directory is done by the provided sort_key.
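Following the `walk` docstring above, a small usage sketch; `workspace` is assumed to be an already-constructed dougrest Workspace client, and sorting by a "path" key is an illustrative assumption about the item payloads, not something these docs state:

```python
# Sketch only: `workspace` is assumed to be an existing Workspace client.
for name in workspace.list_names("/Shared"):      # immediate children, default sort
    print(name)

# Recursive traversal; the sort_key mirrors the documented keyword, while the
# dict-with-"path" item shape is an assumption about the REST payload.
for item in workspace.walk("/Shared", sort_key=lambda obj: obj["path"]):
    print(item)
```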

Static methods defined here:
diff --git a/src/dbacademy/clients/airtable/__init__.py b/src/dbacademy/clients/airtable/__init__.py
index 17a35ec3..f1f42d1b 100644
--- a/src/dbacademy/clients/airtable/__init__.py
+++ b/src/dbacademy/clients/airtable/__init__.py
@@ -5,8 +5,9 @@ class AirTable(object):
     from requests import Response
+    from dbacademy.clients import ClientErrorHandler
 
-    def __init__(self, *, access_token: str, base_id: str, table_id: str, error_handler: ErrorHandler = ErrorHandler()):
+    def __init__(self, *, access_token: str, base_id: str, table_id: str, error_handler: ClientErrorHandler = ClientErrorHandler()):
         self.__base_id = base_id
         self.__table_id = table_id
         self.__access_token = access_token
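One nuance of the hunk above deserves a note: the default `ClientErrorHandler()` in the `__init__` signature is evaluated once, when the class body is executed, which is why the regenerated docs embed a single `<ClientErrorHandler object at 0x...>` repr. A sketch of the practical consequence, reusing the hypothetical `RaisingErrorHandler` from the earlier sketch; tokens and IDs remain placeholders:

```python
from dbacademy.clients.airtable import AirTable

# Every AirTable constructed without an explicit error_handler shares the one
# default instance created at import time, so per-client behavior means
# passing your own handler.
primary = AirTable(access_token="pat-XXXX",
                   base_id="appXXXXXXXXXXXXXX",
                   table_id="tblAAAAAAAAAAAAAA",
                   error_handler=RaisingErrorHandler())  # hypothetical subclass defined above
secondary = AirTable(access_token="pat-XXXX",
                     base_id="appXXXXXXXXXXXXXX",
                     table_id="tblBBBBBBBBBBBBBB")       # falls back to the shared default
```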