diff --git a/.codegen.json b/.codegen.json index 65077c1cc..c5c1a82f3 100644 --- a/.codegen.json +++ b/.codegen.json @@ -15,7 +15,6 @@ ], "post_generate": [ "make fmt", - "pytest -m 'not integration' --cov=databricks --cov-report html tests", "pip install .", "python3.12 docs/gen-client-docs.py" ] diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 27c63e442..fe9f54bb2 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -608df7153d64c19e2d255144c9935fd4ed45900a \ No newline at end of file +universe:/home/parth.bansal/vn1/universe \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index e68ce1aca..6f9ebe3d9 100755 --- a/.gitattributes +++ b/.gitattributes @@ -6,13 +6,18 @@ databricks/sdk/service/apps.py linguist-generated=true databricks/sdk/service/billing.py linguist-generated=true databricks/sdk/service/catalog.py linguist-generated=true databricks/sdk/service/cleanrooms.py linguist-generated=true +databricks/sdk/service/common.py linguist-generated=true databricks/sdk/service/compute.py linguist-generated=true databricks/sdk/service/dashboards.py linguist-generated=true databricks/sdk/service/database.py linguist-generated=true +databricks/sdk/service/dataquality.py linguist-generated=true databricks/sdk/service/files.py linguist-generated=true +databricks/sdk/service/httpcallv2.py linguist-generated=true databricks/sdk/service/iam.py linguist-generated=true databricks/sdk/service/iamv2.py linguist-generated=true databricks/sdk/service/jobs.py linguist-generated=true +databricks/sdk/service/jsonmarshallv2.py linguist-generated=true +databricks/sdk/service/lrotesting.py linguist-generated=true databricks/sdk/service/marketplace.py linguist-generated=true databricks/sdk/service/ml.py linguist-generated=true databricks/sdk/service/oauth2.py linguist-generated=true @@ -27,3 +32,4 @@ databricks/sdk/service/sql.py linguist-generated=true databricks/sdk/service/tags.py linguist-generated=true databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true +test_json_marshall.py linguist-generated=true diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index ef42d1c34..dcabb2e4a 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -21,6 +21,7 @@ from databricks.sdk.service import compute as pkg_compute from databricks.sdk.service import dashboards as pkg_dashboards from databricks.sdk.service import database as pkg_database +from databricks.sdk.service import dataquality as pkg_dataquality from databricks.sdk.service import files as pkg_files from databricks.sdk.service import iam as pkg_iam from databricks.sdk.service import iamv2 as pkg_iamv2 @@ -43,7 +44,7 @@ from databricks.sdk.service.apps import AppsAPI, AppsSettingsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, BudgetsAPI, LogDeliveryAPI, - UsageDashboardsAPI) + UsageDashboardsAPI, UsagePolicyAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, @@ -77,8 +78,10 @@ PolicyComplianceForClustersAPI, PolicyFamiliesAPI) from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, - LakeviewEmbeddedAPI) -from databricks.sdk.service.database import DatabaseAPI + LakeviewEmbeddedAPI, + QueryExecutionAPI) +from databricks.sdk.service.database import DatabaseAPI, DatabaseProjectAPI +from databricks.sdk.service.dataquality import DataQualityAPI from 
databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccessControlAPI, AccountAccessControlAPI, @@ -152,7 +155,7 @@ QueryVisualizationsLegacyAPI, RedashConfigAPI, StatementExecutionAPI, WarehousesAPI) -from databricks.sdk.service.tags import TagPoliciesAPI +from databricks.sdk.service.tags import TagAssignmentsAPI, TagPoliciesAPI from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI, @@ -282,8 +285,10 @@ def __init__( self._current_user = pkg_iam.CurrentUserAPI(self._api_client) self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client) self._dashboards = pkg_sql.DashboardsAPI(self._api_client) + self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client) self._data_sources = pkg_sql.DataSourcesAPI(self._api_client) self._database = pkg_database.DatabaseAPI(self._api_client) + self._database_project = pkg_database.DatabaseProjectAPI(self._api_client) self._dbfs = DbfsExt(self._api_client) self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client) self._entity_tag_assignments = pkg_catalog.EntityTagAssignmentsAPI(self._api_client) @@ -294,6 +299,7 @@ def __init__( self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) + self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._functions = pkg_catalog.FunctionsAPI(self._api_client) self._genie = pkg_dashboards.GenieAPI(self._api_client) self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client) @@ -334,6 +340,7 @@ def __init__( self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client) self._queries = pkg_sql.QueriesAPI(self._api_client) self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client) + self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client) self._query_history = pkg_sql.QueryHistoryAPI(self._api_client) self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client) self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client) @@ -363,6 +370,7 @@ def __init__( self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client) self._table_constraints = pkg_catalog.TableConstraintsAPI(self._api_client) self._tables = pkg_catalog.TablesAPI(self._api_client) + self._tag_assignments = pkg_tags.TagAssignmentsAPI(self._api_client) self._tag_policies = pkg_tags.TagPoliciesAPI(self._api_client) self._temporary_path_credentials = pkg_catalog.TemporaryPathCredentialsAPI(self._api_client) self._temporary_table_credentials = pkg_catalog.TemporaryTableCredentialsAPI(self._api_client) @@ -376,9 +384,8 @@ def __init__( self._workspace = WorkspaceExt(self._api_client) self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client) self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client) - self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) - self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client) + self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) self._groups = pkg_iam.GroupsAPI(self._api_client) self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._users = pkg_iam.UsersAPI(self._api_client) @@ -540,6 
+547,11 @@ def dashboards(self) -> pkg_sql.DashboardsAPI: """In general, there is little need to modify dashboards using the API.""" return self._dashboards + @property + def data_quality(self) -> pkg_dataquality.DataQualityAPI: + """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`).""" + return self._data_quality + @property def data_sources(self) -> pkg_sql.DataSourcesAPI: """This API is provided to assist you in making new query objects.""" @@ -550,6 +562,11 @@ def database(self) -> pkg_database.DatabaseAPI: """Database Instances provide access to a database via REST API or direct SQL.""" return self._database + @property + def database_project(self) -> pkg_database.DatabaseProjectAPI: + """Database Projects provide access to a database via REST API or direct SQL.""" + return self._database_project + @property def dbfs(self) -> DbfsExt: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" @@ -600,6 +617,11 @@ def files(self) -> pkg_files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" return self._files + @property + def forecasting(self) -> pkg_ml.ForecastingAPI: + """The Forecasting API allows you to create and get serverless forecasting experiments.""" + return self._forecasting + @property def functions(self) -> pkg_catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" @@ -790,6 +812,11 @@ def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy + @property + def query_execution(self) -> pkg_dashboards.QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards.""" + return self._query_execution + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" @@ -910,6 +937,11 @@ def tables(self) -> pkg_catalog.TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables + @property + def tag_assignments(self) -> pkg_tags.TagAssignmentsAPI: + """Manage tag assignments on workspace-scoped objects.""" + return self._tag_assignments + @property def tag_policies(self) -> pkg_tags.TagPoliciesAPI: """The Tag Policy API allows you to manage policies for governed tags in Databricks.""" @@ -975,21 +1007,16 @@ def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf - @property - def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: - """APIs to manage workspace level settings.""" - return self._workspace_settings_v2 - - @property - def forecasting(self) -> pkg_ml.ForecastingAPI: - """The Forecasting API allows you to create and get serverless forecasting experiments.""" - return self._forecasting - @property def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API: """These APIs are used to manage identities and the workspace access of these identities in .""" return self._workspace_iam_v2 + @property + def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: + """APIs to manage workspace level settings.""" + return self._workspace_settings_v2 + @property def groups(self) -> pkg_iam.GroupsAPI: """Groups 
simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" @@ -1082,11 +1109,13 @@ def __init__( self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client) self._billable_usage = pkg_billing.BillableUsageAPI(self._api_client) self._budget_policy = pkg_billing.BudgetPolicyAPI(self._api_client) + self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._credentials = pkg_provisioning.CredentialsAPI(self._api_client) self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client) self._encryption_keys = pkg_provisioning.EncryptionKeysAPI(self._api_client) self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client) self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client) + self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client) self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client) self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client) @@ -1105,13 +1134,12 @@ def __init__( self._storage = pkg_provisioning.StorageAPI(self._api_client) self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client) self._usage_dashboards = pkg_billing.UsageDashboardsAPI(self._api_client) + self._usage_policy = pkg_billing.UsagePolicyAPI(self._api_client) self._users_v2 = pkg_iam.AccountUsersV2API(self._api_client) self._vpc_endpoints = pkg_provisioning.VpcEndpointsAPI(self._api_client) self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client) self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client) self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client) - self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) - self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._groups = pkg_iam.AccountGroupsAPI(self._api_client) self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) self._users = pkg_iam.AccountUsersAPI(self._api_client) @@ -1139,6 +1167,11 @@ def budget_policy(self) -> pkg_billing.BudgetPolicyAPI: """A service serves REST API about Budget policies.""" return self._budget_policy + @property + def budgets(self) -> pkg_billing.BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + @property def credentials(self) -> pkg_provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -1164,6 +1197,11 @@ def groups_v2(self) -> pkg_iam.AccountGroupsV2API: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups_v2 + @property + def iam_v2(self) -> pkg_iamv2.AccountIamV2API: + """These APIs are used to manage identities and the workspace access of these identities in .""" + return self._iam_v2 + @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.""" @@ -1254,6 +1292,11 @@ def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI: """These APIs manage usage dashboards for this account.""" return self._usage_dashboards + @property + def usage_policy(self) -> pkg_billing.UsagePolicyAPI: + """A service serves REST API about Usage policies.""" + return self._usage_policy + @property def 
users_v2(self) -> pkg_iam.AccountUsersV2API: """User identities recognized by Databricks and represented by email addresses.""" @@ -1279,16 +1322,6 @@ def workspaces(self) -> pkg_provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces - @property - def iam_v2(self) -> pkg_iamv2.AccountIamV2API: - """These APIs are used to manage identities and the workspace access of these identities in .""" - return self._iam_v2 - - @property - def budgets(self) -> pkg_billing.BudgetsAPI: - """These APIs manage budget configurations for this account.""" - return self._budgets - @property def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" diff --git a/databricks/sdk/service/agentbricks.py b/databricks/sdk/service/agentbricks.py index 25175acf0..b2a560e31 100755 --- a/databricks/sdk/service/agentbricks.py +++ b/databricks/sdk/service/agentbricks.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Dict, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index aeedb7146..742c4d188 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -33,6 +35,8 @@ class App: budget_policy_id: Optional[str] = None + compute_size: Optional[ComputeSize] = None + compute_status: Optional[ComputeStatus] = None create_time: Optional[str] = None @@ -50,6 +54,8 @@ class App: effective_budget_policy_id: Optional[str] = None + effective_usage_policy_id: Optional[str] = None + effective_user_api_scopes: Optional[List[str]] = None """The effective api scopes granted to the user access token.""" @@ -82,6 +88,8 @@ class App: url: Optional[str] = None """The URL of the app once it is deployed.""" + usage_policy_id: Optional[str] = None + user_api_scopes: Optional[List[str]] = None def as_dict(self) -> dict: @@ -93,6 +101,8 @@ def as_dict(self) -> dict: body["app_status"] = self.app_status.as_dict() if self.budget_policy_id is not None: body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size.value if self.compute_status: body["compute_status"] = self.compute_status.as_dict() if self.create_time is not None: @@ -105,6 +115,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.effective_user_api_scopes: body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes] if self.id is not None: @@ -131,6 +143,8 @@ def as_dict(self) -> dict: body["updater"] = self.updater if self.url is not None: body["url"] = self.url + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.user_api_scopes: 
body["user_api_scopes"] = [v for v in self.user_api_scopes] return body @@ -144,6 +158,8 @@ def as_shallow_dict(self) -> dict: body["app_status"] = self.app_status if self.budget_policy_id is not None: body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size if self.compute_status: body["compute_status"] = self.compute_status if self.create_time is not None: @@ -156,6 +172,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.effective_user_api_scopes: body["effective_user_api_scopes"] = self.effective_user_api_scopes if self.id is not None: @@ -182,6 +200,8 @@ def as_shallow_dict(self) -> dict: body["updater"] = self.updater if self.url is not None: body["url"] = self.url + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.user_api_scopes: body["user_api_scopes"] = self.user_api_scopes return body @@ -193,12 +213,14 @@ def from_dict(cls, d: Dict[str, Any]) -> App: active_deployment=_from_dict(d, "active_deployment", AppDeployment), app_status=_from_dict(d, "app_status", ApplicationStatus), budget_policy_id=d.get("budget_policy_id", None), + compute_size=_enum(d, "compute_size", ComputeSize), compute_status=_from_dict(d, "compute_status", ComputeStatus), create_time=d.get("create_time", None), creator=d.get("creator", None), default_source_code_path=d.get("default_source_code_path", None), description=d.get("description", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), effective_user_api_scopes=d.get("effective_user_api_scopes", None), id=d.get("id", None), name=d.get("name", None), @@ -212,6 +234,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: update_time=d.get("update_time", None), updater=d.get("updater", None), url=d.get("url", None), + usage_policy_id=d.get("usage_policy_id", None), user_api_scopes=d.get("user_api_scopes", None), ) @@ -918,6 +941,8 @@ class AppResource: description: Optional[str] = None """Description of the App Resource.""" + genie_space: Optional[AppResourceGenieSpace] = None + job: Optional[AppResourceJob] = None secret: Optional[AppResourceSecret] = None @@ -935,6 +960,8 @@ def as_dict(self) -> dict: body["database"] = self.database.as_dict() if self.description is not None: body["description"] = self.description + if self.genie_space: + body["genie_space"] = self.genie_space.as_dict() if self.job: body["job"] = self.job.as_dict() if self.name is not None: @@ -956,6 +983,8 @@ def as_shallow_dict(self) -> dict: body["database"] = self.database if self.description is not None: body["description"] = self.description + if self.genie_space: + body["genie_space"] = self.genie_space if self.job: body["job"] = self.job if self.name is not None: @@ -976,6 +1005,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AppResource: return cls( database=_from_dict(d, "database", AppResourceDatabase), description=d.get("description", None), + genie_space=_from_dict(d, "genie_space", AppResourceGenieSpace), job=_from_dict(d, "job", AppResourceJob), name=d.get("name", None), secret=_from_dict(d, "secret", AppResourceSecret), @@ -1030,6 +1060,54 @@ class AppResourceDatabaseDatabasePermission(Enum): 
CAN_CONNECT_AND_CREATE = "CAN_CONNECT_AND_CREATE" +@dataclass +class AppResourceGenieSpace: + name: str + + space_id: str + + permission: AppResourceGenieSpaceGenieSpacePermission + + def as_dict(self) -> dict: + """Serializes the AppResourceGenieSpace into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission.value + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceGenieSpace into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppResourceGenieSpace: + """Deserializes the AppResourceGenieSpace from a dictionary.""" + return cls( + name=d.get("name", None), + permission=_enum(d, "permission", AppResourceGenieSpaceGenieSpacePermission), + space_id=d.get("space_id", None), + ) + + +class AppResourceGenieSpaceGenieSpacePermission(Enum): + + CAN_EDIT = "CAN_EDIT" + CAN_MANAGE = "CAN_MANAGE" + CAN_RUN = "CAN_RUN" + CAN_VIEW = "CAN_VIEW" + + @dataclass class AppResourceJob: id: str @@ -1259,6 +1337,112 @@ class AppResourceUcSecurableUcSecurableType(Enum): VOLUME = "VOLUME" +@dataclass +class AppUpdate: + budget_policy_id: Optional[str] = None + + compute_size: Optional[ComputeSize] = None + + description: Optional[str] = None + + resources: Optional[List[AppResource]] = None + + status: Optional[AppUpdateUpdateStatus] = None + + usage_policy_id: Optional[str] = None + + user_api_scopes: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the AppUpdate into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size.value + if self.description is not None: + body["description"] = self.description + if self.resources: + body["resources"] = [v.as_dict() for v in self.resources] + if self.status: + body["status"] = self.status.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id + if self.user_api_scopes: + body["user_api_scopes"] = [v for v in self.user_api_scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size + if self.description is not None: + body["description"] = self.description + if self.resources: + body["resources"] = self.resources + if self.status: + body["status"] = self.status + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id + if self.user_api_scopes: + body["user_api_scopes"] = self.user_api_scopes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppUpdate: + """Deserializes the AppUpdate from a dictionary.""" + return cls( + budget_policy_id=d.get("budget_policy_id", None), + compute_size=_enum(d, "compute_size", ComputeSize), + description=d.get("description", None), + 
resources=_repeated_dict(d, "resources", AppResource), + status=_from_dict(d, "status", AppUpdateUpdateStatus), + usage_policy_id=d.get("usage_policy_id", None), + user_api_scopes=d.get("user_api_scopes", None), + ) + + +@dataclass +class AppUpdateUpdateStatus: + message: Optional[str] = None + + state: Optional[AppUpdateUpdateStatusUpdateState] = None + + def as_dict(self) -> dict: + """Serializes the AppUpdateUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppUpdateUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppUpdateUpdateStatus: + """Deserializes the AppUpdateUpdateStatus from a dictionary.""" + return cls(message=d.get("message", None), state=_enum(d, "state", AppUpdateUpdateStatusUpdateState)) + + +class AppUpdateUpdateStatusUpdateState(Enum): + + FAILED = "FAILED" + IN_PROGRESS = "IN_PROGRESS" + NOT_UPDATED = "NOT_UPDATED" + SUCCEEDED = "SUCCEEDED" + + class ApplicationState(Enum): CRASHED = "CRASHED" @@ -1299,6 +1483,12 @@ def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: return cls(message=d.get("message", None), state=_enum(d, "state", ApplicationState)) +class ComputeSize(Enum): + + LARGE = "LARGE" + MEDIUM = "MEDIUM" + + class ComputeState(Enum): ACTIVE = "ACTIVE" @@ -1582,6 +1772,37 @@ def wait_get_app_active( attempt += 1 raise TimeoutError(f"timed out after {timeout}: {status_message}") + def wait_get_update_app_succeeded( + self, app_name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[AppUpdate], None]] = None + ) -> AppUpdate: + deadline = time.time() + timeout.total_seconds() + target_states = (AppUpdateUpdateStatusUpdateState.SUCCEEDED,) + failure_states = (AppUpdateUpdateStatusUpdateState.FAILED,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get_update(app_name=app_name) + status = poll.status.state + status_message = f"current status: {status}" + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"app_name={app_name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + def wait_get_deployment_app_succeeded( self, app_name: str, @@ -1674,6 +1895,45 @@ def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App: return self.create(app=app, no_compute=no_compute).result(timeout=timeout) + def create_update(self, app_name: str, update_mask: str, *, app: Optional[App] = None) -> Wait[AppUpdate]: + """Creates an app update and starts the update process. The update process is asynchronous and the status + of the update can be checked with the GetAppUpdate method. 
+ + :param app_name: str + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + :param app: :class:`App` (optional) + + :returns: + Long-running operation waiter for :class:`AppUpdate`. + See :method:wait_get_update_app_succeeded for more details. + """ + body = {} + if app is not None: + body["app"] = app.as_dict() + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/apps/{app_name}/update", body=body, headers=headers) + return Wait(self.wait_get_update_app_succeeded, response=AppUpdate.from_dict(op_response), app_name=app_name) + + def create_update_and_wait( + self, app_name: str, update_mask: str, *, app: Optional[App] = None, timeout=timedelta(minutes=20) + ) -> AppUpdate: + return self.create_update(app=app, app_name=app_name, update_mask=update_mask).result(timeout=timeout) + def delete(self, name: str) -> App: """Deletes an app. @@ -1787,6 +2047,22 @@ def get_permissions(self, app_name: str) -> AppPermissions: res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}", headers=headers) return AppPermissions.from_dict(res) + def get_update(self, app_name: str) -> AppUpdate: + """Gets the status of an app update. + + :param app_name: str + The name of the app. + + :returns: :class:`AppUpdate` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/apps/{app_name}/update", headers=headers) + return AppUpdate.from_dict(res) + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: """Lists all apps in the workspace. 
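
A minimal usage sketch for the asynchronous app-update flow added to apps.py above (reviewer illustration, not part of the generated diff; the app name, description, and compute-size values are placeholder assumptions):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.apps import App, ComputeSize

    w = WorkspaceClient()  # credentials resolved from the environment

    # Stage the desired state and name the changed fields in update_mask
    # (comma-separated, no spaces). create_update_and_wait() starts the
    # update, then polls get_update() via wait_get_update_app_succeeded()
    # until the state is SUCCEEDED; OperationFailed is raised on FAILED.
    desired = App(
        name="my-app",  # hypothetical app name
        description="updated description",
        compute_size=ComputeSize.MEDIUM,
    )
    update = w.apps.create_update_and_wait(
        app_name="my-app",
        update_mask="description,compute_size",
        app=desired,
    )
    print(update.status.state)

The two-step form, w.apps.create_update(...) returning a Wait[AppUpdate] followed by .result(), behaves the same and lets the caller do other work before blocking on completion.
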
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 2e118457a..55ed5f043 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, BinaryIO, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -1034,6 +1034,50 @@ def from_dict(cls, d: Dict[str, Any]) -> ListBudgetPoliciesResponse: ) +@dataclass +class ListUsagePoliciesResponse: + """A list of usage policies.""" + + next_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page.""" + + policies: Optional[List[UsagePolicy]] = None + + previous_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the previous page.""" + + def as_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsagePoliciesResponse: + """Deserializes the ListUsagePoliciesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + policies=_repeated_dict(d, "policies", UsagePolicy), + previous_page_token=d.get("previous_page_token", None), + ) + + class LogDeliveryConfigStatus(Enum): """* Log Delivery Status @@ -1068,9 +1112,6 @@ class LogDeliveryConfiguration: [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - account_id: str - """Databricks account ID.""" - credentials_id: str """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page. See [Configure billable @@ -1084,6 +1125,9 @@ class LogDeliveryConfiguration: [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" + account_id: Optional[str] = None + """Databricks account ID.""" + config_id: Optional[str] = None """The unique UUID of log delivery configuration""" @@ -1434,6 +1478,59 @@ class UsageDashboardType(Enum): USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" +@dataclass +class UsagePolicy: + """Contains the UsagePolicy details (same structure as BudgetPolicy)""" + + binding_workspace_ids: Optional[List[int]] = None + """List of workspaces that this usage policy will be exclusively bound to.""" + + custom_tags: Optional[List[compute.CustomPolicyTag]] = None + """A list of tags defined by the customer. 
At most 20 entries are allowed per policy.""" + + policy_id: Optional[str] = None + """The Id of the policy. This field is generated by Databricks and globally unique.""" + + policy_name: Optional[str] = None + """The name of the policy.""" + + def as_dict(self) -> dict: + """Serializes the UsagePolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.binding_workspace_ids: + body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids] + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UsagePolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.binding_workspace_ids: + body["binding_workspace_ids"] = self.binding_workspace_ids + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UsagePolicy: + """Deserializes the UsagePolicy from a dictionary.""" + return cls( + binding_workspace_ids=d.get("binding_workspace_ids", None), + custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag), + policy_id=d.get("policy_id", None), + policy_name=d.get("policy_name", None), + ) + + @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None @@ -2065,3 +2162,144 @@ def get( res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/dashboard", query=query, headers=headers) return GetBillingUsageDashboardResponse.from_dict(res) + + +class UsagePolicyAPI: + """A service serves REST API about Usage policies""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, policy: Optional[UsagePolicy] = None, request_id: Optional[str] = None) -> UsagePolicy: + """Creates a new usage policy. + + :param policy: :class:`UsagePolicy` (optional) + The policy to create. `policy_id` needs to be empty as it will be generated + :param request_id: str (optional) + A unique identifier for this request. Restricted to 36 ASCII characters. + + :returns: :class:`UsagePolicy` + """ + body = {} + if policy is not None: + body["policy"] = policy.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", body=body, headers=headers + ) + return UsagePolicy.from_dict(res) + + def delete(self, policy_id: str): + """Deletes a usage policy + + :param policy_id: str + The Id of the policy. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers) + + def get(self, policy_id: str) -> UsagePolicy: + """Retrieves a usage policy by it's ID. + + :param policy_id: str + The Id of the policy. 
+ + :returns: :class:`UsagePolicy` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers + ) + return UsagePolicy.from_dict(res) + + def list( + self, + *, + filter_by: Optional[Filter] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_spec: Optional[SortSpec] = None, + ) -> Iterator[UsagePolicy]: + """Lists all usage policies. Policies are returned in the alphabetically ascending order of their names. + + :param filter_by: :class:`Filter` (optional) + A filter to apply to the list of policies. + :param page_size: int (optional) + The maximum number of usage policies to return. + :param page_token: str (optional) + A page token, received from a previous `ListUsagePolicies` call. + :param sort_spec: :class:`SortSpec` (optional) + The sort specification. + + :returns: Iterator over :class:`UsagePolicy` + """ + + query = {} + if filter_by is not None: + query["filter_by"] = filter_by.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if sort_spec is not None: + query["sort_spec"] = sort_spec.as_dict() + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", query=query, headers=headers + ) + if "policies" in json: + for v in json["policies"]: + yield UsagePolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, policy_id: str, policy: UsagePolicy, *, limit_config: Optional[LimitConfig] = None) -> UsagePolicy: + """Updates a usage policy + + :param policy_id: str + The Id of the policy. This field is generated by Databricks and globally unique. + :param policy: :class:`UsagePolicy` + The policy to update. `creator_user_id` cannot be specified in the request. + :param limit_config: :class:`LimitConfig` (optional) + DEPRECATED. 
This is redundant field as LimitConfig is part of the UsagePolicy + + :returns: :class:`UsagePolicy` + """ + body = policy.as_dict() + query = {} + if limit_config is not None: + query["limit_config"] = limit_config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", + query=query, + body=body, + headers=headers, + ) + return UsagePolicy.from_dict(res) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 3ac709a89..a20075b57 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -63,8 +65,191 @@ def from_dict(cls, d: Dict[str, Any]) -> AccessRequestDestinations: ) +@dataclass +class AccountsCreateMetastoreAssignmentResponse: + """The metastore assignment was successfully created.""" + + def as_dict(self) -> dict: + """Serializes the AccountsCreateMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignmentResponse: + """Deserializes the AccountsCreateMetastoreAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsCreateMetastoreResponse: + metastore_info: Optional[MetastoreInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsCreateMetastoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateMetastoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreResponse: + """Deserializes the AccountsCreateMetastoreResponse from a dictionary.""" + return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) + + +@dataclass +class AccountsCreateStorageCredentialInfo: + credential_info: Optional[StorageCredentialInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsCreateStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredentialInfo: + """Deserializes the AccountsCreateStorageCredentialInfo from a dictionary.""" + return 
cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) + + +@dataclass +class AccountsDeleteMetastoreAssignmentResponse: + """The metastore assignment was successfully deleted.""" + + def as_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteMetastoreAssignmentResponse: + """Deserializes the AccountsDeleteMetastoreAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsDeleteMetastoreResponse: + """The metastore was successfully deleted.""" + + def as_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteMetastoreResponse: + """Deserializes the AccountsDeleteMetastoreResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsDeleteStorageCredentialResponse: + """The storage credential was successfully deleted.""" + + def as_dict(self) -> dict: + """Serializes the AccountsDeleteStorageCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteStorageCredentialResponse: + """Deserializes the AccountsDeleteStorageCredentialResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsGetMetastoreResponse: + """The metastore was successfully returned.""" + + metastore_info: Optional[MetastoreInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsGetMetastoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsGetMetastoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsGetMetastoreResponse: + """Deserializes the AccountsGetMetastoreResponse from a dictionary.""" + return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) + + +@dataclass +class AccountsListMetastoresResponse: + """Metastores were returned successfully.""" + + metastores: Optional[List[MetastoreInfo]] = None + """An array of metastore information objects.""" + + def as_dict(self) -> dict: + """Serializes the AccountsListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastores: + body["metastores"] = [v.as_dict() for v in self.metastores] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsListMetastoresResponse into a shallow dictionary 
of its immediate attributes.""" + body = {} + if self.metastores: + body["metastores"] = self.metastores + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsListMetastoresResponse: + """Deserializes the AccountsListMetastoresResponse from a dictionary.""" + return cls(metastores=_repeated_dict(d, "metastores", MetastoreInfo)) + + @dataclass class AccountsMetastoreAssignment: + """The workspace metastore assignment was successfully returned.""" + metastore_assignment: Optional[MetastoreAssignment] = None def as_dict(self) -> dict: @@ -88,50 +273,100 @@ def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreAssignment: @dataclass -class AccountsMetastoreInfo: +class AccountsStorageCredentialInfo: + """The storage credential was successfully retrieved.""" + + credential_info: Optional[StorageCredentialInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: + """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" + return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) + + +@dataclass +class AccountsUpdateMetastoreAssignmentResponse: + """The metastore assignment was successfully updated.""" + + def as_dict(self) -> dict: + """Serializes the AccountsUpdateMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsUpdateMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignmentResponse: + """Deserializes the AccountsUpdateMetastoreAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsUpdateMetastoreResponse: + """The metastore update request succeeded.""" + metastore_info: Optional[MetastoreInfo] = None def as_dict(self) -> dict: - """Serializes the AccountsMetastoreInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the AccountsUpdateMetastoreResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.metastore_info: body["metastore_info"] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the AccountsUpdateMetastoreResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.metastore_info: body["metastore_info"] = self.metastore_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreInfo: - """Deserializes the AccountsMetastoreInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreResponse: + """Deserializes the AccountsUpdateMetastoreResponse from a dictionary.""" return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) @dataclass -class 
AccountsStorageCredentialInfo: +class AccountsUpdateStorageCredentialResponse: + """The storage credential was successfully updated.""" + credential_info: Optional[StorageCredentialInfo] = None def as_dict(self) -> dict: - """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the AccountsUpdateStorageCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.credential_info: body["credential_info"] = self.credential_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the AccountsUpdateStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.credential_info: body["credential_info"] = self.credential_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: - """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredentialResponse: + """Deserializes the AccountsUpdateStorageCredentialResponse from a dictionary.""" return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) @@ -807,12 +1042,18 @@ class CatalogInfo: connection_name: Optional[str] = None """The name of the connection to an external data source.""" + conversion_info: Optional[ConversionInfo] = None + """Status of conversion of FOREIGN catalog to UC Native catalog.""" + created_at: Optional[int] = None """Time at which this catalog was created, in epoch milliseconds.""" created_by: Optional[str] = None """Username of catalog creator.""" + dr_replication_info: Optional[DrReplicationInfo] = None + """Disaster Recovery replication state snapshot.""" + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None @@ -874,10 +1115,14 @@ def as_dict(self) -> dict: body["comment"] = self.comment if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.conversion_info: + body["conversion_info"] = self.conversion_info.as_dict() if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.dr_replication_info: + body["dr_replication_info"] = self.dr_replication_info.as_dict() if self.effective_predictive_optimization_flag: body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() if self.enable_predictive_optimization is not None: @@ -925,10 +1170,14 @@ def as_shallow_dict(self) -> dict: body["comment"] = self.comment if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.conversion_info: + body["conversion_info"] = self.conversion_info if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.dr_replication_info: + body["dr_replication_info"] = self.dr_replication_info if self.effective_predictive_optimization_flag: body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag if self.enable_predictive_optimization is not None: @@ -973,8 +1222,10 @@ def from_dict(cls, d: Dict[str, Any]) -> CatalogInfo: catalog_type=_enum(d, "catalog_type", CatalogType), 
comment=d.get("comment", None), connection_name=d.get("connection_name", None), + conversion_info=_from_dict(d, "conversion_info", ConversionInfo), created_at=d.get("created_at", None), created_by=d.get("created_by", None), + dr_replication_info=_from_dict(d, "dr_replication_info", DrReplicationInfo), effective_predictive_optimization_flag=_from_dict( d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag ), @@ -1025,7 +1276,7 @@ class CloudflareApiToken: secret_access_key: str """The secret access token generated for the above access key ID.""" - account_id: str + account_id: Optional[str] = None """The ID of the account associated with the API token.""" def as_dict(self) -> dict: @@ -1363,6 +1614,9 @@ class ConnectionInfo: credential_type: Optional[CredentialType] = None """The type of credential.""" + environment_settings: Optional[EnvironmentSettings] = None + """[Create,Update:OPT] Connection environment settings as EnvironmentSettings object.""" + full_name: Optional[str] = None """Full name of connection.""" @@ -1412,6 +1666,8 @@ def as_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type.value + if self.environment_settings: + body["environment_settings"] = self.environment_settings.as_dict() if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1453,6 +1709,8 @@ def as_shallow_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type + if self.environment_settings: + body["environment_settings"] = self.environment_settings if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1489,6 +1747,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: created_at=d.get("created_at", None), created_by=d.get("created_by", None), credential_type=_enum(d, "credential_type", CredentialType), + environment_settings=_from_dict(d, "environment_settings", EnvironmentSettings), full_name=d.get("full_name", None), metastore_id=d.get("metastore_id", None), name=d.get("name", None), @@ -1578,6 +1837,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: ) +@dataclass +class ConversionInfo: + """Status of conversion of FOREIGN entity into UC Native entity.""" + + state: Optional[ConversionInfoState] = None + """The conversion state of the resource.""" + + def as_dict(self) -> dict: + """Serializes the ConversionInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConversionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConversionInfo: + """Deserializes the ConversionInfo from a dictionary.""" + return cls(state=_enum(d, "state", ConversionInfoState)) + + +class ConversionInfoState(Enum): + + COMPLETED = "COMPLETED" + IN_PROGRESS = "IN_PROGRESS" + + @dataclass class CreateAccessRequest: behalf_of: Optional[Principal] = None @@ -1665,18 +1957,145 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAccessRequestResponse: ) +@dataclass +class CreateAccountsMetastore: + name: str + """The user-specified name of the metastore.""" + + region: Optional[str] = None + """Cloud region 
which the metastore serves (e.g., `us-west-2`, `westus`).""" + + storage_root: Optional[str] = None + """The storage root URL for metastore""" + + def as_dict(self) -> dict: + """Serializes the CreateAccountsMetastore into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccountsMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccountsMetastore: + """Deserializes the CreateAccountsMetastore from a dictionary.""" + return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) + + +@dataclass +class CreateAccountsStorageCredential: + name: str + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the CreateAccountsStorageCredential into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.name is not None: + body["name"] = self.name + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccountsStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.name is not None: + body["name"] = self.name + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccountsStorageCredential: + """Deserializes the CreateAccountsStorageCredential from a dictionary.""" + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + name=d.get("name", None), + read_only=d.get("read_only", None), + ) + + @dataclass class CreateFunction: name: str """Name of function, relative to parent schema.""" catalog_name: str - """Name of parent catalog.""" + """Name of parent Catalog.""" schema_name: str - """Name of parent schema relative to its parent catalog.""" + """Name of parent Schema relative to its parent Catalog.""" input_params: FunctionParameterInfos + """Function input parameters.""" data_type: ColumnTypeName """Scalar function return data type.""" @@ -1686,8 +2105,8 @@ class CreateFunction: routine_body: CreateFunctionRoutineBody """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + specified in the **external_language** field, and the **return_params** of the function cannot + be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be **NO_SQL**.""" routine_definition: str @@ -1727,7 +2146,7 @@ class CreateFunction: """Table function return parameters.""" routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + """function dependencies.""" sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" @@ -1855,76 +2274,33 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: class CreateFunctionParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" S = "S" class CreateFunctionRoutineBody(Enum): - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" EXTERNAL = "EXTERNAL" SQL = "SQL" class CreateFunctionSecurityType(Enum): - """The security type of the function.""" DEFINER = "DEFINER" class CreateFunctionSqlDataAccess(Enum): - """Function SQL data access.""" CONTAINS_SQL = "CONTAINS_SQL" NO_SQL = "NO_SQL" READS_SQL_DATA = "READS_SQL_DATA" -@dataclass -class CreateMetastore: - name: str - """The user-specified name of the metastore.""" - - region: Optional[str] = None - """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - - storage_root: Optional[str] = None - """The storage root URL for metastore""" - - def as_dict(self) -> dict: - """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: - """Deserializes the CreateMetastore from a dictionary.""" - return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) - - @dataclass class CreateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + metastore_id: str """The unique ID of the metastore.""" @@ -1932,9 +2308,6 @@ class CreateMetastoreAssignment: """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2026,119 +2399,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRequestExternalLineage: ) -@dataclass -class CreateResponse: - def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" - return cls() - - -@dataclass -class CreateStorageCredential: - name: str - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" - - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the created credential.""" - - def as_dict(self) -> dict: - """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: - """Deserializes the CreateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - name=d.get("name", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) - - @dataclass class CredentialDependency: """A credential that is dependent on a SQL object.""" @@ -2522,24 +2782,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) -@dataclass -class DeleteAliasResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
DeleteAliasResponse: - """Deserializes the DeleteAliasResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialResponse: def as_dict(self) -> dict: @@ -2805,10 +3047,42 @@ def as_shallow_dict(self) -> dict: body = {} return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: - """Deserializes the DisableResponse from a dictionary.""" - return cls() + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: + """Deserializes the DisableResponse from a dictionary.""" + return cls() + + +@dataclass +class DrReplicationInfo: + """Metadata related to Disaster Recovery.""" + + status: Optional[DrReplicationStatus] = None + + def as_dict(self) -> dict: + """Serializes the DrReplicationInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DrReplicationInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DrReplicationInfo: + """Deserializes the DrReplicationInfo from a dictionary.""" + return cls(status=_enum(d, "status", DrReplicationStatus)) + + +class DrReplicationStatus(Enum): + + DR_REPLICATION_STATUS_PRIMARY = "DR_REPLICATION_STATUS_PRIMARY" + DR_REPLICATION_STATUS_SECONDARY = "DR_REPLICATION_STATUS_SECONDARY" @dataclass @@ -3081,6 +3355,38 @@ def from_dict(cls, d: Dict[str, Any]) -> EntityTagAssignment: ) +@dataclass +class EnvironmentSettings: + environment_version: Optional[str] = None + + java_dependencies: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the EnvironmentSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = [v for v in self.java_dependencies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EnvironmentSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = self.java_dependencies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EnvironmentSettings: + """Deserializes the EnvironmentSettings from a dictionary.""" + return cls( + environment_version=d.get("environment_version", None), java_dependencies=d.get("java_dependencies", None) + ) + + @dataclass class ExternalLineageExternalMetadata: name: Optional[str] = None @@ -4143,7 +4449,7 @@ class FunctionInfo: through the BROWSE privilege when include_browse is enabled in the request.""" catalog_name: Optional[str] = None - """Name of parent catalog.""" + """Name of parent Catalog.""" comment: Optional[str] = None """User-provided free-form text description.""" @@ -4167,12 +4473,13 @@ class FunctionInfo: """Pretty printed function data type.""" full_name: Optional[str] = None - """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" + """Full name of Function, in form of **catalog_name**.**schema_name**.**function_name**""" function_id: Optional[str] = None """Id of Function, relative to parent schema.""" input_params: Optional[FunctionParameterInfos] = None + """Function 
input parameters.""" is_deterministic: Optional[bool] = None """Whether the function is deterministic.""" @@ -4187,7 +4494,7 @@ class FunctionInfo: """Name of function, relative to parent schema.""" owner: Optional[str] = None - """Username of current owner of function.""" + """Username of current owner of the function.""" parameter_style: Optional[FunctionInfoParameterStyle] = None """Function parameter style. **S** is the value for SQL.""" @@ -4200,18 +4507,18 @@ class FunctionInfo: routine_body: Optional[FunctionInfoRoutineBody] = None """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + specified in the **external_language** field, and the **return_params** of the function cannot + be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be **NO_SQL**.""" routine_definition: Optional[str] = None """Function body.""" routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + """function dependencies.""" schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" + """Name of parent Schema relative to its parent Catalog.""" security_type: Optional[FunctionInfoSecurityType] = None """Function security type.""" @@ -4226,10 +4533,10 @@ class FunctionInfo: """List of schemes whose objects can be referenced without qualification.""" updated_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" + """Time at which this function was last modified, in epoch milliseconds.""" updated_by: Optional[str] = None - """Username of user who last modified function.""" + """Username of user who last modified the function.""" def as_dict(self) -> dict: """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" @@ -4399,29 +4706,22 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: class FunctionInfoParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" S = "S" class FunctionInfoRoutineBody(Enum): - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" EXTERNAL = "EXTERNAL" SQL = "SQL" class FunctionInfoSecurityType(Enum): - """The security type of the function.""" DEFINER = "DEFINER" class FunctionInfoSqlDataAccess(Enum): - """Function SQL data access.""" CONTAINS_SQL = "CONTAINS_SQL" NO_SQL = "NO_SQL" @@ -4431,12 +4731,13 @@ class FunctionInfoSqlDataAccess(Enum): @dataclass class FunctionParameterInfo: name: str - """Name of parameter.""" + """Name of Parameter.""" type_text: str """Full data type spec, SQL/catalogString text.""" type_name: ColumnTypeName + """Name of type (INT, STRUCT, MAP, etc.)""" position: int """Ordinal position of column (starting at position 0).""" @@ -4448,8 +4749,10 @@ class FunctionParameterInfo: """Default value of the parameter.""" parameter_mode: Optional[FunctionParameterMode] = None + """Function parameter mode.""" parameter_type: Optional[FunctionParameterType] = None + """Function parameter type.""" type_interval_type: Optional[str] = None """Format of IntervalType.""" @@ -4543,7 +4846,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: @dataclass class FunctionParameterInfos: parameters: Optional[List[FunctionParameterInfo]] = None - """The array of __FunctionParameterInfo__ definitions of the function's parameters.""" def as_dict(self) -> dict: """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" @@ -4566,13 +4868,11 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: class FunctionParameterMode(Enum): - """The mode of the function parameter.""" IN = "IN" class FunctionParameterType(Enum): - """The type of function parameter.""" COLUMN = "COLUMN" PARAM = "PARAM" @@ -5153,7 +5453,7 @@ class LineageDirection(Enum): @dataclass class ListAccountMetastoreAssignmentsResponse: - """The list of workspaces to which the given metastore is assigned.""" + """The metastore assignments were successfully returned.""" workspace_ids: Optional[List[int]] = None @@ -5179,6 +5479,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse @dataclass class ListAccountStorageCredentialsResponse: + """The metastore storage credentials were successfully returned.""" + storage_credentials: Optional[List[StorageCredentialInfo]] = None """An array of metastore storage credentials.""" @@ -5896,7 +6198,8 @@ class MetastoreAssignment: """The unique ID of the metastore.""" default_catalog_name: Optional[str] = None - """The name of the default catalog in the metastore.""" + """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" def as_dict(self) -> dict: """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" @@ -6113,10 +6416,6 @@ class ModelVersionInfo: aliases: Optional[List[RegisteredModelAlias]] = None """List of aliases associated with the model version""" - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - catalog_name: Optional[str] = None """The name of the catalog containing the model version""" @@ -6175,8 +6474,6 @@ def as_dict(self) -> dict: body = {} if self.aliases: body["aliases"] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: - body["browse_only"] = self.browse_only if self.catalog_name is not None: body["catalog_name"] = self.catalog_name if self.comment is not None: @@ -6218,8 +6515,6 @@ def as_shallow_dict(self) -> dict: body = {} if self.aliases: body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only if self.catalog_name is not None: body["catalog_name"] = self.catalog_name if self.comment is not None: @@ -6261,7 +6556,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" return cls( aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), catalog_name=d.get("catalog_name", None), comment=d.get("comment", None), created_at=d.get("created_at", None), @@ -6283,11 +6577,9 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: class ModelVersionInfoStatus(Enum): - """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - status, then move to READY status once the model version files are uploaded and the model - version is finalized. Only model versions in READY status can be loaded for inference or served.""" FAILED_REGISTRATION = "FAILED_REGISTRATION" + MODEL_VERSION_STATUS_UNKNOWN = "MODEL_VERSION_STATUS_UNKNOWN" PENDING_REGISTRATION = "PENDING_REGISTRATION" READY = "READY" @@ -7468,6 +7760,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. 
+ + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[Privilege]] = None """The set of privileges to remove.""" @@ -7478,6 +7779,8 @@ def as_dict(self) -> dict: body["add"] = [v.value for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v.value for v in self.remove] return body @@ -7489,6 +7792,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -7499,6 +7804,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: return cls( add=_repeated_enum(d, "add", Privilege), principal=d.get("principal", None), + principal_id=d.get("principal_id", None), remove=_repeated_enum(d, "remove", Privilege), ) @@ -7886,6 +8192,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -7894,6 +8204,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -7903,6 +8215,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -7910,7 +8224,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -8125,11 +8443,21 @@ def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: @dataclass class RegisteredModelAlias: - """Registered model alias.""" - alias_name: Optional[str] = None """Name of the alias, e.g. 
'champion' or 'latest_stable'""" + catalog_name: Optional[str] = None + """The name of the catalog containing the model version""" + + id: Optional[str] = None + """The unique identifier of the alias""" + + model_name: Optional[str] = None + """The name of the parent registered model of the model version, relative to parent schema""" + + schema_name: Optional[str] = None + """The name of the schema containing the model version, relative to parent catalog""" + version_num: Optional[int] = None """Integer version number of the model version to which this alias points.""" @@ -8138,6 +8466,14 @@ def as_dict(self) -> dict: body = {} if self.alias_name is not None: body["alias_name"] = self.alias_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.id is not None: + body["id"] = self.id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name if self.version_num is not None: body["version_num"] = self.version_num return body @@ -8147,6 +8483,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.alias_name is not None: body["alias_name"] = self.alias_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.id is not None: + body["id"] = self.id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name if self.version_num is not None: body["version_num"] = self.version_num return body @@ -8154,7 +8498,14 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) + return cls( + alias_name=d.get("alias_name", None), + catalog_name=d.get("catalog_name", None), + id=d.get("id", None), + model_name=d.get("model_name", None), + schema_name=d.get("schema_name", None), + version_num=d.get("version_num", None), + ) @dataclass @@ -9636,50 +9987,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: @dataclass -class UpdateAssignmentResponse: - def as_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: - """Deserializes the UpdateAssignmentResponse from a dictionary.""" - return cls() - - -@dataclass -class UpdateCatalogWorkspaceBindingsResponse: - workspaces: Optional[List[int]] = None - """A list of workspace IDs""" - - def as_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.workspaces: - body["workspaces"] = self.workspaces - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: - """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" - 
return cls(workspaces=d.get("workspaces", None)) - - -@dataclass -class UpdateMetastore: +class UpdateAccountsMetastore: delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" @@ -9690,12 +9998,6 @@ class UpdateMetastore: delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - id: Optional[str] = None - """Unique ID of the metastore.""" - - new_name: Optional[str] = None - """New name for the metastore.""" - owner: Optional[str] = None """The owner of the metastore.""" @@ -9706,7 +10008,7 @@ class UpdateMetastore: """UUID of storage credential to access the metastore storage_root.""" def as_dict(self) -> dict: - """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateAccountsMetastore into a dictionary suitable for use as a JSON request body.""" body = {} if self.delta_sharing_organization_name is not None: body["delta_sharing_organization_name"] = self.delta_sharing_organization_name @@ -9716,10 +10018,6 @@ def as_dict(self) -> dict: ) if self.delta_sharing_scope is not None: body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.privilege_model_version is not None: @@ -9729,7 +10027,7 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateAccountsMetastore into a shallow dictionary of its immediate attributes.""" body = {} if self.delta_sharing_organization_name is not None: body["delta_sharing_organization_name"] = self.delta_sharing_organization_name @@ -9739,10 +10037,6 @@ def as_shallow_dict(self) -> dict: ) if self.delta_sharing_scope is not None: body["delta_sharing_scope"] = self.delta_sharing_scope - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.privilege_model_version is not None: @@ -9752,24 +10046,162 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: - """Deserializes the UpdateMetastore from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountsMetastore: + """Deserializes the UpdateAccountsMetastore from a dictionary.""" return cls( delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), delta_sharing_recipient_token_lifetime_in_seconds=d.get( "delta_sharing_recipient_token_lifetime_in_seconds", None ), delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), - id=d.get("id", None), - new_name=d.get("new_name", None), owner=d.get("owner", None), privilege_model_version=d.get("privilege_model_version", None), storage_root_credential_id=d.get("storage_root_credential_id", None), ) +@dataclass +class UpdateAccountsStorageCredential: + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service 
principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAccountsStorageCredential into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAccountsStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountsStorageCredential: + """Deserializes the UpdateAccountsStorageCredential from a dictionary.""" + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + ) + + +@dataclass +class UpdateAssignmentResponse: + def as_dict(self) -> dict: 
+ """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: + """Deserializes the UpdateAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class UpdateCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" + + def as_dict(self) -> dict: + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspaces: + body["workspaces"] = self.workspaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: + """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) + + @dataclass class UpdateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + default_catalog_name: Optional[str] = None """The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" @@ -9777,9 +10209,6 @@ class UpdateMetastoreAssignment: metastore_id: Optional[str] = None """The unique ID of the metastore.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -9914,132 +10343,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" - - force: Optional[bool] = None - """Force update even if there are dependent external locations or external tables.""" - - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - name: Optional[str] = None - """Name of the storage credential.""" - - new_name: Optional[str] = None - """New name for the storage credential.""" - - owner: Optional[str] = None - """Username of current owner of credential.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" - - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the updated credential.""" - - def as_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: - """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) - - @dataclass 
class UpdateWorkspaceBindingsResponse: """A list of workspace IDs that are bound to the securable""" @@ -10415,7 +10718,7 @@ def __init__(self, api_client): def create( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None - ): + ) -> AccountsCreateMetastoreAssignmentResponse: """Creates an assignment to a metastore for a workspace :param workspace_id: int @@ -10424,7 +10727,7 @@ def create( Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - + :returns: :class:`AccountsCreateMetastoreAssignmentResponse` """ body = {} if metastore_assignment is not None: @@ -10434,14 +10737,15 @@ def create( "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "POST", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", body=body, headers=headers, ) + return AccountsCreateMetastoreAssignmentResponse.from_dict(res) - def delete(self, workspace_id: int, metastore_id: str): + def delete(self, workspace_id: int, metastore_id: str) -> AccountsDeleteMetastoreAssignmentResponse: """Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int @@ -10449,23 +10753,24 @@ def delete(self, workspace_id: int, metastore_id: str): :param metastore_id: str Unity Catalog metastore ID - + :returns: :class:`AccountsDeleteMetastoreAssignmentResponse` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", headers=headers, ) + return AccountsDeleteMetastoreAssignmentResponse.from_dict(res) def get(self, workspace_id: int) -> AccountsMetastoreAssignment: """Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned - a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment - will not be found and a 404 returned. + a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the + assignment will not be found and a 404 returned. :param workspace_id: int Workspace ID. @@ -10503,7 +10808,7 @@ def list(self, metastore_id: str) -> Iterator[int]: def update( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[UpdateMetastoreAssignment] = None - ): + ) -> AccountsUpdateMetastoreAssignmentResponse: """Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. 
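# A minimal usage sketch of the metastore-assignment methods above, which now return typed
# Accounts*Response objects instead of None; the `metastore_assignments` accessor on
# AccountClient and the placeholder IDs below are assumptions, not part of the generated code.
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()

# create() now returns AccountsCreateMetastoreAssignmentResponse.
a.metastore_assignments.create(
    workspace_id=1234567890,
    metastore_id="11111111-2222-3333-4444-555555555555",
    metastore_assignment=catalog.CreateMetastoreAssignment(
        workspace_id=1234567890,
        metastore_id="11111111-2222-3333-4444-555555555555",
    ),
)

# get() is unchanged and still returns AccountsMetastoreAssignment.
assignment = a.metastore_assignments.get(workspace_id=1234567890)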
@@ -10513,7 +10818,7 @@ def update( Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - + :returns: :class:`AccountsUpdateMetastoreAssignmentResponse` """ body = {} if metastore_assignment is not None: @@ -10523,12 +10828,13 @@ def update( "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", body=body, headers=headers, ) + return AccountsUpdateMetastoreAssignmentResponse.from_dict(res) class AccountMetastoresAPI: @@ -10538,12 +10844,12 @@ class AccountMetastoresAPI: def __init__(self, api_client): self._api = api_client - def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: + def create(self, *, metastore_info: Optional[CreateAccountsMetastore] = None) -> AccountsCreateMetastoreResponse: """Creates a Unity Catalog metastore. - :param metastore_info: :class:`CreateMetastore` (optional) + :param metastore_info: :class:`CreateAccountsMetastore` (optional) - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsCreateMetastoreResponse` """ body = {} if metastore_info is not None: @@ -10554,9 +10860,9 @@ def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> Account } res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/metastores", body=body, headers=headers) - return AccountsMetastoreInfo.from_dict(res) + return AccountsCreateMetastoreResponse.from_dict(res) - def delete(self, metastore_id: str, *, force: Optional[bool] = None): + def delete(self, metastore_id: str, *, force: Optional[bool] = None) -> AccountsDeleteMetastoreResponse: """Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str @@ -10564,7 +10870,7 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None): :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - + :returns: :class:`AccountsDeleteMetastoreResponse` """ query = {} @@ -10574,20 +10880,21 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None): "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", query=query, headers=headers, ) + return AccountsDeleteMetastoreResponse.from_dict(res) - def get(self, metastore_id: str) -> AccountsMetastoreInfo: + def get(self, metastore_id: str) -> AccountsGetMetastoreResponse: """Gets a Unity Catalog metastore from an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsGetMetastoreResponse` """ headers = { @@ -10597,7 +10904,7 @@ def get(self, metastore_id: str) -> AccountsMetastoreInfo: res = self._api.do( "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", headers=headers ) - return AccountsMetastoreInfo.from_dict(res) + return AccountsGetMetastoreResponse.from_dict(res) def list(self) -> Iterator[MetastoreInfo]: """Gets all Unity Catalog metastores associated with an account specified by ID. 
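# A minimal usage sketch of the renamed account-metastore request types above
# (CreateAccountsMetastore replaces CreateMetastore); the `metastores` accessor on
# AccountClient and the placeholder values below are assumptions, not part of the generated code.
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()

# create() now returns AccountsCreateMetastoreResponse rather than AccountsMetastoreInfo.
created = a.metastores.create(
    metastore_info=catalog.CreateAccountsMetastore(name="primary-metastore", region="us-west-2")
)

# list() still yields MetastoreInfo objects; its response is now parsed via
# AccountsListMetastoresResponse, as the hunk below shows.
for m in a.metastores.list():
    print(m.metastore_id, m.name)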
@@ -10611,17 +10918,20 @@ def list(self) -> Iterator[MetastoreInfo]: } json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/metastores", headers=headers) - parsed = ListMetastoresResponse.from_dict(json).metastores + parsed = AccountsListMetastoresResponse.from_dict(json).metastores return parsed if parsed is not None else [] - def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo: + def update( + self, metastore_id: str, *, metastore_info: Optional[UpdateAccountsMetastore] = None + ) -> AccountsUpdateMetastoreResponse: """Updates an existing Unity Catalog metastore. :param metastore_id: str Unity Catalog metastore ID - :param metastore_info: :class:`UpdateMetastore` (optional) + :param metastore_info: :class:`UpdateAccountsMetastore` (optional) + Properties of the metastore to change. - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsUpdateMetastoreResponse` """ body = {} if metastore_info is not None: @@ -10634,7 +10944,7 @@ def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", body=body, headers=headers ) - return AccountsMetastoreInfo.from_dict(res) + return AccountsUpdateMetastoreResponse.from_dict(res) class AccountStorageCredentialsAPI: @@ -10644,25 +10954,33 @@ def __init__(self, api_client): self._api = api_client def create( - self, metastore_id: str, *, credential_info: Optional[CreateStorageCredential] = None - ) -> AccountsStorageCredentialInfo: - """Creates a new storage credential. The request object is specific to the cloud: - - * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * - **GcpServiceAcountKey** for GCP credentials. + self, + metastore_id: str, + *, + credential_info: Optional[CreateAccountsStorageCredential] = None, + skip_validation: Optional[bool] = None, + ) -> AccountsCreateStorageCredentialInfo: + """Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole** for + AWS credentials - **AzureServicePrincipal** for Azure credentials - **GcpServiceAccountKey** for GCP + credentials - The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the + The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on the metastore. :param metastore_id: str Unity Catalog metastore ID - :param credential_info: :class:`CreateStorageCredential` (optional) + :param credential_info: :class:`CreateAccountsStorageCredential` (optional) + :param skip_validation: bool (optional) + Optional, default false. Supplying true to this argument skips validation of the created set of + credentials. 
- :returns: :class:`AccountsStorageCredentialInfo` + :returns: :class:`AccountsCreateStorageCredentialInfo` """ body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() + if skip_validation is not None: + body["skip_validation"] = skip_validation headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -10674,9 +10992,11 @@ def create( body=body, headers=headers, ) - return AccountsStorageCredentialInfo.from_dict(res) + return AccountsCreateStorageCredentialInfo.from_dict(res) - def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None): + def delete( + self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None + ) -> AccountsDeleteStorageCredentialResponse: """Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. @@ -10687,7 +11007,7 @@ def delete(self, metastore_id: str, storage_credential_name: str, *, force: Opti :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false. - + :returns: :class:`AccountsDeleteStorageCredentialResponse` """ query = {} @@ -10697,12 +11017,13 @@ def delete(self, metastore_id: str, storage_credential_name: str, *, force: Opti "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", query=query, headers=headers, ) + return AccountsDeleteStorageCredentialResponse.from_dict(res) def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo: """Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the @@ -10711,7 +11032,7 @@ def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorag :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str - Name of the storage credential. + Required. Name of the storage credential. :returns: :class:`AccountsStorageCredentialInfo` """ @@ -10753,22 +11074,27 @@ def update( metastore_id: str, storage_credential_name: str, *, - credential_info: Optional[UpdateStorageCredential] = None, - ) -> AccountsStorageCredentialInfo: + credential_info: Optional[UpdateAccountsStorageCredential] = None, + skip_validation: Optional[bool] = None, + ) -> AccountsUpdateStorageCredentialResponse: """Updates a storage credential on the metastore. The caller must be the owner of the storage credential. - If the caller is a metastore admin, only the __owner__ credential can be changed. + If the caller is a metastore admin, only the **owner** credential can be changed. :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - :param credential_info: :class:`UpdateStorageCredential` (optional) + :param credential_info: :class:`UpdateAccountsStorageCredential` (optional) + :param skip_validation: bool (optional) + Optional. Supplying true to this argument skips validation of the updated set of credentials. 
- :returns: :class:`AccountsStorageCredentialInfo` + :returns: :class:`AccountsUpdateStorageCredentialResponse` """ body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() + if skip_validation is not None: + body["skip_validation"] = skip_validation headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -10780,7 +11106,7 @@ def update( body=body, headers=headers, ) - return AccountsStorageCredentialInfo.from_dict(res) + return AccountsUpdateStorageCredentialResponse.from_dict(res) class ArtifactAllowlistsAPI: @@ -10870,6 +11196,8 @@ def create( *, comment: Optional[str] = None, connection_name: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, options: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, provider_name: Optional[str] = None, @@ -10885,6 +11213,10 @@ def create( User-provided free-form text description. :param connection_name: str (optional) The name of the connection to an external data source. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param options: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param properties: Dict[str,str] (optional) @@ -10905,6 +11237,10 @@ def create( body["comment"] = comment if connection_name is not None: body["connection_name"] = connection_name + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if name is not None: body["name"] = name if options is not None: @@ -11023,6 +11359,8 @@ def update( name: str, *, comment: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, @@ -11037,6 +11375,10 @@ def update( The name of the catalog. :param comment: str (optional) User-provided free-form text description. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) Whether predictive optimization should be enabled for this object and objects under it. 
:param isolation_mode: :class:`CatalogIsolationMode` (optional) @@ -11055,6 +11397,10 @@ def update( body = {} if comment is not None: body["comment"] = comment + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if enable_predictive_optimization is not None: body["enable_predictive_optimization"] = enable_predictive_optimization.value if isolation_mode is not None: @@ -11096,6 +11442,7 @@ def create( options: Dict[str, str], *, comment: Optional[str] = None, + environment_settings: Optional[EnvironmentSettings] = None, properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None, ) -> ConnectionInfo: @@ -11112,6 +11459,8 @@ def create( A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param read_only: bool (optional) @@ -11124,6 +11473,8 @@ def create( body["comment"] = comment if connection_type is not None: body["connection_type"] = connection_type.value + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if name is not None: body["name"] = name if options is not None: @@ -11204,7 +11555,13 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = query["page_token"] = json["next_page_token"] def update( - self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None + self, + name: str, + options: Dict[str, str], + *, + environment_settings: Optional[EnvironmentSettings] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, ) -> ConnectionInfo: """Updates the connection that matches the supplied name. @@ -11212,6 +11569,8 @@ def update( Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. :param owner: str (optional) @@ -11220,6 +11579,8 @@ def update( :returns: :class:`ConnectionInfo` """ body = {} + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if new_name is not None: body["new_name"] = new_name if options is not None: @@ -12346,7 +12707,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): :param name: str The fully-qualified name of the function (of the form - __catalog_name__.__schema_name__.__function__name__). + __catalog_name__.__schema_name__.__function__name__) . :param force: bool (optional) Force deletion even if the function is notempty. 
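# Illustrative sketch (not part of the generated diff): exercising the new
# `skip_validation` flag that the hunks above add to the account-level storage
# credential create/update calls. The metastore ID and credential name are
# placeholders, and the cloud-specific `credential_info` payload is elided;
# in practice you would pass a CreateAccountsStorageCredential /
# UpdateAccountsStorageCredential built for your cloud.
from databricks.sdk import AccountClient

a = AccountClient()  # assumes account-level auth is configured in the environment

# Create without validating the supplied credentials (credential_info omitted here).
created = a.storage_credentials.create(
    metastore_id="<metastore-id>",
    skip_validation=True,
)

# Update an existing credential, again skipping validation.
updated = a.storage_credentials.update(
    metastore_id="<metastore-id>",
    storage_credential_name="<credential-name>",
    skip_validation=True,
)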
@@ -12356,9 +12717,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): query = {} if force is not None: query["force"] = force - headers = { - "Accept": "application/json", - } + headers = {} self._api.do("DELETE", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) @@ -12459,7 +12818,7 @@ def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) - Username of current owner of function. + Username of current owner of the function. :returns: :class:`FunctionInfo` """ @@ -12494,6 +12853,7 @@ def get( securable_type: str, full_name: str, *, + include_deleted_principals: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None, @@ -12504,6 +12864,8 @@ def get( Type of securable. :param full_name: str Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. :param max_results: int (optional) Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the @@ -12523,6 +12885,8 @@ def get( """ query = {} + if include_deleted_principals is not None: + query["include_deleted_principals"] = include_deleted_principals if max_results is not None: query["max_results"] = max_results if page_token is not None: @@ -13069,7 +13433,29 @@ def list( return query["page_token"] = json["next_page_token"] - def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo: + def update( + self, + full_name: str, + version: int, + *, + aliases: Optional[List[RegisteredModelAlias]] = None, + catalog_name: Optional[str] = None, + comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + id: Optional[str] = None, + metastore_id: Optional[str] = None, + model_name: Optional[str] = None, + model_version_dependencies: Optional[DependencyList] = None, + run_id: Optional[str] = None, + run_workspace_id: Optional[int] = None, + schema_name: Optional[str] = None, + source: Optional[str] = None, + status: Optional[ModelVersionInfoStatus] = None, + storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, + ) -> ModelVersionInfo: """Updates the specified model version. The caller must be a metastore admin or an owner of the parent registered model. 
For the latter case, @@ -13082,14 +13468,80 @@ def update(self, full_name: str, version: int, *, comment: Optional[str] = None) The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the model version + :param catalog_name: str (optional) + The name of the catalog containing the model version :param comment: str (optional) The comment attached to the model version + :param created_at: int (optional) + :param created_by: str (optional) + The identifier of the user who created the model version + :param id: str (optional) + The unique identifier of the model version + :param metastore_id: str (optional) + The unique identifier of the metastore containing the model version + :param model_name: str (optional) + The name of the parent registered model of the model version, relative to parent schema + :param model_version_dependencies: :class:`DependencyList` (optional) + Model version dependencies, for feature-store packaged models + :param run_id: str (optional) + MLflow run ID used when creating the model version, if ``source`` was generated by an experiment run + stored in an MLflow tracking server + :param run_workspace_id: int (optional) + ID of the Databricks workspace containing the MLflow run that generated this model version, if + applicable + :param schema_name: str (optional) + The name of the schema containing the model version, relative to parent catalog + :param source: str (optional) + URI indicating the location of the source artifacts (files) for the model version + :param status: :class:`ModelVersionInfoStatus` (optional) + Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model version is + finalized. Only model versions in READY status can be loaded for inference or served. 
+ :param storage_location: str (optional) + The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + :param updated_by: str (optional) + The identifier of the user who updated the model version last time :returns: :class:`ModelVersionInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if catalog_name is not None: + body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if id is not None: + body["id"] = id + if metastore_id is not None: + body["metastore_id"] = metastore_id + if model_name is not None: + body["model_name"] = model_name + if model_version_dependencies is not None: + body["model_version_dependencies"] = model_version_dependencies.as_dict() + if run_id is not None: + body["run_id"] = run_id + if run_workspace_id is not None: + body["run_workspace_id"] = run_workspace_id + if schema_name is not None: + body["schema_name"] = schema_name + if source is not None: + body["source"] = source + if status is not None: + body["status"] = status.value + if storage_location is not None: + body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -13797,20 +14249,29 @@ class RegisteredModelsAPI: new model version, or update permissions on the registered model, users must be owners of the registered model. - Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that - specify a securable type, use "FUNCTION" as the securable type.""" + Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants) that specify + a securable type, use FUNCTION as the securable type.""" def __init__(self, api_client): self._api = api_client def create( self, - catalog_name: str, - schema_name: str, - name: str, *, + aliases: Optional[List[RegisteredModelAlias]] = None, + browse_only: Optional[bool] = None, + catalog_name: Optional[str] = None, comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + full_name: Optional[str] = None, + metastore_id: Optional[str] = None, + name: Optional[str] = None, + owner: Optional[str] = None, + schema_name: Optional[str] = None, storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, ) -> RegisteredModelInfo: """Creates a new registered model in Unity Catalog. @@ -13822,30 +14283,67 @@ def create( **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - :param catalog_name: str + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the registered model + :param browse_only: bool (optional) + Indicates whether the principal is limited to retrieving metadata for the associated object through + the BROWSE privilege when include_browse is enabled in the request. 
+ :param catalog_name: str (optional) The name of the catalog where the schema and the registered model reside - :param schema_name: str - The name of the schema where the registered model resides - :param name: str - The name of the registered model :param comment: str (optional) The comment attached to the registered model + :param created_at: int (optional) + Creation timestamp of the registered model in milliseconds since the Unix epoch + :param created_by: str (optional) + The identifier of the user who created the registered model + :param full_name: str (optional) + The three-level (fully qualified) name of the registered model + :param metastore_id: str (optional) + The unique identifier of the metastore + :param name: str (optional) + The name of the registered model + :param owner: str (optional) + The identifier of the user who owns the registered model + :param schema_name: str (optional) + The name of the schema where the registered model resides :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + Last-update timestamp of the registered model in milliseconds since the Unix epoch + :param updated_by: str (optional) + The identifier of the user who updated the registered model last time :returns: :class:`RegisteredModelInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if browse_only is not None: + body["browse_only"] = browse_only if catalog_name is not None: body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if full_name is not None: + body["full_name"] = full_name + if metastore_id is not None: + body["metastore_id"] = metastore_id if name is not None: body["name"] = name + if owner is not None: + body["owner"] = owner if schema_name is not None: body["schema_name"] = schema_name if storage_location is not None: body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -14003,7 +14501,7 @@ def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredM **USE_SCHEMA** privilege on the parent schema. :param full_name: str - Full name of the registered model + The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param version_num: int @@ -14028,9 +14526,20 @@ def update( self, full_name: str, *, + aliases: Optional[List[RegisteredModelAlias]] = None, + browse_only: Optional[bool] = None, + catalog_name: Optional[str] = None, comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + metastore_id: Optional[str] = None, + name: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + schema_name: Optional[str] = None, + storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, ) -> RegisteredModelInfo: """Updates the specified registered model. 
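# Illustrative sketch (not part of the diff): the hunks above make most fields on
# RegisteredModelsAPI.create optional and widen ModelVersionsAPI.update beyond
# `comment`. Catalog, schema, model names, and values below are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # assumes workspace auth is configured in the environment

model = w.registered_models.create(
    catalog_name="main",   # now optional in the signature, but still needed to place the model
    schema_name="default",
    name="my_model",
    comment="created via the Python SDK",
)

# Update only the comment of an existing model version; the other newly exposed
# optional parameters (aliases, source, status, ...) can be supplied the same way.
mv = w.model_versions.update(
    full_name=model.full_name,
    version=1,
    comment="validated against the holdout set",
)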
@@ -14042,22 +14551,67 @@ def update( :param full_name: str The three-level (fully qualified) name of the registered model + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the registered model + :param browse_only: bool (optional) + Indicates whether the principal is limited to retrieving metadata for the associated object through + the BROWSE privilege when include_browse is enabled in the request. + :param catalog_name: str (optional) + The name of the catalog where the schema and the registered model reside :param comment: str (optional) The comment attached to the registered model + :param created_at: int (optional) + Creation timestamp of the registered model in milliseconds since the Unix epoch + :param created_by: str (optional) + The identifier of the user who created the registered model + :param metastore_id: str (optional) + The unique identifier of the metastore + :param name: str (optional) + The name of the registered model :param new_name: str (optional) New name for the registered model. :param owner: str (optional) The identifier of the user who owns the registered model + :param schema_name: str (optional) + The name of the schema where the registered model resides + :param storage_location: str (optional) + The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + Last-update timestamp of the registered model in milliseconds since the Unix epoch + :param updated_by: str (optional) + The identifier of the user who updated the registered model last time :returns: :class:`RegisteredModelInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if browse_only is not None: + body["browse_only"] = browse_only + if catalog_name is not None: + body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if metastore_id is not None: + body["metastore_id"] = metastore_id + if name is not None: + body["name"] = name if new_name is not None: body["new_name"] = new_name if owner is not None: body["owner"] = owner + if schema_name is not None: + body["schema_name"] = schema_name + if storage_location is not None: + body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 57ea7e961..eb847ed95 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -10,7 +10,8 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -1080,7 +1081,7 @@ def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} if self.compliance_standards: - body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards] + body["compliance_standards"] = [v.value for v in self.compliance_standards] if self.is_enabled is not None: body["is_enabled"] = 
self.is_enabled return body @@ -1098,7 +1099,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" return cls( - compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard), + compliance_standards=_repeated_enum(d, "compliance_standards", settings.ComplianceStandard), is_enabled=d.get("is_enabled", None), ) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 11a2a2b78..021a90f23 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -325,6 +327,13 @@ class AzureAvailability(Enum): SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" +class BaseEnvironmentType(Enum): + """If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto""" + + CPU = "CPU" + GPU = "GPU" + + @dataclass class CancelResponse: def as_dict(self) -> dict: @@ -2718,6 +2727,184 @@ def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: return cls(destination=d.get("destination", None)) +@dataclass +class DefaultBaseEnvironment: + base_environment_cache: Optional[List[DefaultBaseEnvironmentCache]] = None + + base_environment_type: Optional[BaseEnvironmentType] = None + + created_timestamp: Optional[int] = None + + creator_user_id: Optional[int] = None + + environment: Optional[Environment] = None + """Note: we made `environment` non-internal because we need to expose its `client` field. 
All other + fields should be treated as internal.""" + + filepath: Optional[str] = None + + id: Optional[str] = None + + is_default: Optional[bool] = None + + last_updated_timestamp: Optional[int] = None + + last_updated_user_id: Optional[int] = None + + message: Optional[str] = None + + name: Optional[str] = None + + principal_ids: Optional[List[int]] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache] + if self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type.value + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment.as_dict() + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = [v for v in self.principal_ids] + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = self.base_environment_cache + if self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = self.principal_ids + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironment: + """Deserializes the DefaultBaseEnvironment from a dictionary.""" + return cls( + base_environment_cache=_repeated_dict(d, "base_environment_cache", DefaultBaseEnvironmentCache), + base_environment_type=_enum(d, "base_environment_type", BaseEnvironmentType), + created_timestamp=d.get("created_timestamp", None), + creator_user_id=d.get("creator_user_id", None), + environment=_from_dict(d, "environment", Environment), + filepath=d.get("filepath", None), + 
id=d.get("id", None), + is_default=d.get("is_default", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + last_updated_user_id=d.get("last_updated_user_id", None), + message=d.get("message", None), + name=d.get("name", None), + principal_ids=d.get("principal_ids", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +@dataclass +class DefaultBaseEnvironmentCache: + indefinite_materialized_environment: Optional[MaterializedEnvironment] = None + + materialized_environment: Optional[MaterializedEnvironment] = None + + message: Optional[str] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment.as_dict() + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a shallow dictionary of its immediate attributes.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironmentCache: + """Deserializes the DefaultBaseEnvironmentCache from a dictionary.""" + return cls( + indefinite_materialized_environment=_from_dict( + d, "indefinite_materialized_environment", MaterializedEnvironment + ), + materialized_environment=_from_dict(d, "materialized_environment", MaterializedEnvironment), + message=d.get("message", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +class DefaultBaseEnvironmentCacheStatus(Enum): + + CREATED = "CREATED" + EXPIRED = "EXPIRED" + FAILED = "FAILED" + INVALID = "INVALID" + PENDING = "PENDING" + REFRESHING = "REFRESHING" + + @dataclass class DeleteClusterResponse: def as_dict(self) -> dict: @@ -3135,12 +3322,9 @@ class Environment: """Required. Environment version used by the environment. Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer.""" - jar_dependencies: Optional[List[str]] = None - """Use `java_dependencies` instead.""" - java_dependencies: Optional[List[str]] = None - """List of jar dependencies, should be string representing volume paths. For example: - `/Volumes/path/to/test.jar`.""" + """List of java dependencies. Each dependency is a string representing a java library path. 
For + example: `/Volumes/path/to/test.jar`.""" def as_dict(self) -> dict: """Serializes the Environment into a dictionary suitable for use as a JSON request body.""" @@ -3151,8 +3335,6 @@ def as_dict(self) -> dict: body["dependencies"] = [v for v in self.dependencies] if self.environment_version is not None: body["environment_version"] = self.environment_version - if self.jar_dependencies: - body["jar_dependencies"] = [v for v in self.jar_dependencies] if self.java_dependencies: body["java_dependencies"] = [v for v in self.java_dependencies] return body @@ -3166,8 +3348,6 @@ def as_shallow_dict(self) -> dict: body["dependencies"] = self.dependencies if self.environment_version is not None: body["environment_version"] = self.environment_version - if self.jar_dependencies: - body["jar_dependencies"] = self.jar_dependencies if self.java_dependencies: body["java_dependencies"] = self.java_dependencies return body @@ -3179,7 +3359,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Environment: client=d.get("client", None), dependencies=d.get("dependencies", None), environment_version=d.get("environment_version", None), - jar_dependencies=d.get("jar_dependencies", None), java_dependencies=d.get("java_dependencies", None), ) @@ -3831,6 +4010,10 @@ class GetInstancePool: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -3860,6 +4043,11 @@ class GetInstancePool: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -3904,6 +4092,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3918,6 +4108,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3949,6 +4141,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3963,6 +4157,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3990,6 +4186,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -3997,6 +4194,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -4642,6 +4840,10 @@ class InstancePoolAndStats: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. 
In AWS, this feature @@ -4674,6 +4876,11 @@ class InstancePoolAndStats: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -4718,6 +4925,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4732,6 +4941,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4763,6 +4974,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4777,6 +4990,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4804,6 +5019,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -4811,6 +5027,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -4829,6 +5046,16 @@ class InstancePoolAwsAttributes: availability: Optional[InstancePoolAwsAttributesAvailability] = None """Availability type used 
for the spot nodes.""" + instance_profile_arn: Optional[str] = None + """All AWS instances belonging to the instance pool will have this instance profile. If omitted, + instances will initially be launched with the workspace's default instance profile. If defined, + clusters that use the pool will inherit the instance profile, and must not specify their own + instance profile on cluster creation or update. If the pool does not specify an instance + profile, clusters using the pool may specify any instance profile. The instance profile must + have previously been added to the Databricks environment by an account administrator. + + This feature may only be available to certain customer plans.""" + spot_bid_price_percent: Optional[int] = None """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -4851,6 +5078,8 @@ def as_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability.value + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -4862,6 +5091,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -4873,6 +5104,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" return cls( availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability), + instance_profile_arn=d.get("instance_profile_arn", None), spot_bid_price_percent=d.get("spot_bid_price_percent", None), zone_id=d.get("zone_id", None), ) @@ -5676,6 +5908,39 @@ class ListClustersSortByField(Enum): DEFAULT = "DEFAULT" +@dataclass +class ListDefaultBaseEnvironmentsResponse: + default_base_environments: Optional[List[DefaultBaseEnvironment]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = [v.as_dict() for v in self.default_base_environments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = self.default_base_environments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDefaultBaseEnvironmentsResponse: + """Deserializes the ListDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls( + default_base_environments=_repeated_dict(d, "default_base_environments", DefaultBaseEnvironment), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListGlobalInitScriptsResponse: scripts: 
Optional[List[GlobalInitScriptDetails]] = None @@ -5944,6 +6209,44 @@ def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus: MapAny = Dict[str, Any] +@dataclass +class MaterializedEnvironment: + """Materialized Environment information enables environment sharing and reuse via Environment + Caching during library installations. Currently this feature is only supported for Python + libraries. + + - If the env cache entry in LMv2 DB doesn't exist or invalid, library installations and + environment materialization will occur. A new Materialized Environment metadata will be sent + from DP upon successful library installations and env materialization, and is persisted into + database by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment + will be sent to DP by LMv2, and DP will restore the cached environment from a store instead of + reinstalling libraries from scratch. + + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new version""" + + last_updated_timestamp: Optional[int] = None + """The timestamp (in epoch milliseconds) when the materialized env is updated.""" + + def as_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MaterializedEnvironment: + """Deserializes the MaterializedEnvironment from a dictionary.""" + return cls(last_updated_timestamp=d.get("last_updated_timestamp", None)) + + @dataclass class MavenLibrary: coordinates: str @@ -6238,6 +6541,28 @@ def from_dict(cls, d: Dict[str, Any]) -> NodeType: ) +@dataclass +class NodeTypeFlexibility: + """For Fleet-V2 using classic clusters, this object contains the information about the alternate + node type ids to use when attempting to launch a cluster. 
It can be used with both the driver + and worker node types.""" + + def as_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NodeTypeFlexibility: + """Deserializes the NodeTypeFlexibility from a dictionary.""" + return cls() + + @dataclass class PendingInstanceError: """Error message of a failed pending instances""" @@ -6550,6 +6875,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) +@dataclass +class RefreshDefaultBaseEnvironmentsResponse: + def as_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RefreshDefaultBaseEnvironmentsResponse: + """Deserializes the RefreshDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls() + + @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -8493,11 +8836,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: } op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers) - return Wait( - self.wait_get_cluster_terminated, - response=DeleteClusterResponse.from_dict(op_response), - cluster_id=cluster_id, - ) + return Wait(self.wait_get_cluster_terminated, cluster_id=cluster_id) def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.delete(cluster_id=cluster_id).result(timeout=timeout) @@ -8759,9 +9098,7 @@ def edit( } op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def edit_and_wait( self, @@ -9123,9 +9460,7 @@ def resize( } op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def resize_and_wait( self, @@ -9160,9 +9495,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai } op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def restart_and_wait( self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20) @@ -9229,9 +9562,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: } op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, 
cluster_id=cluster_id) def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.start(cluster_id=cluster_id).result(timeout=timeout) @@ -9302,9 +9633,7 @@ def update( } op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def update_and_wait( self, @@ -9485,7 +9814,6 @@ def cancel( op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers) return Wait( self.wait_command_status_command_execution_cancelled, - response=CancelResponse.from_dict(op_response), cluster_id=cluster_id, command_id=command_id, context_id=context_id, @@ -9840,11 +10168,13 @@ def create( azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str, str]] = None, disk_spec: Optional[DiskSpec] = None, + enable_auto_alternate_node_types: Optional[bool] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None, remote_disk_throughput: Optional[int] = None, @@ -9873,6 +10203,9 @@ def create( - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param enable_elastic_disk: bool (optional) Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -9892,6 +10225,10 @@ def create( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. 
:param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) @@ -9916,6 +10253,8 @@ def create( body["custom_tags"] = custom_tags if disk_spec is not None: body["disk_spec"] = disk_spec.as_dict() + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if enable_elastic_disk is not None: body["enable_elastic_disk"] = enable_elastic_disk if gcp_attributes is not None: @@ -9928,6 +10267,8 @@ def create( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if preloaded_docker_images is not None: @@ -9971,9 +10312,11 @@ def edit( node_type_id: str, *, custom_tags: Optional[Dict[str, str]] = None, + enable_auto_alternate_node_types: Optional[bool] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, remote_disk_throughput: Optional[int] = None, total_initial_remote_disk_size: Optional[int] = None, ): @@ -9994,6 +10337,9 @@ def edit( EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances @@ -10006,6 +10352,10 @@ def edit( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. :param remote_disk_throughput: int (optional) If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED types. 
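# Illustrative sketch (not part of the diff): editing an instance pool with the
# node-type-flexibility options added above. Per the docstrings, set either
# `enable_auto_alternate_node_types` or `node_type_flexibility`, not both.
# Pool ID, name, and node type are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

w.instance_pools.edit(
    instance_pool_id="<pool-id>",
    instance_pool_name="fleet-v2-pool",
    node_type_id="i3.xlarge",
    enable_auto_alternate_node_types=True,  # let auto-generated alternate node types be used
)

# Alternatively, supply an explicit (currently field-less) NodeTypeFlexibility object:
# w.instance_pools.edit(..., node_type_flexibility=compute.NodeTypeFlexibility())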
@@ -10018,6 +10368,8 @@ def edit( body = {} if custom_tags is not None: body["custom_tags"] = custom_tags + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if idle_instance_autotermination_minutes is not None: body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes if instance_pool_id is not None: @@ -10028,6 +10380,8 @@ def edit( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if remote_disk_throughput is not None: @@ -10372,6 +10726,68 @@ def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] + def create_default_base_environment( + self, default_base_environment: DefaultBaseEnvironment, *, request_id: Optional[str] = None + ) -> DefaultBaseEnvironment: + """Create a default base environment within workspaces to define the environment version and a list of + dependencies to be used in serverless notebooks and jobs. This process will asynchronously generate a + cache to optimize dependency resolution. + + :param default_base_environment: :class:`DefaultBaseEnvironment` + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a `request_id` is provided. + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/default-base-environments", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def delete_default_base_environment(self, id: str): + """Delete the default base environment given an ID. The default base environment may be used by + downstream workloads. Please ensure that the deletion is intentional. + + :param id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/default-base-environments/{id}", headers=headers) + + def get_default_base_environment(self, id: str) -> DefaultBaseEnvironment: + """Return the default base environment details for a given ID. + + :param id: str + + :returns: :class:`DefaultBaseEnvironment` + """ + + query = {} + if id is not None: + query["id"] = id + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/default-base-environments:getDefaultBaseEnvironment", query=query, headers=headers + ) + return DefaultBaseEnvironment.from_dict(res) + def install(self, cluster_id: str, libraries: List[Library]): """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. 
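# Illustrative sketch (not part of the diff): the new default-base-environment
# endpoints added above to the Libraries API, reached via the usual `w.libraries`
# accessor. The environment name, package pin, and jar path are placeholders, and
# the Environment fields follow the hunks in this diff (note `java_dependencies`
# replaces the removed `jar_dependencies`).
import uuid

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

dbe = w.libraries.create_default_base_environment(
    default_base_environment=compute.DefaultBaseEnvironment(
        name="team-serverless-env",
        environment=compute.Environment(
            environment_version="3",
            dependencies=["pandas==2.2.2"],
            java_dependencies=["/Volumes/path/to/test.jar"],
        ),
    ),
    request_id=str(uuid.uuid4()),  # providing a request_id makes the create idempotent
)

# Fetch the environment back by ID; cache generation happens asynchronously.
fetched = w.libraries.get_default_base_environment(id=dbe.id)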
@@ -10395,6 +10811,53 @@ def install(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) + def list_default_base_environments( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DefaultBaseEnvironment]: + """List default base environments defined in the workspaces for the requested user. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`DefaultBaseEnvironment` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/default-base-environments", query=query, headers=headers) + if "default_base_environments" in json: + for v in json["default_base_environments"]: + yield DefaultBaseEnvironment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def refresh_default_base_environments(self, ids: List[str]): + """Refresh the cached default base environments for the given IDs. This process will asynchronously + regenerate the caches. The existing caches remains available until it expires. + + :param ids: List[str] + + + """ + body = {} + if ids is not None: + body["ids"] = [v for v in ids] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/default-base-environments/refresh", body=body, headers=headers) + def uninstall(self, cluster_id: str, libraries: List[Library]): """Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. @@ -10418,6 +10881,52 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers) + def update_default_base_environment( + self, id: str, default_base_environment: DefaultBaseEnvironment + ) -> DefaultBaseEnvironment: + """Update the default base environment for the given ID. This process will asynchronously regenerate the + cache. The existing cache remains available until it expires. + + :param id: str + :param default_base_environment: :class:`DefaultBaseEnvironment` + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/default-base-environments/{id}", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def update_default_default_base_environment( + self, *, base_environment_type: Optional[BaseEnvironmentType] = None, id: Optional[str] = None + ) -> DefaultBaseEnvironment: + """Set the default base environment for the workspace. This marks the specified DBE as the workspace + default. 
+ + :param base_environment_type: :class:`BaseEnvironmentType` (optional) + :param id: str (optional) + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if base_environment_type is not None: + body["base_environment_type"] = base_environment_type.value + if id is not None: + body["id"] = id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/default-base-environments:setDefault", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 9ea51bfb7..685185ad1 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -102,6 +104,72 @@ def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: return cls(permission_set=d.get("permission_set", None)) +@dataclass +class CancelQueryExecutionResponse: + status: Optional[List[CancelQueryExecutionResponseStatus]] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = [v.as_dict() for v in self.status] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponse: + """Deserializes the CancelQueryExecutionResponse from a dictionary.""" + return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus)) + + +@dataclass +class CancelQueryExecutionResponseStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + pending: Optional[Empty] = None + + success: Optional[Empty] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending.as_dict() + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponseStatus: + """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" + return cls( + data_token=d.get("data_token", None), + pending=_from_dict(d, "pending", Empty), + success=_from_dict(d, "success", Empty), 
+ ) + + @dataclass class CronSchedule: quartz_cron_expression: str @@ -253,6 +321,45 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class ExecuteQueryResponse: + def as_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExecuteQueryResponse: + """Deserializes the ExecuteQueryResponse from a dictionary.""" + return cls() + + @dataclass class GenieAttachment: """Genie AI Response""" @@ -425,12 +532,17 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: class GenieFeedback: """Feedback containing rating and optional comment""" + comment: Optional[str] = None + """Optional feedback comment text""" + rating: Optional[GenieFeedbackRating] = None """The feedback rating""" def as_dict(self) -> dict: """Serializes the GenieFeedback into a dictionary suitable for use as a JSON request body.""" body = {} + if self.comment is not None: + body["comment"] = self.comment if self.rating is not None: body["rating"] = self.rating.value return body @@ -438,6 +550,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the GenieFeedback into a shallow dictionary of its immediate attributes.""" body = {} + if self.comment is not None: + body["comment"] = self.comment if self.rating is not None: body["rating"] = self.rating return body @@ -445,7 +559,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieFeedback: """Deserializes the GenieFeedback from a dictionary.""" - return cls(rating=_enum(d, "rating", GenieFeedbackRating)) + return cls(comment=d.get("comment", None), rating=_enum(d, "rating", GenieFeedbackRating)) class GenieFeedbackRating(Enum): @@ -456,6 +570,57 @@ class GenieFeedbackRating(Enum): POSITIVE = "POSITIVE" +@dataclass +class GenieGenerateDownloadFullQueryResultResponse: + download_id: Optional[str] = None + """Download ID. 
Use this ID to track the download request in subsequent polling calls""" + + def as_dict(self) -> dict: + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.download_id is not None: + body["download_id"] = self.download_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.download_id is not None: + body["download_id"] = self.download_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: + """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" + return cls(download_id=d.get("download_id", None)) + + +@dataclass +class GenieGetDownloadFullQueryResultResponse: + statement_response: Optional[sql.StatementResponse] = None + """SQL Statement Execution response. See [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) for more details.""" + + def as_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: + """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" + return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) + + @dataclass class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None @@ -945,6 +1110,24 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieSuggestedQuestionsAttachment: return cls(questions=d.get("questions", None)) +@dataclass +class GetPublishedDashboardEmbeddedResponse: + def as_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardEmbeddedResponse: + """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" + return cls() + + @dataclass class GetPublishedDashboardTokenInfoResponse: authorization_details: Optional[List[AuthorizationDetails]] = None @@ -1153,6 +1336,9 @@ class MessageErrorType(Enum): GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION = 
"INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION" INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION = "INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION" INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION" INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" @@ -1217,6 +1403,80 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" +@dataclass +class PendingStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + def as_dict(self) -> dict: + """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PendingStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PendingStatus: + """Deserializes the PendingStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None)) + + +@dataclass +class PollQueryStatusResponse: + data: Optional[List[PollQueryStatusResponseData]] = None + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data: + body["data"] = [v.as_dict() for v in self.data] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: + body["data"] = self.data + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponse: + """Deserializes the PollQueryStatusResponse from a dictionary.""" + return cls(data=_repeated_dict(d, "data", PollQueryStatusResponseData)) + + +@dataclass +class PollQueryStatusResponseData: + status: QueryResponseStatus + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = self.status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponseData: + """Deserializes the PollQueryStatusResponseData from a dictionary.""" + return cls(status=_from_dict(d, "status", QueryResponseStatus)) + + @dataclass class PublishedDashboard: display_name: Optional[str] = None @@ -1304,6 +1564,63 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter: return cls(keyword=d.get("keyword", None), sql_type=d.get("sql_type", None), value=d.get("value", None)) +@dataclass +class QueryResponseStatus: + canceled: Optional[Empty] = None + + closed: Optional[Empty] = None + + pending: Optional[PendingStatus] = None + + statement_id: Optional[str] = None + """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be + identical to data_token in SuccessStatus and PendingStatus. 
This field is created for audit + logging purpose to record the statement_id of all QueryResponseStatus.""" + + success: Optional[SuccessStatus] = None + + def as_dict(self) -> dict: + """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled.as_dict() + if self.closed: + body["closed"] = self.closed.as_dict() + if self.pending: + body["pending"] = self.pending.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled + if self.closed: + body["closed"] = self.closed + if self.pending: + body["pending"] = self.pending + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QueryResponseStatus: + """Deserializes the QueryResponseStatus from a dictionary.""" + return cls( + canceled=_from_dict(d, "canceled", Empty), + closed=_from_dict(d, "closed", Empty), + pending=_from_dict(d, "pending", PendingStatus), + statement_id=d.get("statement_id", None), + success=_from_dict(d, "success", SuccessStatus), + ) + + @dataclass class Result: is_truncated: Optional[bool] = None @@ -1627,6 +1944,39 @@ def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser: return cls(user_id=d.get("user_id", None)) +@dataclass +class SuccessStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + truncated: Optional[bool] = None + """Whether the query result is truncated (either by byte limit or row limit)""" + + def as_dict(self) -> dict: + """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + body["truncated"] = self.truncated + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + body["truncated"] = self.truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SuccessStatus: + """Deserializes the SuccessStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None), truncated=d.get("truncated", None)) + + @dataclass class TextAttachment: content: Optional[str] = None @@ -1781,6 +2131,49 @@ def create_message_and_wait( timeout=timeout ) + def create_space( + self, + warehouse_id: str, + serialized_space: str, + *, + description: Optional[str] = None, + parent_path: Optional[str] = None, + title: Optional[str] = None, + ) -> GenieSpace: + """Creates a Genie space from a serialized payload. 
+ + :param warehouse_id: str + Warehouse to associate with the new space + :param serialized_space: str + Serialized export model for the space contents + :param description: str (optional) + Optional description + :param parent_path: str (optional) + Parent folder path where the space will be registered + :param title: str (optional) + Optional title override + + :returns: :class:`GenieSpace` + """ + body = {} + if description is not None: + body["description"] = description + if parent_path is not None: + body["parent_path"] = parent_path + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/genie/spaces", body=body, headers=headers) + return GenieSpace.from_dict(res) + def delete_conversation(self, space_id: str, conversation_id: str): """Delete a conversation. @@ -1877,6 +2270,75 @@ def execute_message_query( ) return GenieGetMessageQueryResultResponse.from_dict(res) + def generate_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGenerateDownloadFullQueryResultResponse: + """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of + the download. The query result is stored in an external link and can be retrieved using the [Get + Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + See [Execute Statement](:method:statementexecution/executestatement) for more details. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads", + headers=headers, + ) + return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) + + def get_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str + ) -> GenieGetDownloadFullQueryResultResponse: + """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and + successfully receiving a `download_id`, use this API to poll the download progress. When the download + is complete, the API returns one or more external links to the query result files. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` + disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute + Statement](:method:statementexecution/executestatement) for more details. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + :param download_id: str + Download ID. 
This ID is provided by the [Generate Download + endpoint](:method:genie/generateDownloadFullQueryResult) + + :returns: :class:`GenieGetDownloadFullQueryResultResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}", + headers=headers, + ) + return GenieGetDownloadFullQueryResultResponse.from_dict(res) + def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: """Get message from conversation. @@ -2098,7 +2560,15 @@ def list_spaces( res = self._api.do("GET", "/api/2.0/genie/spaces", query=query, headers=headers) return GenieListSpacesResponse.from_dict(res) - def send_message_feedback(self, space_id: str, conversation_id: str, message_id: str, rating: GenieFeedbackRating): + def send_message_feedback( + self, + space_id: str, + conversation_id: str, + message_id: str, + rating: GenieFeedbackRating, + *, + comment: Optional[str] = None, + ): """Send feedback for a message. :param space_id: str @@ -2109,10 +2579,14 @@ def send_message_feedback(self, space_id: str, conversation_id: str, message_id: The ID associated with the message to provide feedback for. :param rating: :class:`GenieFeedbackRating` The rating (POSITIVE, NEGATIVE, or NONE). + :param comment: str (optional) + Optional text feedback that will be stored as a comment. """ body = {} + if comment is not None: + body["comment"] = comment if rating is not None: body["rating"] = rating.value headers = { @@ -2176,6 +2650,47 @@ def trash_space(self, space_id: str): self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + def update_space( + self, + space_id: str, + *, + description: Optional[str] = None, + serialized_space: Optional[str] = None, + title: Optional[str] = None, + warehouse_id: Optional[str] = None, + ) -> GenieSpace: + """Updates a Genie space with a serialized payload. + + :param space_id: str + Genie space ID + :param description: str (optional) + Optional description + :param serialized_space: str (optional) + Serialized export model for the space contents (full replacement) + :param title: str (optional) + Optional title override + :param warehouse_id: str (optional) + Optional warehouse override + + :returns: :class:`GenieSpace` + """ + body = {} + if description is not None: + body["description"] = description + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/genie/spaces/{space_id}", body=body, headers=headers) + return GenieSpace.from_dict(res) + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can @@ -2649,6 +3164,21 @@ class LakeviewEmbeddedAPI: def __init__(self, api_client): self._api = api_client + def get_published_dashboard_embedded(self, dashboard_id: str): + """Get the current published dashboard within an embedded context. + + :param dashboard_id: str + UUID identifying the published dashboard. 
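Editor's note: the two new download methods form a generate-then-poll pair, and `send_message_feedback` now accepts an optional free-text comment. A sketch of both, with placeholder IDs; the completion check and sleep interval are illustrative only (a production caller would also inspect `statement_response.status`), not prescribed by the API:

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import GenieFeedbackRating

w = WorkspaceClient()
space_id, conversation_id, message_id, attachment_id = "s1", "c1", "m1", "a1"  # placeholders

# Kick off the SQL execution that backs the full result download.
gen = w.genie.generate_download_full_query_result(space_id, conversation_id, message_id, attachment_id)

# Poll with the returned download_id until the external links are available.
while True:
    result = w.genie.get_download_full_query_result(
        space_id, conversation_id, message_id, attachment_id, gen.download_id
    )
    if result.statement_response is not None:  # simplified readiness check
        break
    time.sleep(5)  # illustrative back-off; never attach an Authorization header to the links

# Feedback can now carry a comment alongside the rating.
w.genie.send_message_feedback(
    space_id, conversation_id, message_id,
    rating=GenieFeedbackRating.POSITIVE,
    comment="Query results matched expectations.",
)
```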
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded", headers=headers) + def get_published_dashboard_token_info( self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None ) -> GetPublishedDashboardTokenInfoResponse: @@ -2677,3 +3207,93 @@ def get_published_dashboard_token_info( "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo", query=query, headers=headers ) return GetPublishedDashboardTokenInfoResponse.from_dict(res) + + +class QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards""" + + def __init__(self, api_client): + self._api = api_client + + def cancel_published_query_execution( + self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None + ) -> CancelQueryExecutionResponse: + """Cancel the results for the a query for a published, embedded dashboard. + + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`CancelQueryExecutionResponse` + """ + + query = {} + if dashboard_name is not None: + query["dashboard_name"] = dashboard_name + if dashboard_revision_id is not None: + query["dashboard_revision_id"] = dashboard_revision_id + if tokens is not None: + query["tokens"] = [v for v in tokens] + headers = { + "Accept": "application/json", + } + + res = self._api.do("DELETE", "/api/2.0/lakeview-query/query/published", query=query, headers=headers) + return CancelQueryExecutionResponse.from_dict(res) + + def execute_published_dashboard_query( + self, dashboard_name: str, dashboard_revision_id: str, *, override_warehouse_id: Optional[str] = None + ): + """Execute a query for a published dashboard. + + :param dashboard_name: str + Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the + list of datasets, warehouse_id, and embedded_credentials + :param dashboard_revision_id: str + :param override_warehouse_id: str (optional) + A dashboard schedule can override the warehouse used as compute for processing the published + dashboard queries + + + """ + body = {} + if dashboard_name is not None: + body["dashboard_name"] = dashboard_name + if dashboard_revision_id is not None: + body["dashboard_revision_id"] = dashboard_revision_id + if override_warehouse_id is not None: + body["override_warehouse_id"] = override_warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/lakeview-query/query/published", body=body, headers=headers) + + def poll_published_query_status( + self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None + ) -> PollQueryStatusResponse: + """Poll the results for the a query for a published, embedded dashboard. 
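Editor's note: the three `QueryExecutionAPI` methods form an execute / poll / cancel cycle keyed by dashboard name and revision, and each polled entry carries exactly one of `pending`, `success`, `canceled`, or `closed`. A sketch with placeholder identifiers, assuming the API is exposed on the workspace client as `query_execution`:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
name, revision = "dashboards/abc123", "rev-1"  # placeholder identifiers

# Fire the published-dashboard queries (optionally on an override warehouse).
w.query_execution.execute_published_dashboard_query(name, revision)

# Poll for status and inspect the per-query state.
status = w.query_execution.poll_published_query_status(name, revision)
for item in status.data or []:
    if item.status.success is not None:
        print("done:", item.status.success.data_token, "truncated:", item.status.success.truncated)
    elif item.status.pending is not None:
        print("still running:", item.status.pending.data_token)

# Cancel any still-pending queries by their tokens.
pending_tokens = [i.status.pending.data_token for i in (status.data or []) if i.status.pending]
if pending_tokens:
    w.query_execution.cancel_published_query_execution(name, revision, tokens=pending_tokens)
```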
+ + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`PollQueryStatusResponse` + """ + + query = {} + if dashboard_name is not None: + query["dashboard_name"] = dashboard_name + if dashboard_revision_id is not None: + query["dashboard_revision_id"] = dashboard_revision_id + if tokens is not None: + query["tokens"] = [v for v in tokens] + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/lakeview-query/query/published", query=query, headers=headers) + return PollQueryStatusResponse.from_dict(res) diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index 73d084ce2..a2d2a494d 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -10,7 +10,8 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) _LOG = logging.getLogger("databricks.sdk") @@ -50,6 +51,137 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomTag: return cls(key=d.get("key", None), value=d.get("value", None)) +@dataclass +class DatabaseBranch: + project_id: str + + branch_id: Optional[str] = None + + create_time: Optional[str] = None + """A timestamp indicating when the branch was created.""" + + current_state: Optional[str] = None + """The branch’s state, indicating if it is initializing, ready for use, or archived.""" + + default: Optional[bool] = None + """Whether the branch is the project's default branch. This field is only returned on create/update + responses. See effective_default for the value that is actually applied to the database branch.""" + + effective_default: Optional[bool] = None + """Whether the branch is the project's default branch.""" + + is_protected: Optional[bool] = None + """Whether the branch is protected.""" + + logical_size_bytes: Optional[int] = None + """The logical size of the branch.""" + + parent_id: Optional[str] = None + """The id of the parent branch""" + + parent_lsn: Optional[str] = None + """The Log Sequence Number (LSN) on the parent branch from which this branch was created. 
When + restoring a branch using the Restore Database Branch endpoint, this value isn’t finalized + until all operations related to the restore have completed successfully.""" + + parent_time: Optional[str] = None + """The point in time on the parent branch from which this branch was created.""" + + pending_state: Optional[str] = None + + state_change_time: Optional[str] = None + """A timestamp indicating when the `current_state` began.""" + + update_time: Optional[str] = None + """A timestamp indicating when the branch was last updated.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseBranch into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.default is not None: + body["default"] = self.default + if self.effective_default is not None: + body["effective_default"] = self.effective_default + if self.is_protected is not None: + body["is_protected"] = self.is_protected + if self.logical_size_bytes is not None: + body["logical_size_bytes"] = self.logical_size_bytes + if self.parent_id is not None: + body["parent_id"] = self.parent_id + if self.parent_lsn is not None: + body["parent_lsn"] = self.parent_lsn + if self.parent_time is not None: + body["parent_time"] = self.parent_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.project_id is not None: + body["project_id"] = self.project_id + if self.state_change_time is not None: + body["state_change_time"] = self.state_change_time + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseBranch into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.default is not None: + body["default"] = self.default + if self.effective_default is not None: + body["effective_default"] = self.effective_default + if self.is_protected is not None: + body["is_protected"] = self.is_protected + if self.logical_size_bytes is not None: + body["logical_size_bytes"] = self.logical_size_bytes + if self.parent_id is not None: + body["parent_id"] = self.parent_id + if self.parent_lsn is not None: + body["parent_lsn"] = self.parent_lsn + if self.parent_time is not None: + body["parent_time"] = self.parent_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.project_id is not None: + body["project_id"] = self.project_id + if self.state_change_time is not None: + body["state_change_time"] = self.state_change_time + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseBranch: + """Deserializes the DatabaseBranch from a dictionary.""" + return cls( + branch_id=d.get("branch_id", None), + create_time=d.get("create_time", None), + current_state=d.get("current_state", None), + default=d.get("default", None), + effective_default=d.get("effective_default", None), + is_protected=d.get("is_protected", None), + logical_size_bytes=d.get("logical_size_bytes", None), + 
parent_id=d.get("parent_id", None), + parent_lsn=d.get("parent_lsn", None), + parent_time=d.get("parent_time", None), + pending_state=d.get("pending_state", None), + project_id=d.get("project_id", None), + state_change_time=d.get("state_change_time", None), + update_time=d.get("update_time", None), + ) + + @dataclass class DatabaseCatalog: name: str @@ -63,6 +195,12 @@ class DatabaseCatalog: create_database_if_not_exists: Optional[bool] = None + database_branch_id: Optional[str] = None + """The branch_id of the database branch associated with the catalog.""" + + database_project_id: Optional[str] = None + """The project_id of the database project associated with the catalog.""" + uid: Optional[str] = None def as_dict(self) -> dict: @@ -70,10 +208,14 @@ def as_dict(self) -> dict: body = {} if self.create_database_if_not_exists is not None: body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name if self.database_name is not None: body["database_name"] = self.database_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id if self.name is not None: body["name"] = self.name if self.uid is not None: @@ -85,10 +227,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.create_database_if_not_exists is not None: body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name if self.database_name is not None: body["database_name"] = self.database_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id if self.name is not None: body["name"] = self.name if self.uid is not None: @@ -100,8 +246,10 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog: """Deserializes the DatabaseCatalog from a dictionary.""" return cls( create_database_if_not_exists=d.get("create_database_if_not_exists", None), + database_branch_id=d.get("database_branch_id", None), database_instance_name=d.get("database_instance_name", None), database_name=d.get("database_name", None), + database_project_id=d.get("database_project_id", None), name=d.get("name", None), uid=d.get("uid", None), ) @@ -137,6 +285,221 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential: return cls(expiration_time=d.get("expiration_time", None), token=d.get("token", None)) +@dataclass +class DatabaseEndpoint: + project_id: str + + branch_id: str + + autoscaling_limit_max_cu: Optional[float] = None + """The maximum number of Compute Units.""" + + autoscaling_limit_min_cu: Optional[float] = None + """The minimum number of Compute Units.""" + + create_time: Optional[str] = None + """A timestamp indicating when the compute endpoint was created.""" + + current_state: Optional[DatabaseEndpointState] = None + + disabled: Optional[bool] = None + """Whether to restrict connections to the compute endpoint. Enabling this option schedules a + suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + console action.""" + + endpoint_id: Optional[str] = None + + host: Optional[str] = None + """The hostname of the compute endpoint. 
This is the hostname specified when connecting to a + database.""" + + last_active_time: Optional[str] = None + """A timestamp indicating when the compute endpoint was last active.""" + + pending_state: Optional[DatabaseEndpointState] = None + + pooler_mode: Optional[DatabaseEndpointPoolerMode] = None + + settings: Optional[DatabaseEndpointSettings] = None + + start_time: Optional[str] = None + """A timestamp indicating when the compute endpoint was last started.""" + + suspend_time: Optional[str] = None + """A timestamp indicating when the compute endpoint was last suspended.""" + + suspend_timeout_duration: Optional[str] = None + """Duration of inactivity after which the compute endpoint is automatically suspended.""" + + type: Optional[DatabaseEndpointType] = None + """NOTE: if want type to default to some value set the server then an effective_type field OR make + this field REQUIRED""" + + update_time: Optional[str] = None + """A timestamp indicating when the compute endpoint was last updated.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseEndpoint into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.autoscaling_limit_max_cu is not None: + body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu + if self.autoscaling_limit_min_cu is not None: + body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state.value + if self.disabled is not None: + body["disabled"] = self.disabled + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.host is not None: + body["host"] = self.host + if self.last_active_time is not None: + body["last_active_time"] = self.last_active_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state.value + if self.pooler_mode is not None: + body["pooler_mode"] = self.pooler_mode.value + if self.project_id is not None: + body["project_id"] = self.project_id + if self.settings: + body["settings"] = self.settings.as_dict() + if self.start_time is not None: + body["start_time"] = self.start_time + if self.suspend_time is not None: + body["suspend_time"] = self.suspend_time + if self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + if self.type is not None: + body["type"] = self.type.value + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscaling_limit_max_cu is not None: + body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu + if self.autoscaling_limit_min_cu is not None: + body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.disabled is not None: + body["disabled"] = self.disabled + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.host is not None: + body["host"] = self.host + if self.last_active_time is not None: + body["last_active_time"] = self.last_active_time + if self.pending_state 
is not None: + body["pending_state"] = self.pending_state + if self.pooler_mode is not None: + body["pooler_mode"] = self.pooler_mode + if self.project_id is not None: + body["project_id"] = self.project_id + if self.settings: + body["settings"] = self.settings + if self.start_time is not None: + body["start_time"] = self.start_time + if self.suspend_time is not None: + body["suspend_time"] = self.suspend_time + if self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + if self.type is not None: + body["type"] = self.type + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseEndpoint: + """Deserializes the DatabaseEndpoint from a dictionary.""" + return cls( + autoscaling_limit_max_cu=d.get("autoscaling_limit_max_cu", None), + autoscaling_limit_min_cu=d.get("autoscaling_limit_min_cu", None), + branch_id=d.get("branch_id", None), + create_time=d.get("create_time", None), + current_state=_enum(d, "current_state", DatabaseEndpointState), + disabled=d.get("disabled", None), + endpoint_id=d.get("endpoint_id", None), + host=d.get("host", None), + last_active_time=d.get("last_active_time", None), + pending_state=_enum(d, "pending_state", DatabaseEndpointState), + pooler_mode=_enum(d, "pooler_mode", DatabaseEndpointPoolerMode), + project_id=d.get("project_id", None), + settings=_from_dict(d, "settings", DatabaseEndpointSettings), + start_time=d.get("start_time", None), + suspend_time=d.get("suspend_time", None), + suspend_timeout_duration=d.get("suspend_timeout_duration", None), + type=_enum(d, "type", DatabaseEndpointType), + update_time=d.get("update_time", None), + ) + + +class DatabaseEndpointPoolerMode(Enum): + """The connection pooler mode. Lakebase supports PgBouncer in `transaction` mode only.""" + + TRANSACTION = "TRANSACTION" + + +@dataclass +class DatabaseEndpointSettings: + """A collection of settings for a compute endpoint""" + + pg_settings: Optional[Dict[str, str]] = None + """A raw representation of Postgres settings.""" + + pgbouncer_settings: Optional[Dict[str, str]] = None + """A raw representation of PgBouncer settings.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseEndpointSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseEndpointSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseEndpointSettings: + """Deserializes the DatabaseEndpointSettings from a dictionary.""" + return cls(pg_settings=d.get("pg_settings", None), pgbouncer_settings=d.get("pgbouncer_settings", None)) + + +class DatabaseEndpointState(Enum): + """The state of the compute endpoint""" + + ACTIVE = "ACTIVE" + IDLE = "IDLE" + INIT = "INIT" + + +class DatabaseEndpointType(Enum): + """The compute endpoint type. 
Either `read_write` or `read_only`.""" + + READ_ONLY = "READ_ONLY" + READ_WRITE = "READ_WRITE" + + @dataclass class DatabaseInstance: """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and @@ -587,6 +950,261 @@ class DatabaseInstanceState(Enum): UPDATING = "UPDATING" +@dataclass +class DatabaseProject: + branch_logical_size_limit_bytes: Optional[int] = None + """The logical size limit for a branch.""" + + budget_policy_id: Optional[str] = None + """The desired budget policy to associate with the instance. This field is only returned on + create/update responses, and represents the customer provided budget policy. See + effective_budget_policy_id for the policy that is actually applied to the instance.""" + + compute_last_active_time: Optional[str] = None + """The most recent time when any endpoint of this project was active.""" + + create_time: Optional[str] = None + """A timestamp indicating when the project was created.""" + + custom_tags: Optional[List[DatabaseProjectCustomTag]] = None + """Custom tags associated with the instance.""" + + default_endpoint_settings: Optional[DatabaseProjectDefaultEndpointSettings] = None + + display_name: Optional[str] = None + """Human-readable project name.""" + + effective_budget_policy_id: Optional[str] = None + """The policy that is applied to the instance.""" + + history_retention_duration: Optional[str] = None + """The number of seconds to retain the shared history for point in time recovery for all branches + in this project.""" + + pg_version: Optional[int] = None + """The major Postgres version number. NOTE: fields could be either user-set or server-set. we can't + have fields that are optionally user-provided and server-set to default value. TODO: this needs + an effective variant or make REQUIRED""" + + project_id: Optional[str] = None + + settings: Optional[DatabaseProjectSettings] = None + + synthetic_storage_size_bytes: Optional[int] = None + """The current space occupied by the project in storage. 
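Editor's note: `DatabaseEndpoint` models a per-branch compute endpoint with optional autoscaling bounds, raw Postgres/PgBouncer settings, and a read-write or read-only type. A construction sketch only; how the object is submitted (e.g. via a create method on the new project API) is outside this hunk, and the duration string format is an assumption based on the field name:

```python
from databricks.sdk.service.database import (DatabaseEndpoint,
                                              DatabaseEndpointSettings,
                                              DatabaseEndpointType)

endpoint = DatabaseEndpoint(
    project_id="proj-123",  # placeholder
    branch_id="br-main",    # placeholder
    type=DatabaseEndpointType.READ_ONLY,
    autoscaling_limit_min_cu=0.5,
    autoscaling_limit_max_cu=4.0,
    suspend_timeout_duration="300s",  # duration format assumed
    settings=DatabaseEndpointSettings(
        pg_settings={"statement_timeout": "30s"},
        pgbouncer_settings={"default_pool_size": "20"},
    ),
)

# as_dict() produces the JSON request body; enum fields serialize to their string values.
payload = endpoint.as_dict()
```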
Synthetic storage size combines the + logical data size and Write-Ahead Log (WAL) size for all branches in a project.""" + + update_time: Optional[str] = None + """A timestamp indicating when the project was last updated.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProject into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch_logical_size_limit_bytes is not None: + body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_last_active_time is not None: + body["compute_last_active_time"] = self.compute_last_active_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.default_endpoint_settings: + body["default_endpoint_settings"] = self.default_endpoint_settings.as_dict() + if self.display_name is not None: + body["display_name"] = self.display_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.history_retention_duration is not None: + body["history_retention_duration"] = self.history_retention_duration + if self.pg_version is not None: + body["pg_version"] = self.pg_version + if self.project_id is not None: + body["project_id"] = self.project_id + if self.settings: + body["settings"] = self.settings.as_dict() + if self.synthetic_storage_size_bytes is not None: + body["synthetic_storage_size_bytes"] = self.synthetic_storage_size_bytes + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProject into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch_logical_size_limit_bytes is not None: + body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_last_active_time is not None: + body["compute_last_active_time"] = self.compute_last_active_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.default_endpoint_settings: + body["default_endpoint_settings"] = self.default_endpoint_settings + if self.display_name is not None: + body["display_name"] = self.display_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.history_retention_duration is not None: + body["history_retention_duration"] = self.history_retention_duration + if self.pg_version is not None: + body["pg_version"] = self.pg_version + if self.project_id is not None: + body["project_id"] = self.project_id + if self.settings: + body["settings"] = self.settings + if self.synthetic_storage_size_bytes is not None: + body["synthetic_storage_size_bytes"] = self.synthetic_storage_size_bytes + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProject: + """Deserializes the DatabaseProject from a dictionary.""" + return cls( + branch_logical_size_limit_bytes=d.get("branch_logical_size_limit_bytes", None), + budget_policy_id=d.get("budget_policy_id", None), + compute_last_active_time=d.get("compute_last_active_time", 
None), + create_time=d.get("create_time", None), + custom_tags=_repeated_dict(d, "custom_tags", DatabaseProjectCustomTag), + default_endpoint_settings=_from_dict( + d, "default_endpoint_settings", DatabaseProjectDefaultEndpointSettings + ), + display_name=d.get("display_name", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + history_retention_duration=d.get("history_retention_duration", None), + pg_version=d.get("pg_version", None), + project_id=d.get("project_id", None), + settings=_from_dict(d, "settings", DatabaseProjectSettings), + synthetic_storage_size_bytes=d.get("synthetic_storage_size_bytes", None), + update_time=d.get("update_time", None), + ) + + +@dataclass +class DatabaseProjectCustomTag: + key: Optional[str] = None + """The key of the custom tag.""" + + value: Optional[str] = None + """The value of the custom tag.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProjectCustomTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProjectCustomTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectCustomTag: + """Deserializes the DatabaseProjectCustomTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + +@dataclass +class DatabaseProjectDefaultEndpointSettings: + """A collection of settings for a database endpoint.""" + + autoscaling_limit_max_cu: Optional[float] = None + """The maximum number of Compute Units.""" + + autoscaling_limit_min_cu: Optional[float] = None + """The minimum number of Compute Units.""" + + pg_settings: Optional[Dict[str, str]] = None + """A raw representation of Postgres settings.""" + + pgbouncer_settings: Optional[Dict[str, str]] = None + """A raw representation of PgBouncer settings.""" + + suspend_timeout_duration: Optional[str] = None + """Duration of inactivity after which the compute endpoint is automatically suspended.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProjectDefaultEndpointSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.autoscaling_limit_max_cu is not None: + body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu + if self.autoscaling_limit_min_cu is not None: + body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + if self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProjectDefaultEndpointSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscaling_limit_max_cu is not None: + body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu + if self.autoscaling_limit_min_cu is not None: + body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + if 
self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectDefaultEndpointSettings: + """Deserializes the DatabaseProjectDefaultEndpointSettings from a dictionary.""" + return cls( + autoscaling_limit_max_cu=d.get("autoscaling_limit_max_cu", None), + autoscaling_limit_min_cu=d.get("autoscaling_limit_min_cu", None), + pg_settings=d.get("pg_settings", None), + pgbouncer_settings=d.get("pgbouncer_settings", None), + suspend_timeout_duration=d.get("suspend_timeout_duration", None), + ) + + +@dataclass +class DatabaseProjectSettings: + enable_logical_replication: Optional[bool] = None + """Sets wal_level=logical for all compute endpoints in this project. All active endpoints will be + suspended. Once enabled, logical replication cannot be disabled.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProjectSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.enable_logical_replication is not None: + body["enable_logical_replication"] = self.enable_logical_replication + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProjectSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enable_logical_replication is not None: + body["enable_logical_replication"] = self.enable_logical_replication + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectSettings: + """Deserializes the DatabaseProjectSettings from a dictionary.""" + return cls(enable_logical_replication=d.get("enable_logical_replication", None)) + + @dataclass class DatabaseTable: """Next field marker: 13""" @@ -611,6 +1229,9 @@ class DatabaseTable: When creating a table in a standard catalog, this field is required. 
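Editor's note: `DatabaseProject` ties together the budget policy, custom tags, default endpoint settings, and project-level settings. A construction sketch; the API call that accepts it (presumably on the new `DatabaseProjectAPI`) is outside this hunk, and the values shown are placeholders:

```python
from databricks.sdk.service.database import (
    DatabaseProject, DatabaseProjectCustomTag,
    DatabaseProjectDefaultEndpointSettings, DatabaseProjectSettings)

project = DatabaseProject(
    display_name="analytics-pg",
    pg_version=16,
    budget_policy_id="policy-abc",  # placeholder; effective_budget_policy_id is server-set
    custom_tags=[DatabaseProjectCustomTag(key="team", value="analytics")],
    default_endpoint_settings=DatabaseProjectDefaultEndpointSettings(
        autoscaling_limit_min_cu=0.5,
        autoscaling_limit_max_cu=2.0,
        suspend_timeout_duration="600s",  # duration format assumed
    ),
    # Per the docstring above, logical replication cannot be disabled once enabled.
    settings=DatabaseProjectSettings(enable_logical_replication=True),
)

payload = project.as_dict()  # read-only fields such as project_id are server-populated
```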
In this scenario, specifying this field will allow targeting an arbitrary postgres database.""" + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} @@ -620,6 +1241,8 @@ def as_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body def as_shallow_dict(self) -> dict: @@ -631,6 +1254,8 @@ def as_shallow_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body @classmethod @@ -640,6 +1265,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: database_instance_name=d.get("database_instance_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), + table_serving_url=d.get("table_serving_url", None), ) @@ -679,6 +1305,41 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: ) +@dataclass +class ListDatabaseBranchesResponse: + database_branches: Optional[List[DatabaseBranch]] = None + """List of branches.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of instances.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseBranchesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_branches: + body["database_branches"] = [v.as_dict() for v in self.database_branches] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseBranchesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_branches: + body["database_branches"] = self.database_branches + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseBranchesResponse: + """Deserializes the ListDatabaseBranchesResponse from a dictionary.""" + return cls( + database_branches=_repeated_dict(d, "database_branches", DatabaseBranch), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListDatabaseCatalogsResponse: database_catalogs: Optional[List[DatabaseCatalog]] = None @@ -713,6 +1374,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseCatalogsResponse: ) +@dataclass +class ListDatabaseEndpointsResponse: + database_endpoints: Optional[List[DatabaseEndpoint]] = None + """List of endpoints.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of instances.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseEndpointsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_endpoints: + body["database_endpoints"] = [v.as_dict() for v in self.database_endpoints] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseEndpointsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_endpoints: + body["database_endpoints"] = 
self.database_endpoints + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseEndpointsResponse: + """Deserializes the ListDatabaseEndpointsResponse from a dictionary.""" + return cls( + database_endpoints=_repeated_dict(d, "database_endpoints", DatabaseEndpoint), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListDatabaseInstanceRolesResponse: database_instance_roles: Optional[List[DatabaseInstanceRole]] = None @@ -783,6 +1479,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: ) +@dataclass +class ListDatabaseProjectsResponse: + database_projects: Optional[List[DatabaseProject]] = None + """List of projects.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of instances.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseProjectsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_projects: + body["database_projects"] = [v.as_dict() for v in self.database_projects] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseProjectsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_projects: + body["database_projects"] = self.database_projects + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseProjectsResponse: + """Deserializes the ListDatabaseProjectsResponse from a dictionary.""" + return cls( + database_projects=_repeated_dict(d, "database_projects", DatabaseProject), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListSyncedDatabaseTablesResponse: next_page_token: Optional[str] = None @@ -822,6 +1553,9 @@ class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other fields of pipeline are still inferred by table def internally""" + budget_policy_id: Optional[str] = None + """Budget policy of this pipeline.""" + storage_catalog: Optional[str] = None """This field needs to be specified if the destination catalog is a managed postgres catalog. 
@@ -837,6 +1571,8 @@ class NewPipelineSpec: def as_dict(self) -> dict: """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -846,6 +1582,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -855,7 +1593,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: """Deserializes the NewPipelineSpec from a dictionary.""" - return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None)) + return cls( + budget_policy_id=d.get("budget_policy_id", None), + storage_catalog=d.get("storage_catalog", None), + storage_schema=d.get("storage_schema", None), + ) class ProvisioningInfoState(Enum): @@ -985,6 +1727,9 @@ class SyncedDatabaseTable: spec: Optional[SyncedTableSpec] = None + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline @@ -1007,6 +1752,8 @@ def as_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body @@ -1028,6 +1775,8 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @@ -1043,6 +1792,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), spec=_from_dict(d, "spec", SyncedTableSpec), + table_serving_url=d.get("table_serving_url", None), unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), ) @@ -1768,6 +2518,28 @@ def delete_synced_database_table(self, name: str): self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers) + def failover_database_instance( + self, name: str, *, failover_target_database_instance_name: Optional[str] = None + ) -> DatabaseInstance: + """Failover the primary node of a Database Instance to a secondary. + + :param name: str + Name of the instance to failover. 
+ :param failover_target_database_instance_name: str (optional) + + :returns: :class:`DatabaseInstance` + """ + body = {} + if failover_target_database_instance_name is not None: + body["failover_target_database_instance_name"] = failover_target_database_instance_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/instances/{name}/failover", body=body, headers=headers) + return DatabaseInstance.from_dict(res) + def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance: """Find a Database Instance by uid. @@ -2086,6 +2858,42 @@ def update_database_instance( res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers) return DatabaseInstance.from_dict(res) + def update_database_instance_role( + self, + instance_name: str, + name: str, + database_instance_role: DatabaseInstanceRole, + *, + database_instance_name: Optional[str] = None, + ) -> DatabaseInstanceRole: + """Update a role for a Database Instance. + + :param instance_name: str + :param name: str + The name of the role. This is the unique identifier for the role in an instance. + :param database_instance_role: :class:`DatabaseInstanceRole` + :param database_instance_name: str (optional) + + :returns: :class:`DatabaseInstanceRole` + """ + body = database_instance_role.as_dict() + query = {} + if database_instance_name is not None: + query["database_instance_name"] = database_instance_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/database/instances/{instance_name}/roles/{name}", + query=query, + body=body, + headers=headers, + ) + return DatabaseInstanceRole.from_dict(res) + def update_synced_database_table( self, name: str, synced_table: SyncedDatabaseTable, update_mask: str ) -> SyncedDatabaseTable: @@ -2111,3 +2919,381 @@ def update_synced_database_table( res = self._api.do("PATCH", f"/api/2.0/database/synced_tables/{name}", query=query, body=body, headers=headers) return SyncedDatabaseTable.from_dict(res) + + +class DatabaseProjectAPI: + """Database Projects provide access to a database via REST API or direct SQL.""" + + def __init__(self, api_client): + self._api = api_client + + def create_database_branch(self, project_id: str, database_branch: DatabaseBranch) -> DatabaseBranch: + """Create a Database Branch. + + :param project_id: str + :param database_branch: :class:`DatabaseBranch` + + :returns: :class:`DatabaseBranch` + """ + body = database_branch.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/projects/{project_id}/branches", body=body, headers=headers) + return DatabaseBranch.from_dict(res) + + def create_database_endpoint( + self, project_id: str, branch_id: str, database_endpoint: DatabaseEndpoint + ) -> DatabaseEndpoint: + """Create a Database Endpoint. 
+ + :param project_id: str + :param branch_id: str + :param database_endpoint: :class:`DatabaseEndpoint` + + :returns: :class:`DatabaseEndpoint` + """ + body = database_endpoint.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints", + body=body, + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def create_database_project(self, database_project: DatabaseProject) -> DatabaseProject: + """Create a Database Project. + + :param database_project: :class:`DatabaseProject` + + :returns: :class:`DatabaseProject` + """ + body = database_project.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/database/projects", body=body, headers=headers) + return DatabaseProject.from_dict(res) + + def delete_database_branch(self, project_id: str, branch_id: str): + """Delete a Database Branch. + + :param project_id: str + :param branch_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/projects/{project_id}/branches/{branch_id}", headers=headers) + + def delete_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str): + """Delete a Database Endpoint. + + :param project_id: str + :param branch_id: str + :param endpoint_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + headers=headers, + ) + + def delete_database_project(self, project_id: str): + """Delete a Database Project. + + :param project_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/projects/{project_id}", headers=headers) + + def get_database_branch(self, project_id: str, branch_id: str) -> DatabaseBranch: + """Get a Database Branch. + + :param project_id: str + :param branch_id: str + + :returns: :class:`DatabaseBranch` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/projects/{project_id}/branches/{branch_id}", headers=headers) + return DatabaseBranch.from_dict(res) + + def get_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str) -> DatabaseEndpoint: + """Get a Database Endpoint. + + :param project_id: str + :param branch_id: str + :param endpoint_id: str + + :returns: :class:`DatabaseEndpoint` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def get_database_project(self, project_id: str) -> DatabaseProject: + """Get a Database Project. + + :param project_id: str + + :returns: :class:`DatabaseProject` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/projects/{project_id}", headers=headers) + return DatabaseProject.from_dict(res) + + def list_database_branches( + self, project_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseBranch]: + """List Database Branches. + + :param project_id: str + :param page_size: int (optional) + Upper bound for items returned. 
+ :param page_token: str (optional) + Pagination token to go to the next page of Database Branches. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseBranch` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/projects/{project_id}/branches", query=query, headers=headers + ) + if "database_branches" in json: + for v in json["database_branches"]: + yield DatabaseBranch.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_database_endpoints( + self, project_id: str, branch_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseEndpoint]: + """List Database Endpoints. + + :param project_id: str + :param branch_id: str + :param page_size: int (optional) + Upper bound for items returned. If specified must be at least 10. + :param page_token: str (optional) + Pagination token to go to the next page of Database Endpoints. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseEndpoint` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints", + query=query, + headers=headers, + ) + if "database_endpoints" in json: + for v in json["database_endpoints"]: + yield DatabaseEndpoint.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_database_projects( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseProject]: + """List Database Projects. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Projects. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseProject` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/database/projects", query=query, headers=headers) + if "database_projects" in json: + for v in json["database_projects"]: + yield DatabaseProject.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def restart_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str) -> DatabaseEndpoint: + """Restart a Database Endpoint.
TODO: should return databricks.longrunning.Operation + + :param project_id: str + :param branch_id: str + :param endpoint_id: str + + :returns: :class:`DatabaseEndpoint` + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}/restart", + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def update_database_branch( + self, project_id: str, branch_id: str, database_branch: DatabaseBranch, update_mask: str + ) -> DatabaseBranch: + """Update a Database Branch. + + :param project_id: str + :param branch_id: str + :param database_branch: :class:`DatabaseBranch` + :param update_mask: str + The list of fields to update. If unspecified, all fields will be updated when possible. + + :returns: :class:`DatabaseBranch` + """ + body = database_branch.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}", + query=query, + body=body, + headers=headers, + ) + return DatabaseBranch.from_dict(res) + + def update_database_endpoint( + self, project_id: str, branch_id: str, endpoint_id: str, database_endpoint: DatabaseEndpoint, update_mask: str + ) -> DatabaseEndpoint: + """Update a Database Endpoint. TODO: should return databricks.longrunning.Operation { + + :param project_id: str + :param branch_id: str + :param endpoint_id: str + :param database_endpoint: :class:`DatabaseEndpoint` + :param update_mask: str + The list of fields to update. If unspecified, all fields will be updated when possible. + + :returns: :class:`DatabaseEndpoint` + """ + body = database_endpoint.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + query=query, + body=body, + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def update_database_project( + self, project_id: str, database_project: DatabaseProject, update_mask: str + ) -> DatabaseProject: + """Update a Database Project. + + :param project_id: str + :param database_project: :class:`DatabaseProject` + :param update_mask: str + The list of fields to update. If unspecified, all fields will be updated when possible. + + :returns: :class:`DatabaseProject` + """ + body = database_project.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/projects/{project_id}", query=query, body=body, headers=headers) + return DatabaseProject.from_dict(res) diff --git a/databricks/sdk/service/dataquality.py b/databricks/sdk/service/dataquality.py new file mode 100755 index 000000000..ed09644f6 --- /dev/null +++ b/databricks/sdk/service/dataquality.py @@ -0,0 +1,1186 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, Iterator, List, Optional + +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +class AggregationGranularity(Enum): + """The granularity for aggregating data into time windows based on their timestamp.""" + + AGGREGATION_GRANULARITY_1_DAY = "AGGREGATION_GRANULARITY_1_DAY" + AGGREGATION_GRANULARITY_1_HOUR = "AGGREGATION_GRANULARITY_1_HOUR" + AGGREGATION_GRANULARITY_1_MONTH = "AGGREGATION_GRANULARITY_1_MONTH" + AGGREGATION_GRANULARITY_1_WEEK = "AGGREGATION_GRANULARITY_1_WEEK" + AGGREGATION_GRANULARITY_1_YEAR = "AGGREGATION_GRANULARITY_1_YEAR" + AGGREGATION_GRANULARITY_2_WEEKS = "AGGREGATION_GRANULARITY_2_WEEKS" + AGGREGATION_GRANULARITY_30_MINUTES = "AGGREGATION_GRANULARITY_30_MINUTES" + AGGREGATION_GRANULARITY_3_WEEKS = "AGGREGATION_GRANULARITY_3_WEEKS" + AGGREGATION_GRANULARITY_4_WEEKS = "AGGREGATION_GRANULARITY_4_WEEKS" + AGGREGATION_GRANULARITY_5_MINUTES = "AGGREGATION_GRANULARITY_5_MINUTES" + + +@dataclass +class AnomalyDetectionConfig: + """Anomaly Detection Configurations.""" + + anomaly_detection_workflow_id: Optional[int] = None + """The id of the workflow that detects the anomaly. This field will only be returned in the + Get/Update response, if the request comes from the workspace where this anomaly detection job is + created.""" + + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + + publish_health_indicator: Optional[bool] = None + """If the health indicator should be shown.""" + + def as_dict(self) -> dict: + """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.anomaly_detection_workflow_id is not None: + body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id + if self.job_type is not None: + body["job_type"] = self.job_type.value + if self.publish_health_indicator is not None: + body["publish_health_indicator"] = self.publish_health_indicator + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.anomaly_detection_workflow_id is not None: + body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id + if self.job_type is not None: + body["job_type"] = self.job_type + if self.publish_health_indicator is not None: + body["publish_health_indicator"] = self.publish_health_indicator + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: + """Deserializes the AnomalyDetectionConfig from a dictionary.""" + return cls( + anomaly_detection_workflow_id=d.get("anomaly_detection_workflow_id", None), + job_type=_enum(d, "job_type", AnomalyDetectionJobType), + publish_health_indicator=d.get("publish_health_indicator", None), + ) + + +class AnomalyDetectionJobType(Enum): + """Anomaly Detection job type.""" + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + + +@dataclass +class CancelRefreshResponse: + """Response to cancelling a refresh.""" + + refresh: Optional[Refresh] = None + """The refresh to cancel.""" + + def as_dict(self) -> dict: + """Serializes the 
CancelRefreshResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.refresh: + body["refresh"] = self.refresh.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelRefreshResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.refresh: + body["refresh"] = self.refresh + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelRefreshResponse: + """Deserializes the CancelRefreshResponse from a dictionary.""" + return cls(refresh=_from_dict(d, "refresh", Refresh)) + + +@dataclass +class CronSchedule: + """The data quality monitoring workflow cron schedule.""" + + quartz_cron_expression: str + """The expression that determines when to run the monitor. See [examples]. + + [examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" + + timezone_id: str + """A Java timezone id. The schedule for a job will be resolved with respect to this timezone. See + `Java TimeZone `_ for details. + The timezone id (e.g., ``America/Los_Angeles``) in which to evaluate the quartz expression.""" + + pause_status: Optional[CronSchedulePauseStatus] = None + """Read only field that indicates whether the schedule is paused or not.""" + + def as_dict(self) -> dict: + """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: + """Deserializes the CronSchedule from a dictionary.""" + return cls( + pause_status=_enum(d, "pause_status", CronSchedulePauseStatus), + quartz_cron_expression=d.get("quartz_cron_expression", None), + timezone_id=d.get("timezone_id", None), + ) + + +class CronSchedulePauseStatus(Enum): + """The data quality monitoring workflow cron schedule pause status.""" + + CRON_SCHEDULE_PAUSE_STATUS_PAUSED = "CRON_SCHEDULE_PAUSE_STATUS_PAUSED" + CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED = "CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED" + + +@dataclass +class DataProfilingConfig: + """Data Profiling Configurations.""" + + output_schema_id: str + """ID of the schema where output tables are created.""" + + assets_dir: Optional[str] = None + """Field for specifying the absolute path to a custom directory to store data-monitoring assets. + Normally prepopulated to a default user location via UI and Python APIs.""" + + baseline_table_name: Optional[str] = None + """Baseline table name. Baseline data is used to compute drift from the data in the monitored + `table_name`. The baseline table and the monitored table shall have the same schema.""" + + custom_metrics: Optional[List[DataProfilingCustomMetric]] = None + """Custom metrics.""" + + dashboard_id: Optional[str] = None + """Id of dashboard that visualizes the computed metrics. 
This can be empty if the monitor is in + PENDING state.""" + + drift_metrics_table_name: Optional[str] = None + """Table that stores drift metrics data. Format: `catalog.schema.table_name`.""" + + effective_warehouse_id: Optional[str] = None + """The warehouse for dashboard creation""" + + inference_log: Optional[InferenceLogConfig] = None + """Configuration for monitoring inference log tables.""" + + latest_monitor_failure_message: Optional[str] = None + """The latest error message for a monitor failure.""" + + monitor_version: Optional[int] = None + """Represents the current monitor configuration version in use. The version will be represented in + a numeric fashion (1,2,3...). The field has flexibility to take on negative values, which can + indicate corrupted monitor_version numbers.""" + + monitored_table_name: Optional[str] = None + """Unity Catalog table to monitor. Format: `catalog.schema.table_name`""" + + notification_settings: Optional[NotificationSettings] = None + """Field for specifying notification settings.""" + + profile_metrics_table_name: Optional[str] = None + """Table that stores profile metrics data. Format: `catalog.schema.table_name`.""" + + schedule: Optional[CronSchedule] = None + """The cron schedule.""" + + skip_builtin_dashboard: Optional[bool] = None + """Whether to skip creating a default dashboard summarizing data quality metrics.""" + + slicing_exprs: Optional[List[str]] = None + """List of column expressions to slice data with for targeted analysis. The data is grouped by each + expression independently, resulting in a separate slice for each predicate and its complements. + For example `slicing_exprs=[“col_1”, “col_2 > 10”]` will generate the following slices: + two slices for `col_2 > 10` (True and False), and one slice per unique value in `col1`. For + high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + + snapshot: Optional[SnapshotConfig] = None + """Configuration for monitoring snapshot tables.""" + + status: Optional[DataProfilingStatus] = None + """The data profiling monitor status.""" + + time_series: Optional[TimeSeriesConfig] = None + """Configuration for monitoring time series tables.""" + + warehouse_id: Optional[str] = None + """Optional argument to specify the warehouse for dashboard creation. 
If not specified, the first + running warehouse will be used.""" + + def as_dict(self) -> dict: + """Serializes the DataProfilingConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.effective_warehouse_id is not None: + body["effective_warehouse_id"] = self.effective_warehouse_id + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.latest_monitor_failure_message is not None: + body["latest_monitor_failure_message"] = self.latest_monitor_failure_message + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.monitored_table_name is not None: + body["monitored_table_name"] = self.monitored_table_name + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.output_schema_id is not None: + body["output_schema_id"] = self.output_schema_id + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.skip_builtin_dashboard is not None: + body["skip_builtin_dashboard"] = self.skip_builtin_dashboard + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.time_series: + body["time_series"] = self.time_series.as_dict() + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataProfilingConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.effective_warehouse_id is not None: + body["effective_warehouse_id"] = self.effective_warehouse_id + if self.inference_log: + body["inference_log"] = self.inference_log + if self.latest_monitor_failure_message is not None: + body["latest_monitor_failure_message"] = self.latest_monitor_failure_message + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.monitored_table_name is not None: + body["monitored_table_name"] = self.monitored_table_name + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.output_schema_id is not None: + body["output_schema_id"] = self.output_schema_id + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule + if self.skip_builtin_dashboard is not 
None: + body["skip_builtin_dashboard"] = self.skip_builtin_dashboard + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.status is not None: + body["status"] = self.status + if self.time_series: + body["time_series"] = self.time_series + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataProfilingConfig: + """Deserializes the DataProfilingConfig from a dictionary.""" + return cls( + assets_dir=d.get("assets_dir", None), + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", DataProfilingCustomMetric), + dashboard_id=d.get("dashboard_id", None), + drift_metrics_table_name=d.get("drift_metrics_table_name", None), + effective_warehouse_id=d.get("effective_warehouse_id", None), + inference_log=_from_dict(d, "inference_log", InferenceLogConfig), + latest_monitor_failure_message=d.get("latest_monitor_failure_message", None), + monitor_version=d.get("monitor_version", None), + monitored_table_name=d.get("monitored_table_name", None), + notification_settings=_from_dict(d, "notification_settings", NotificationSettings), + output_schema_id=d.get("output_schema_id", None), + profile_metrics_table_name=d.get("profile_metrics_table_name", None), + schedule=_from_dict(d, "schedule", CronSchedule), + skip_builtin_dashboard=d.get("skip_builtin_dashboard", None), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", SnapshotConfig), + status=_enum(d, "status", DataProfilingStatus), + time_series=_from_dict(d, "time_series", TimeSeriesConfig), + warehouse_id=d.get("warehouse_id", None), + ) + + +@dataclass +class DataProfilingCustomMetric: + """Custom metric definition.""" + + name: str + """Name of the metric in the output tables.""" + + definition: str + """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric + definition]. + + [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" + + input_columns: List[str] + """A list of column names in the input table the metric should be computed for. 
Can use + ``":table"`` to indicate that the metric needs information from multiple columns.""" + + output_data_type: str + """The output type of the custom metric.""" + + type: DataProfilingCustomMetricType + """The type of the custom metric.""" + + def as_dict(self) -> dict: + """Serializes the DataProfilingCustomMetric into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = [v for v in self.input_columns] + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataProfilingCustomMetric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = self.input_columns + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataProfilingCustomMetric: + """Deserializes the DataProfilingCustomMetric from a dictionary.""" + return cls( + definition=d.get("definition", None), + input_columns=d.get("input_columns", None), + name=d.get("name", None), + output_data_type=d.get("output_data_type", None), + type=_enum(d, "type", DataProfilingCustomMetricType), + ) + + +class DataProfilingCustomMetricType(Enum): + """The custom metric type.""" + + DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE = "DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE" + DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED = "DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED" + DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT = "DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT" + + +class DataProfilingStatus(Enum): + """The status of the data profiling monitor.""" + + DATA_PROFILING_STATUS_ACTIVE = "DATA_PROFILING_STATUS_ACTIVE" + DATA_PROFILING_STATUS_DELETE_PENDING = "DATA_PROFILING_STATUS_DELETE_PENDING" + DATA_PROFILING_STATUS_ERROR = "DATA_PROFILING_STATUS_ERROR" + DATA_PROFILING_STATUS_FAILED = "DATA_PROFILING_STATUS_FAILED" + DATA_PROFILING_STATUS_PENDING = "DATA_PROFILING_STATUS_PENDING" + + +@dataclass +class InferenceLogConfig: + """Inference log configuration.""" + + problem_type: InferenceProblemType + """Problem type the model aims to solve.""" + + timestamp_column: str + """Column for the timestamp.""" + + granularities: List[AggregationGranularity] + """List of granularities to use when aggregating data into time windows based on their timestamp.""" + + prediction_column: str + """Column for the prediction.""" + + model_id_column: str + """Column for the model identifier.""" + + label_column: Optional[str] = None + """Column for the label.""" + + prediction_probability_column: Optional[str] = None + """Column for prediction probabilities""" + + def as_dict(self) -> dict: + """Serializes the InferenceLogConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.granularities: + body["granularities"] = [v.value for v in self.granularities] + if self.label_column is not None: + body["label_column"] = self.label_column + if self.model_id_column is not None: + body["model_id_column"] = self.model_id_column + if 
self.prediction_column is not None: + body["prediction_column"] = self.prediction_column + if self.prediction_probability_column is not None: + body["prediction_probability_column"] = self.prediction_probability_column + if self.problem_type is not None: + body["problem_type"] = self.problem_type.value + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the InferenceLogConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: + body["granularities"] = self.granularities + if self.label_column is not None: + body["label_column"] = self.label_column + if self.model_id_column is not None: + body["model_id_column"] = self.model_id_column + if self.prediction_column is not None: + body["prediction_column"] = self.prediction_column + if self.prediction_probability_column is not None: + body["prediction_probability_column"] = self.prediction_probability_column + if self.problem_type is not None: + body["problem_type"] = self.problem_type + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> InferenceLogConfig: + """Deserializes the InferenceLogConfig from a dictionary.""" + return cls( + granularities=_repeated_enum(d, "granularities", AggregationGranularity), + label_column=d.get("label_column", None), + model_id_column=d.get("model_id_column", None), + prediction_column=d.get("prediction_column", None), + prediction_probability_column=d.get("prediction_probability_column", None), + problem_type=_enum(d, "problem_type", InferenceProblemType), + timestamp_column=d.get("timestamp_column", None), + ) + + +class InferenceProblemType(Enum): + """Inference problem type the model aims to solve.""" + + INFERENCE_PROBLEM_TYPE_CLASSIFICATION = "INFERENCE_PROBLEM_TYPE_CLASSIFICATION" + INFERENCE_PROBLEM_TYPE_REGRESSION = "INFERENCE_PROBLEM_TYPE_REGRESSION" + + +@dataclass +class ListMonitorResponse: + """Response for listing Monitors.""" + + monitors: Optional[List[Monitor]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListMonitorResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.monitors: + body["monitors"] = [v.as_dict() for v in self.monitors] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListMonitorResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.monitors: + body["monitors"] = self.monitors + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListMonitorResponse: + """Deserializes the ListMonitorResponse from a dictionary.""" + return cls(monitors=_repeated_dict(d, "monitors", Monitor), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListRefreshResponse: + """Response for listing refreshes.""" + + next_page_token: Optional[str] = None + + refreshes: Optional[List[Refresh]] = None + + def as_dict(self) -> dict: + """Serializes the ListRefreshResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.refreshes: + body["refreshes"] = [v.as_dict() for 
v in self.refreshes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListRefreshResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.refreshes: + body["refreshes"] = self.refreshes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListRefreshResponse: + """Deserializes the ListRefreshResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), refreshes=_repeated_dict(d, "refreshes", Refresh)) + + +@dataclass +class Monitor: + """Monitor for the data quality of unity catalog entities such as schema or table.""" + + object_type: str + """The type of the monitored object. Can be one of the following: schema or table.""" + + object_id: str + """The UUID of the request object. For example, schema id.""" + + anomaly_detection_config: Optional[AnomalyDetectionConfig] = None + """Anomaly Detection Configuration, applicable to `schema` object types.""" + + data_profiling_config: Optional[DataProfilingConfig] = None + """Data Profiling Configuration, applicable to `table` object types""" + + def as_dict(self) -> dict: + """Serializes the Monitor into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config.as_dict() + if self.data_profiling_config: + body["data_profiling_config"] = self.data_profiling_config.as_dict() + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Monitor into a shallow dictionary of its immediate attributes.""" + body = {} + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config + if self.data_profiling_config: + body["data_profiling_config"] = self.data_profiling_config + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Monitor: + """Deserializes the Monitor from a dictionary.""" + return cls( + anomaly_detection_config=_from_dict(d, "anomaly_detection_config", AnomalyDetectionConfig), + data_profiling_config=_from_dict(d, "data_profiling_config", DataProfilingConfig), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) + + +@dataclass +class NotificationDestination: + """Destination of the data quality monitoring notification.""" + + email_addresses: Optional[List[str]] = None + """The list of email addresses to send the notification to. 
A maximum of 5 email addresses is + supported.""" + + def as_dict(self) -> dict: + """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email_addresses: + body["email_addresses"] = [v for v in self.email_addresses] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email_addresses: + body["email_addresses"] = self.email_addresses + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NotificationDestination: + """Deserializes the NotificationDestination from a dictionary.""" + return cls(email_addresses=d.get("email_addresses", None)) + + +@dataclass +class NotificationSettings: + """Settings for sending notifications on the data quality monitoring.""" + + on_failure: Optional[NotificationDestination] = None + """Destinations to send notifications on failure/timeout.""" + + def as_dict(self) -> dict: + """Serializes the NotificationSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.on_failure: + body["on_failure"] = self.on_failure.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NotificationSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.on_failure: + body["on_failure"] = self.on_failure + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NotificationSettings: + """Deserializes the NotificationSettings from a dictionary.""" + return cls(on_failure=_from_dict(d, "on_failure", NotificationDestination)) + + +@dataclass +class Refresh: + """The Refresh object gives information on a refresh of the data quality monitoring pipeline.""" + + object_type: str + """The type of the monitored object. Can be one of the following: table.""" + + object_id: str + """The UUID of the request object. For example, table id.""" + + end_time_ms: Optional[int] = None + """Time when the refresh ended (milliseconds since 1/1/1970 UTC).""" + + message: Optional[str] = None + """An optional message to give insight into the current state of the refresh (e.g. 
FAILURE + messages).""" + + refresh_id: Optional[int] = None + """Unique id of the refresh operation.""" + + start_time_ms: Optional[int] = None + """Time when the refresh started (milliseconds since 1/1/1970 UTC).""" + + state: Optional[RefreshState] = None + """The current state of the refresh.""" + + trigger: Optional[RefreshTrigger] = None + """What triggered the refresh.""" + + def as_dict(self) -> dict: + """Serializes the Refresh into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state.value + if self.trigger is not None: + body["trigger"] = self.trigger.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Refresh into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state + if self.trigger is not None: + body["trigger"] = self.trigger + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Refresh: + """Deserializes the Refresh from a dictionary.""" + return cls( + end_time_ms=d.get("end_time_ms", None), + message=d.get("message", None), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + refresh_id=d.get("refresh_id", None), + start_time_ms=d.get("start_time_ms", None), + state=_enum(d, "state", RefreshState), + trigger=_enum(d, "trigger", RefreshTrigger), + ) + + +class RefreshState(Enum): + """The state of the refresh.""" + + MONITOR_REFRESH_STATE_CANCELED = "MONITOR_REFRESH_STATE_CANCELED" + MONITOR_REFRESH_STATE_FAILED = "MONITOR_REFRESH_STATE_FAILED" + MONITOR_REFRESH_STATE_PENDING = "MONITOR_REFRESH_STATE_PENDING" + MONITOR_REFRESH_STATE_RUNNING = "MONITOR_REFRESH_STATE_RUNNING" + MONITOR_REFRESH_STATE_SUCCESS = "MONITOR_REFRESH_STATE_SUCCESS" + MONITOR_REFRESH_STATE_UNKNOWN = "MONITOR_REFRESH_STATE_UNKNOWN" + + +class RefreshTrigger(Enum): + """The trigger of the refresh.""" + + MONITOR_REFRESH_TRIGGER_DATA_CHANGE = "MONITOR_REFRESH_TRIGGER_DATA_CHANGE" + MONITOR_REFRESH_TRIGGER_MANUAL = "MONITOR_REFRESH_TRIGGER_MANUAL" + MONITOR_REFRESH_TRIGGER_SCHEDULE = "MONITOR_REFRESH_TRIGGER_SCHEDULE" + MONITOR_REFRESH_TRIGGER_UNKNOWN = "MONITOR_REFRESH_TRIGGER_UNKNOWN" + + +@dataclass +class SnapshotConfig: + """Snapshot analysis configuration.""" + + def as_dict(self) -> dict: + """Serializes the SnapshotConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SnapshotConfig into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, 
Any]) -> SnapshotConfig: + """Deserializes the SnapshotConfig from a dictionary.""" + return cls() + + +@dataclass +class TimeSeriesConfig: + """Time series analysis configuration.""" + + timestamp_column: str + """Column for the timestamp.""" + + granularities: List[AggregationGranularity] + """List of granularities to use when aggregating data into time windows based on their timestamp.""" + + def as_dict(self) -> dict: + """Serializes the TimeSeriesConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.granularities: + body["granularities"] = [v.value for v in self.granularities] + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TimeSeriesConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: + body["granularities"] = self.granularities + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TimeSeriesConfig: + """Deserializes the TimeSeriesConfig from a dictionary.""" + return cls( + granularities=_repeated_enum(d, "granularities", AggregationGranularity), + timestamp_column=d.get("timestamp_column", None), + ) + + +class DataQualityAPI: + """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)""" + + def __init__(self, api_client): + self._api = api_client + + def cancel_refresh(self, object_type: str, object_id: str, refresh_id: int) -> CancelRefreshResponse: + """Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + :returns: :class:`CancelRefreshResponse` + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}/cancel", + headers=headers, + ) + return CancelRefreshResponse.from_dict(res) + + def create_monitor(self, monitor: Monitor) -> Monitor: + """Create a data quality monitor on a Unity Catalog object. The caller must provide either + `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog, + have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have + **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have + **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's + parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. + + :param monitor: :class:`Monitor` + The monitor to create. 
+ + :returns: :class:`Monitor` + """ + body = monitor.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/data-quality/v1/monitors", body=body, headers=headers) + return Monitor.from_dict(res) + + def create_refresh(self, object_type: str, object_id: str, refresh: Refresh) -> Refresh: + """Creates a refresh. Currently only supported for the `table` `object_type`. + + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the + table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: + - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an + owner of the table + + :param object_type: str + The type of the monitored object. Can be one of the following: table. + :param object_id: str + The UUID of the request object. For example, table id. + :param refresh: :class:`Refresh` + The refresh to create + + :returns: :class:`Refresh` + """ + body = refresh.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes", body=body, headers=headers + ) + return Refresh.from_dict(res) + + def delete_monitor(self, object_type: str, object_id: str): + """Delete a data quality monitor on Unity Catalog object. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - be an owner of the table. + + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must + be manually cleaned up (if desired). + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", headers=headers) + + def delete_refresh(self, object_type: str, object_id: str, refresh_id: int): + """(Unimplemented) Delete a refresh + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", headers=headers + ) + + def get_monitor(self, object_type: str, object_id: str) -> Monitor: + """Read a data quality monitor on Unity Catalog object. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + The returned information includes configuration values, as well as information on assets created by + the monitor. 
Some information (e.g., dashboard) may be filtered out if the caller is in a different + workspace than where the monitor was created. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + + :returns: :class:`Monitor` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", headers=headers) + return Monitor.from_dict(res) + + def get_refresh(self, object_type: str, object_id: str, refresh_id: int) -> Refresh: + """Get data quality monitor refresh. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + :returns: :class:`Refresh` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", headers=headers + ) + return Refresh.from_dict(res) + + def list_monitor(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Monitor]: + """(Unimplemented) List data quality monitors. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Monitor` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/data-quality/v1/monitors", query=query, headers=headers) + if "monitors" in json: + for v in json["monitors"]: + yield Monitor.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_refresh( + self, object_type: str, object_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[Refresh]: + """List data quality monitor refreshes. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. 
+ :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Refresh` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes", + query=query, + headers=headers, + ) + if "refreshes" in json: + for v in json["refreshes"]: + yield Refresh.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_monitor(self, object_type: str, object_id: str, monitor: Monitor, update_mask: str) -> Monitor: + """Update a data quality monitor on Unity Catalog object. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - be an owner of the table. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param monitor: :class:`Monitor` + The monitor to update. + :param update_mask: str + The field mask to specify which fields to update. + + :returns: :class:`Monitor` + """ + body = monitor.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", query=query, body=body, headers=headers + ) + return Monitor.from_dict(res) + + def update_refresh( + self, object_type: str, object_id: str, refresh_id: int, refresh: Refresh, update_mask: str + ) -> Refresh: + """(Unimplemented) Update a refresh + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + :param refresh: :class:`Refresh` + The refresh to update. + :param update_mask: str + The field mask to specify which fields to update. 
+ + :returns: :class:`Refresh` + """ + body = refresh.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", + query=query, + body=body, + headers=headers, + ) + return Refresh.from_dict(res) diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 2117a09f3..909adcb9e 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -6,7 +6,8 @@ from dataclasses import dataclass from typing import Any, BinaryIO, Dict, Iterator, List, Optional -from ._internal import _escape_multi_segment_path_parameter, _repeated_dict +from databricks.sdk.service._internal import ( + _escape_multi_segment_path_parameter, _repeated_dict) _LOG = logging.getLogger("databricks.sdk") @@ -50,24 +51,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: return cls() -@dataclass -class CreateDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateDirectoryResponse: - """Deserializes the CreateDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class CreateResponse: handle: Optional[int] = None @@ -94,24 +77,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: return cls(handle=d.get("handle", None)) -@dataclass -class DeleteDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDirectoryResponse: - """Deserializes the DeleteDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -289,24 +254,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FileInfo: ) -@dataclass -class GetDirectoryMetadataResponse: - def as_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetDirectoryMetadataResponse: - """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" - return cls() - - @dataclass class GetMetadataResponse: content_length: Optional[int] = None @@ -495,24 +442,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ReadResponse: return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None)) -@dataclass -class UploadResponse: - def as_dict(self) -> dict: - """Serializes the UploadResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UploadResponse into a shallow dictionary of its immediate 
attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UploadResponse: - """Deserializes the UploadResponse from a dictionary.""" - return cls() - - class DbfsAPI: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index a470d7544..3eebd3b3c 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -500,24 +501,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ConsistencyToken: return cls(value=d.get("value", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteWorkspacePermissionAssignmentResponse: def as_dict(self) -> dict: @@ -1464,24 +1447,6 @@ class PatchOp(Enum): REPLACE = "replace" -@dataclass -class PatchResponse: - def as_dict(self) -> dict: - """Serializes the PatchResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchResponse: - """Deserializes the PatchResponse from a dictionary.""" - return cls() - - class PatchSchema(Enum): URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp" diff --git a/databricks/sdk/service/iamv2.py b/databricks/sdk/service/iamv2.py index 25cd2ad25..80cb8be4a 100755 --- a/databricks/sdk/service/iamv2.py +++ b/databricks/sdk/service/iamv2.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any, Dict, List, Optional -from ._internal import _enum, _from_dict, _repeated_enum +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -68,6 +69,148 @@ def from_dict(cls, d: Dict[str, Any]) -> Group: ) +@dataclass +class ListGroupsResponse: + """TODO: Write description later when this method is implemented""" + + groups: Optional[List[Group]] = None + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.groups: + body["groups"] = self.groups + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: + """Deserializes the ListGroupsResponse from a dictionary.""" + return cls(groups=_repeated_dict(d, "groups", Group), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + there are no subsequent pages.""" + + service_principals: Optional[List[ServicePrincipal]] = None + + def as_dict(self) -> dict: + """Serializes the ListServicePrincipalsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = [v.as_dict() for v in self.service_principals] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListServicePrincipalsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = self.service_principals + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalsResponse: + """Deserializes the ListServicePrincipalsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + service_principals=_repeated_dict(d, "service_principals", ServicePrincipal), + ) + + +@dataclass +class ListUsersResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + users: Optional[List[User]] = None + + def as_dict(self) -> dict: + """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = [v.as_dict() for v in self.users] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = self.users + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: + """Deserializes the ListUsersResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), users=_repeated_dict(d, "users", User)) + + +@dataclass +class ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + there are no subsequent pages.""" + + workspace_access_details: Optional[List[WorkspaceAccessDetail]] = None + + def as_dict(self) -> dict: + """Serializes the ListWorkspaceAccessDetailsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = [v.as_dict() for v in self.workspace_access_details] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListWorkspaceAccessDetailsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = self.workspace_access_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceAccessDetailsResponse: + """Deserializes the ListWorkspaceAccessDetailsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + workspace_access_details=_repeated_dict(d, "workspace_access_details", WorkspaceAccessDetail), + ) + + class PrincipalType(Enum): """The type of the principal (user/sp/group).""" @@ -427,6 +570,217 @@ class AccountIamV2API: def __init__(self, api_client): self._api = api_client + def create_group(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. Group to be created in + + :returns: :class:`Group` + """ + body = group.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", body=body, headers=headers + ) + return Group.from_dict(res) + + def create_service_principal(self, service_principal: ServicePrincipal) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param service_principal: :class:`ServicePrincipal` + Required. 
Service principal to be created in + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", body=body, headers=headers + ) + return ServicePrincipal.from_dict(res) + + def create_user(self, user: User) -> User: + """TODO: Write description later when this method is implemented + + :param user: :class:`User` + Required. User to be created in + + :returns: :class:`User` + """ + body = user.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/users", body=body, headers=headers + ) + return User.from_dict(res) + + def create_workspace_access_detail( + self, parent: str, workspace_access_detail: WorkspaceAccessDetail + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param parent: str + Required. The parent path for workspace access detail. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be created in . + + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{parent}/workspaceAccessDetails", + body=body, + headers=headers, + ) + return WorkspaceAccessDetail.from_dict(res) + + def delete_group(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers + ) + + def delete_service_principal(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + headers=headers, + ) + + def delete_user(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + + def delete_workspace_access_detail(self, workspace_id: int, principal_id: int): + """TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID where the principal has access. + :param principal_id: int + Required. ID of the principal in Databricks to delete workspace access for. 
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + headers=headers, + ) + + def get_group(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers + ) + return Group.from_dict(res) + + def get_service_principal(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", headers=headers + ) + return ServicePrincipal.from_dict(res) + + def get_user(self, internal_id: int) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + return User.from_dict(res) + def get_workspace_access_detail( self, workspace_id: int, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None ) -> WorkspaceAccessDetail: @@ -462,6 +816,119 @@ def get_workspace_access_detail( ) return WorkspaceAccessDetail.from_dict(res) + def list_groups(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> ListGroupsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of groups to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. + + :returns: :class:`ListGroupsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", query=query, headers=headers + ) + return ListGroupsResponse.from_dict(res) + + def list_service_principals( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of service principals to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. 
+ + :returns: :class:`ListServicePrincipalsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", query=query, headers=headers + ) + return ListServicePrincipalsResponse.from_dict(res) + + def list_users(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> ListUsersResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users", query=query, headers=headers + ) + return ListUsersResponse.from_dict(res) + + def list_workspace_access_details( + self, workspace_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID for which the workspace access details are being fetched. + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails", + query=query, + headers=headers, + ) + return ListWorkspaceAccessDetailsResponse.from_dict(res) + def resolve_group(self, external_id: str) -> ResolveGroupResponse: """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it will be created in the account. If the customer is not onboarded onto Automatic Identity Management @@ -540,6 +1007,132 @@ def resolve_user(self, external_id: str) -> ResolveUserResponse: ) return ResolveUserResponse.from_dict(res) + def update_group(self, internal_id: int, group: Group, update_mask: str) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`Group` + """ + body = group.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return Group.from_dict(res) + + def update_service_principal( + self, internal_id: int, service_principal: ServicePrincipal, update_mask: str + ) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service Principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return ServicePrincipal.from_dict(res) + + def update_user(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + """ + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return User.from_dict(res) + + def update_workspace_access_detail( + self, workspace_id: int, principal_id: int, workspace_access_detail: WorkspaceAccessDetail, update_mask: str + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param workspace_id: int + Required. The workspace ID for which the workspace access detail is being updated. + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + query=query, + body=body, + headers=headers, + ) + return WorkspaceAccessDetail.from_dict(res) + class WorkspaceIamV2API: """These APIs are used to manage identities and the workspace access of these identities in .""" @@ -547,6 +1140,184 @@ class WorkspaceIamV2API: def __init__(self, api_client): self._api = api_client + def create_group_proxy(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. Group to be created in + + :returns: :class:`Group` + """ + body = group.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/groups", body=body, headers=headers) + return Group.from_dict(res) + + def create_service_principal_proxy(self, service_principal: ServicePrincipal) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be created in + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/servicePrincipals", body=body, headers=headers) + return ServicePrincipal.from_dict(res) + + def create_user_proxy(self, user: User) -> User: + """TODO: Write description later when this method is implemented + + :param user: :class:`User` + Required. User to be created in + + :returns: :class:`User` + """ + body = user.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/users", body=body, headers=headers) + return User.from_dict(res) + + def create_workspace_access_detail_local( + self, workspace_access_detail: WorkspaceAccessDetail + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be created in . + + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/workspaceAccessDetails", body=body, headers=headers) + return WorkspaceAccessDetail.from_dict(res) + + def delete_group_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + + def delete_service_principal_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. 
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers) + + def delete_user_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/users/{internal_id}", headers=headers) + + def delete_workspace_access_detail_local(self, principal_id: int): + """TODO: Write description later when this method is implemented + + :param principal_id: int + Required. ID of the principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", headers=headers) + + def get_group_proxy(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + return Group.from_dict(res) + + def get_service_principal_proxy(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + :returns: :class:`ServicePrincipal` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers) + return ServicePrincipal.from_dict(res) + + def get_user_proxy(self, internal_id: int) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/identity/users/{internal_id}", headers=headers) + return User.from_dict(res) + def get_workspace_access_detail_local( self, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None ) -> WorkspaceAccessDetail: @@ -577,6 +1348,110 @@ def get_workspace_access_detail_local( ) return WorkspaceAccessDetail.from_dict(res) + def list_groups_proxy( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListGroupsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of groups to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. 
+ + :returns: :class:`ListGroupsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/identity/groups", query=query, headers=headers) + return ListGroupsResponse.from_dict(res) + + def list_service_principals_proxy( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of SPs to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListServicePrincipalsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/identity/servicePrincipals", query=query, headers=headers) + return ListServicePrincipalsResponse.from_dict(res) + + def list_users_proxy( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListUsersResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/identity/users", query=query, headers=headers) + return ListUsersResponse.from_dict(res) + + def list_workspace_access_details_local( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/identity/workspaceAccessDetails", query=query, headers=headers) + return ListWorkspaceAccessDetailsResponse.from_dict(res) + def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it will be created in the account. 
If the customer is not onboarded onto Automatic Identity Management @@ -641,3 +1516,107 @@ def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: res = self._api.do("POST", "/api/2.0/identity/users/resolveByExternalId", body=body, headers=headers) return ResolveUserResponse.from_dict(res) + + def update_group_proxy(self, internal_id: int, group: Group, update_mask: str) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`Group` + """ + body = group.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/identity/groups/{internal_id}", query=query, body=body, headers=headers) + return Group.from_dict(res) + + def update_service_principal_proxy( + self, internal_id: int, service_principal: ServicePrincipal, update_mask: str + ) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/identity/servicePrincipals/{internal_id}", query=query, body=body, headers=headers + ) + return ServicePrincipal.from_dict(res) + + def update_user_proxy(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + """ + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/identity/users/{internal_id}", query=query, body=body, headers=headers) + return User.from_dict(res) + + def update_workspace_access_detail_local( + self, principal_id: int, workspace_access_detail: WorkspaceAccessDetail, update_mask: str + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. WorkspaceAccessDetail to be updated in + :param update_mask: str + Optional. The list of fields to update. 
+ + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, body=body, headers=headers + ) + return WorkspaceAccessDetail.from_dict(res) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 9519e8ba7..07c1e8b23 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -53,6 +55,10 @@ class BaseJob: job_id: Optional[int] = None """The canonical identifier for this job.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. These settings can be updated using the `resetJob` method.""" @@ -75,6 +81,8 @@ def as_dict(self) -> dict: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings.as_dict() if self.trigger_state: @@ -96,6 +104,8 @@ def as_shallow_dict(self) -> dict: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings if self.trigger_state: @@ -112,6 +122,7 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseJob: effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), + path=d.get("path", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), ) @@ -448,42 +459,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: ) -@dataclass -class CancelAllRunsResponse: - def as_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: - """Deserializes the CancelAllRunsResponse from a dictionary.""" - return cls() - - -@dataclass -class CancelRunResponse: - def as_dict(self) -> dict: - """Serializes the CancelRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse: - """Deserializes the CancelRunResponse from a dictionary.""" - return cls() - - class CleanRoomTaskRunLifeCycleState(Enum): """Copied from 
elastic-spark-common/api/messages/runs.proto. Using the original definition to remove coupling with jobs API definition""" @@ -1511,42 +1486,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtTask: ) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteRunResponse: - def as_dict(self) -> dict: - """Serializes the DeleteRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: - """Deserializes the DeleteRunResponse from a dictionary.""" - return cls() - - @dataclass class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Represents a change to the job cluster's settings that would be required for the job clusters to @@ -2250,6 +2189,10 @@ class Job: next_page_token: Optional[str] = None """A token that can be used to list the next page of array properties.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. This value can be changed by setting the `run_as` field when creating or updating a @@ -2283,6 +2226,8 @@ def as_dict(self) -> dict: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2308,6 +2253,8 @@ def as_shallow_dict(self) -> dict: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2327,6 +2274,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Job: has_more=d.get("has_more", None), job_id=d.get("job_id", None), next_page_token=d.get("next_page_token", None), + path=d.get("path", None), run_as_user_name=d.get("run_as_user_name", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), @@ -3029,6 +2977,10 @@ class JobSettings: parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" + parent_path: Optional[str] = None + """Path of the job parent folder in workspace file tree. If absent, the job doesn't have a + workspace object.""" + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. 
@@ -3110,6 +3062,8 @@ def as_dict(self) -> dict: body["notification_settings"] = self.notification_settings.as_dict() if self.parameters: body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target.value if self.queue: @@ -3165,6 +3119,8 @@ def as_shallow_dict(self) -> dict: body["notification_settings"] = self.notification_settings if self.parameters: body["parameters"] = self.parameters + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target if self.queue: @@ -3206,6 +3162,7 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSettings: name=d.get("name", None), notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), parameters=_repeated_dict(d, "parameters", JobParameterDefinition), + parent_path=d.get("parent_path", None), performance_target=_enum(d, "performance_target", PerformanceTarget), queue=_from_dict(d, "queue", QueueSettings), run_as=_from_dict(d, "run_as", JobRunAs), @@ -3528,6 +3485,78 @@ def from_dict(cls, d: Dict[str, Any]) -> ListRunsResponse: ) +@dataclass +class ModelTriggerConfiguration: + condition: ModelTriggerConfigurationCondition + """The condition based on which to trigger a job run.""" + + aliases: Optional[List[str]] = None + """Aliases of the model versions to monitor. Can only be used in conjunction with condition + MODEL_ALIAS_SET.""" + + min_time_between_triggers_seconds: Optional[int] = None + """If set, the trigger starts a run only after the specified amount of time has passed since the + last time the trigger fired. The minimum allowed value is 60 seconds.""" + + securable_name: Optional[str] = None + """Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level + triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of + metastore-level triggers.""" + + wait_after_last_change_seconds: Optional[int] = None + """If set, the trigger starts a run only after no model updates have occurred for the specified + time and can be used to wait for a series of model updates before triggering a run. 
The minimum + allowed value is 60 seconds.""" + + def as_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aliases: + body["aliases"] = [v for v in self.aliases] + if self.condition is not None: + body["condition"] = self.condition.value + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aliases: + body["aliases"] = self.aliases + if self.condition is not None: + body["condition"] = self.condition + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ModelTriggerConfiguration: + """Deserializes the ModelTriggerConfiguration from a dictionary.""" + return cls( + aliases=d.get("aliases", None), + condition=_enum(d, "condition", ModelTriggerConfigurationCondition), + min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None), + securable_name=d.get("securable_name", None), + wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None), + ) + + +class ModelTriggerConfigurationCondition(Enum): + + MODEL_ALIAS_SET = "MODEL_ALIAS_SET" + MODEL_CREATED = "MODEL_CREATED" + MODEL_VERSION_READY = "MODEL_VERSION_READY" + + @dataclass class NotebookOutput: result: Optional[str] = None @@ -4208,24 +4237,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: return cls(repair_id=d.get("repair_id", None)) -@dataclass -class ResetResponse: - def as_dict(self) -> dict: - """Serializes the ResetResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResetResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResetResponse: - """Deserializes the ResetResponse from a dictionary.""" - return cls() - - @dataclass class ResolvedConditionTaskValues: left: Optional[str] = None @@ -5694,6 +5705,10 @@ class RunTask: description: Optional[str] = None """An optional description for this task.""" + disabled: Optional[bool] = None + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. 
This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -5856,6 +5871,8 @@ def as_dict(self) -> dict: body["depends_on"] = [v.as_dict() for v in self.depends_on] if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value if self.email_notifications: @@ -5953,6 +5970,8 @@ def as_shallow_dict(self) -> dict: body["depends_on"] = self.depends_on if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target if self.email_notifications: @@ -6040,6 +6059,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTask: dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), + disabled=d.get("disabled", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), end_time=d.get("end_time", None), @@ -6921,6 +6941,10 @@ class SubmitTask: description: Optional[str] = None """An optional description for this task.""" + disabled: Optional[bool] = None + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. 
The default behavior is to not send any emails.""" @@ -7016,6 +7040,8 @@ def as_dict(self) -> dict: body["depends_on"] = [v.as_dict() for v in self.depends_on] if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications.as_dict() if self.environment_key is not None: @@ -7081,6 +7107,8 @@ def as_shallow_dict(self) -> dict: body["depends_on"] = self.depends_on if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications if self.environment_key is not None: @@ -7139,6 +7167,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), + disabled=d.get("disabled", None), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), environment_key=d.get("environment_key", None), existing_cluster_id=d.get("existing_cluster_id", None), @@ -8030,6 +8059,8 @@ class TriggerSettings: file_arrival: Optional[FileArrivalTriggerConfiguration] = None """File arrival trigger settings.""" + model: Optional[ModelTriggerConfiguration] = None + pause_status: Optional[PauseStatus] = None """Whether this trigger is paused or not.""" @@ -8046,6 +8077,8 @@ def as_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival.as_dict() + if self.model: + body["model"] = self.model.as_dict() if self.pause_status is not None: body["pause_status"] = self.pause_status.value if self.periodic: @@ -8061,6 +8094,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival + if self.model: + body["model"] = self.model if self.pause_status is not None: body["pause_status"] = self.pause_status if self.periodic: @@ -8076,6 +8111,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: """Deserializes the TriggerSettings from a dictionary.""" return cls( file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration), + model=_from_dict(d, "model", ModelTriggerConfiguration), pause_status=_enum(d, "pause_status", PauseStatus), periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), table=_from_dict(d, "table", TableUpdateTriggerConfiguration), @@ -8139,24 +8175,6 @@ class TriggerType(Enum): TABLE = "TABLE" -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - @dataclass class ViewItem: content: Optional[str] = None @@ -8428,11 +8446,7 @@ def cancel_run(self, run_id: int) -> Wait[Run]: } op_response = self._api.do("POST", "/api/2.2/jobs/runs/cancel", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=CancelRunResponse.from_dict(op_response), - run_id=run_id, - ) + return Wait(self.wait_get_run_job_terminated_or_skipped, run_id=run_id) def 
cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run: return self.cancel_run(run_id=run_id).result(timeout=timeout) @@ -8456,6 +8470,7 @@ def create( name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, + parent_path: Optional[str] = None, performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, @@ -8527,6 +8542,9 @@ def create( `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param parent_path: str (optional) + Path of the job parent folder in workspace file tree. If absent, the job doesn't have a workspace + object. :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -8601,6 +8619,8 @@ def create( body["notification_settings"] = notification_settings.as_dict() if parameters is not None: body["parameters"] = [v.as_dict() for v in parameters] + if parent_path is not None: + body["parent_path"] = parent_path if performance_target is not None: body["performance_target"] = performance_target.value if queue is not None: diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index a199010ab..db313f7a1 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 6a132360c..63cc88fc1 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -1864,6 +1866,31 @@ def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) +@dataclass +class GetLoggedModelsRequestResponse: + models: Optional[List[LoggedModel]] = None + """The retrieved logged models.""" + + def as_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.models: + body["models"] = [v.as_dict() for v in self.models] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.models: + body["models"] = self.models + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelsRequestResponse: + """Deserializes the GetLoggedModelsRequestResponse from a dictionary.""" + return cls(models=_repeated_dict(d, "models", LoggedModel)) + + @dataclass class GetMetricHistoryResponse: metrics: Optional[List[Metric]] = None @@ -5670,6 +5697,25 @@ def 
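A companion sketch for the new parent_path argument on jobs.create shown above; the folder path, notebook path, and cluster ID are hypothetical.

    # Sketch only: create a job whose workspace object lives under an explicit folder.
    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    created = w.jobs.create(
        name="orders-etl",
        parent_path="/Workspace/Users/someone@example.com/etl-jobs",  # new argument
        tasks=[
            jobs.Task(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Workspace/Shared/orders"),
                existing_cluster_id="0000-000000-abcdefgh",  # placeholder
            )
        ],
    )
    print(created.job_id)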
get_logged_model(self, model_id: str) -> GetLoggedModelResponse: res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) return GetLoggedModelResponse.from_dict(res) + def get_logged_models(self, *, model_ids: Optional[List[str]] = None) -> GetLoggedModelsRequestResponse: + """Batch endpoint for getting logged models from a list of model IDs + + :param model_ids: List[str] (optional) + The IDs of the logged models to retrieve. Max threshold is 100. + + :returns: :class:`GetLoggedModelsRequestResponse` + """ + + query = {} + if model_ids is not None: + query["model_ids"] = [v for v in model_ids] + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/mlflow/logged-models:batchGet", query=query, headers=headers) + return GetLoggedModelsRequestResponse.from_dict(res) + def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: """Gets the permission levels that a user can have on an object. diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 58c57808d..3762a2408 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from typing import Any, Dict, Iterator, List, Optional -from ._internal import _from_dict, _repeated_dict +from databricks.sdk.service._internal import _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -194,24 +194,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePublishedAppIntegrationOutput: return cls() -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class FederationPolicy: create_time: Optional[str] = None @@ -863,17 +845,27 @@ def from_dict(cls, d: Dict[str, Any]) -> SecretInfo: @dataclass class TokenAccessPolicy: + absolute_session_lifetime_in_minutes: Optional[int] = None + """absolute OAuth session TTL in minutes when single-use refresh tokens are enabled""" + access_token_ttl_in_minutes: Optional[int] = None """access token time to live in minutes""" + enable_single_use_refresh_tokens: Optional[bool] = None + """whether to enable single-use refresh tokens""" + refresh_token_ttl_in_minutes: Optional[int] = None """refresh token time to live in minutes""" def as_dict(self) -> dict: """Serializes the TokenAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} + if self.absolute_session_lifetime_in_minutes is not None: + body["absolute_session_lifetime_in_minutes"] = self.absolute_session_lifetime_in_minutes if self.access_token_ttl_in_minutes is not None: body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes + if self.enable_single_use_refresh_tokens is not None: + body["enable_single_use_refresh_tokens"] = self.enable_single_use_refresh_tokens if self.refresh_token_ttl_in_minutes is not None: body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes return body @@ -881,8 +873,12 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate 
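A small sketch of the new batch logged-model lookup added above, assuming it is exposed on the existing Experiments client (w.experiments); the model IDs are placeholders.

    # Sketch only: fetch up to 100 logged models in a single call.
    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.experiments.get_logged_models(model_ids=["m-0123456789", "m-9876543210"])
    for model in resp.models or []:
        print(model)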
attributes.""" body = {} + if self.absolute_session_lifetime_in_minutes is not None: + body["absolute_session_lifetime_in_minutes"] = self.absolute_session_lifetime_in_minutes if self.access_token_ttl_in_minutes is not None: body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes + if self.enable_single_use_refresh_tokens is not None: + body["enable_single_use_refresh_tokens"] = self.enable_single_use_refresh_tokens if self.refresh_token_ttl_in_minutes is not None: body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes return body @@ -891,7 +887,9 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy: """Deserializes the TokenAccessPolicy from a dictionary.""" return cls( + absolute_session_lifetime_in_minutes=d.get("absolute_session_lifetime_in_minutes", None), access_token_ttl_in_minutes=d.get("access_token_ttl_in_minutes", None), + enable_single_use_refresh_tokens=d.get("enable_single_use_refresh_tokens", None), refresh_token_ttl_in_minutes=d.get("refresh_token_ttl_in_minutes", None), ) diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ff309b6ae..dcda6ba74 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -21,6 +23,51 @@ # all definitions in this file are in alphabetical order +@dataclass +class ApplyEnvironmentRequestResponse: + def as_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ApplyEnvironmentRequestResponse: + """Deserializes the ApplyEnvironmentRequestResponse from a dictionary.""" + return cls() + + +@dataclass +class ConnectionParameters: + source_catalog: Optional[str] = None + """Source catalog for initial connection. This is necessary for schema exploration in some database + systems like Oracle, and optional but nice-to-have in some other database systems like Postgres. 
+ For Oracle databases, this maps to a service name.""" + + def as_dict(self) -> dict: + """Serializes the ConnectionParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectionParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConnectionParameters: + """Deserializes the ConnectionParameters from a dictionary.""" + return cls(source_catalog=d.get("source_catalog", None)) + + @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -351,6 +398,9 @@ class GetPipelineResponse: effective_budget_policy_id: Optional[str] = None """Serverless budget policy ID of this pipeline.""" + effective_usage_policy_id: Optional[str] = None + """Serverless usage policy ID of the pipeline.""" + health: Optional[GetPipelineResponseHealth] = None """The health of a pipeline.""" @@ -391,6 +441,8 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health.value if self.last_modified is not None: @@ -422,6 +474,8 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health if self.last_modified is not None: @@ -450,6 +504,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse: cluster_id=d.get("cluster_id", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), health=_enum(d, "health", GetPipelineResponseHealth), last_modified=d.get("last_modified", None), latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), @@ -553,6 +608,9 @@ class IngestionGatewayPipelineDefinition: """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" + connection_parameters: Optional[ConnectionParameters] = None + """Optional, Internal. Parameters required to establish an initial connection with the source.""" + gateway_storage_name: Optional[str] = None """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. 
Delta Live Tables system will @@ -565,6 +623,8 @@ def as_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters.as_dict() if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -580,6 +640,8 @@ def as_shallow_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -594,6 +656,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionGatewayPipelineDefinition: return cls( connection_id=d.get("connection_id", None), connection_name=d.get("connection_name", None), + connection_parameters=_from_dict(d, "connection_parameters", ConnectionParameters), gateway_storage_catalog=d.get("gateway_storage_catalog", None), gateway_storage_name=d.get("gateway_storage_name", None), gateway_storage_schema=d.get("gateway_storage_schema", None), @@ -606,6 +669,11 @@ class IngestionPipelineDefinition: """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" + ingest_from_uc_foreign_catalog: Optional[bool] = None + """Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs directly + without the need to specify a UC connection or ingestion gateway. The `source_catalog` fields in + objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.""" + ingestion_gateway_id: Optional[str] = None """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. 
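A sketch of the new ConnectionParameters object above in the context of an ingestion gateway definition; the connection and storage names are placeholders, and the object would typically be supplied as a pipeline's gateway_definition.

    # Sketch only: an Oracle gateway where source_catalog maps to the service name.
    from databricks.sdk.service import pipelines

    gateway = pipelines.IngestionGatewayPipelineDefinition(
        connection_name="oracle_conn",  # placeholder UC connection
        connection_parameters=pipelines.ConnectionParameters(source_catalog="ORCLPDB1"),
        gateway_storage_catalog="main",
        gateway_storage_schema="ingest_gateway",
    )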
This is used with connectors to databases like SQL Server.""" @@ -633,6 +701,8 @@ def as_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.netsuite_jar_path is not None: @@ -652,6 +722,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.netsuite_jar_path is not None: @@ -671,6 +743,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" return cls( connection_name=d.get("connection_name", None), + ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None), ingestion_gateway_id=d.get("ingestion_gateway_id", None), netsuite_jar_path=d.get("netsuite_jar_path", None), objects=_repeated_dict(d, "objects", IngestionConfig), @@ -1076,6 +1149,9 @@ class Origin: flow_name: Optional[str] = None """The name of the flow. Not unique.""" + graph_id: Optional[str] = None + """The UUID of the graph associated with this event, corresponding to a GRAPH_UPDATED event.""" + host: Optional[str] = None """The optional host name where the event was triggered""" @@ -1124,6 +1200,8 @@ def as_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host if self.maintenance_id is not None: @@ -1163,6 +1241,8 @@ def as_shallow_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host if self.maintenance_id is not None: @@ -1197,6 +1277,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Origin: dataset_name=d.get("dataset_name", None), flow_id=d.get("flow_id", None), flow_name=d.get("flow_name", None), + graph_id=d.get("graph_id", None), host=d.get("host", None), maintenance_id=d.get("maintenance_id", None), materialization_name=d.get("materialization_name", None), @@ -2023,6 +2104,9 @@ class PipelineSpec: trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. 
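A sketch of the new ingest_from_uc_foreign_catalog switch above; the schema-level object shape (SchemaSpec inside IngestionConfig) is assumed from the existing generated classes, and the catalog and schema names are placeholders.

    # Sketch only: ingest directly from a UC foreign catalog, so no connection or
    # gateway is named; source_catalog is read as the foreign catalog to pull from.
    from databricks.sdk.service import pipelines

    ingestion = pipelines.IngestionPipelineDefinition(
        ingest_from_uc_foreign_catalog=True,  # new field
        objects=[
            pipelines.IngestionConfig(
                schema=pipelines.SchemaSpec(
                    source_catalog="my_foreign_catalog",
                    source_schema="sales",
                    destination_catalog="main",
                    destination_schema="sales_raw",
                )
            )
        ],
    )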
Deprecated: Use `continuous` instead.""" + usage_policy_id: Optional[str] = None + """Usage policy of this pipeline.""" + def as_dict(self) -> dict: """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2080,6 +2164,8 @@ def as_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: @@ -2139,6 +2225,8 @@ def as_shallow_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body @classmethod @@ -2172,6 +2260,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec: tags=d.get("tags", None), target=d.get("target", None), trigger=_from_dict(d, "trigger", PipelineTrigger), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -2510,6 +2599,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RestartWindow: ) +@dataclass +class RestorePipelineRequestResponse: + def as_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RestorePipelineRequestResponse: + """Deserializes the RestorePipelineRequestResponse from a dictionary.""" + return cls() + + @dataclass class RunAs: """Write-only setting, available only in Create/Update calls. Specifies the user or service @@ -2955,6 +3062,10 @@ class TableSpecificConfig: None ) + row_filter: Optional[str] = None + """(Optional, Immutable) The row filter condition to be applied to the table. It must not contain + the WHERE keyword, only the actual filter condition. It must be in DBSQL format.""" + salesforce_include_formula_fields: Optional[bool] = None """If true, formula fields defined in the table are included in the ingestion. 
This setting is only valid for the Salesforce connector""" @@ -2980,6 +3091,8 @@ def as_dict(self) -> dict: body["primary_keys"] = [v for v in self.primary_keys] if self.query_based_connector_config: body["query_based_connector_config"] = self.query_based_connector_config.as_dict() + if self.row_filter is not None: + body["row_filter"] = self.row_filter if self.salesforce_include_formula_fields is not None: body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields if self.scd_type is not None: @@ -3001,6 +3114,8 @@ def as_shallow_dict(self) -> dict: body["primary_keys"] = self.primary_keys if self.query_based_connector_config: body["query_based_connector_config"] = self.query_based_connector_config + if self.row_filter is not None: + body["row_filter"] = self.row_filter if self.salesforce_include_formula_fields is not None: body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields if self.scd_type is not None: @@ -3023,6 +3138,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: "query_based_connector_config", IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig, ), + row_filter=d.get("row_filter", None), salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None), scd_type=_enum(d, "scd_type", TableSpecificConfigScdType), sequence_by=d.get("sequence_by", None), @@ -3063,6 +3179,10 @@ class UpdateInfo: full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means that the states of the table will be reset before the refresh.""" + mode: Optional[UpdateMode] = None + """Indicates whether the update is either part of a continuous job run, or running in legacy + continuous pipeline mode.""" + pipeline_id: Optional[str] = None """The ID of the pipeline.""" @@ -3096,6 +3216,8 @@ def as_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = [v for v in self.full_refresh_selection] + if self.mode is not None: + body["mode"] = self.mode.value if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id if self.refresh_selection: @@ -3123,6 +3245,8 @@ def as_shallow_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = self.full_refresh_selection + if self.mode is not None: + body["mode"] = self.mode if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id if self.refresh_selection: @@ -3145,6 +3269,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateInfo: creation_time=d.get("creation_time", None), full_refresh=d.get("full_refresh", None), full_refresh_selection=d.get("full_refresh_selection", None), + mode=_enum(d, "mode", UpdateMode), pipeline_id=d.get("pipeline_id", None), refresh_selection=d.get("refresh_selection", None), state=_enum(d, "state", UpdateInfoState), @@ -3181,6 +3306,12 @@ class UpdateInfoState(Enum): WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" +class UpdateMode(Enum): + + CONTINUOUS = "CONTINUOUS" + DEFAULT = "DEFAULT" + + @dataclass class UpdateStateInfo: creation_time: Optional[str] = None @@ -3284,6 +3415,22 @@ def wait_get_pipeline_idle( attempt += 1 raise TimeoutError(f"timed out after {timeout}: {status_message}") + def apply_environment(self, pipeline_id: str) -> ApplyEnvironmentRequestResponse: + """* Applies the current pipeline environment onto the pipeline compute. The environment applied can be + used by subsequent dev-mode updates. 
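A sketch of the new row_filter option above; the filter string is a placeholder DBSQL condition written without the WHERE keyword.

    # Sketch only: per-table ingestion config that only keeps recent rows.
    from databricks.sdk.service import pipelines

    table_config = pipelines.TableSpecificConfig(
        primary_keys=["order_id"],
        row_filter="order_date >= '2024-01-01'",  # new field
    )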
+ + :param pipeline_id: str + + :returns: :class:`ApplyEnvironmentRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/environment/apply", headers=headers) + return ApplyEnvironmentRequestResponse.from_dict(res) + def create( self, *, @@ -3317,6 +3464,7 @@ def create( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ) -> CreatePipelineResponse: """Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. @@ -3387,6 +3535,8 @@ def create( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. :returns: :class:`CreatePipelineResponse` """ @@ -3451,6 +3601,8 @@ def create( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -3686,6 +3838,23 @@ def list_updates( res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers) return ListUpdatesResponse.from_dict(res) + def restore_pipeline(self, pipeline_id: str) -> RestorePipelineRequestResponse: + """* Restores a pipeline that was previously deleted, if within the restoration window. All tables + deleted at pipeline deletion will be undropped as well. + + :param pipeline_id: str + The ID of the pipeline to restore + + :returns: :class:`RestorePipelineRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/restore", headers=headers) + return RestorePipelineRequestResponse.from_dict(res) + def set_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: @@ -3775,9 +3944,7 @@ def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]: } op_response = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/stop", headers=headers) - return Wait( - self.wait_get_pipeline_idle, response=StopPipelineResponse.from_dict(op_response), pipeline_id=pipeline_id - ) + return Wait(self.wait_get_pipeline_idle, pipeline_id=pipeline_id) def stop_and_wait(self, pipeline_id: str, timeout=timedelta(minutes=20)) -> GetPipelineResponse: return self.stop(pipeline_id=pipeline_id).result(timeout=timeout) @@ -3816,6 +3983,7 @@ def update( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ): """Updates a pipeline with the supplied configuration. @@ -3889,6 +4057,8 @@ def update( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. 
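A combined sketch of the new pipeline calls above: create with a usage_policy_id, apply the pipeline environment, and restore a previously deleted pipeline. Names, paths, and IDs are placeholders.

    # Sketch only: serverless pipeline created under a usage policy.
    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import pipelines

    w = WorkspaceClient()
    created = w.pipelines.create(
        name="orders-dlt",
        serverless=True,
        catalog="main",
        schema="orders",
        libraries=[
            pipelines.PipelineLibrary(
                notebook=pipelines.NotebookLibrary(path="/Workspace/Shared/orders_dlt")
            )
        ],
        usage_policy_id="1234-5678-usage-policy",  # new argument
    )

    # New endpoints added in this patch.
    w.pipelines.apply_environment(pipeline_id=created.pipeline_id)
    w.pipelines.restore_pipeline(pipeline_id="previously-deleted-pipeline-id")  # placeholder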
""" @@ -3953,6 +4123,8 @@ def update( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 8e34b28f0..ab260aa5a 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -55,9 +57,9 @@ class AwsKeyInfo: """The AWS KMS key alias.""" reuse_key_for_cluster_volumes: Optional[bool] = None - """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` + """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this - key for encrypting EBS volumes, set to `false`.""" + key for encrypting EBS volumes, set to false.""" def as_dict(self) -> dict: """Serializes the AwsKeyInfo into a dictionary suitable for use as a JSON request body.""" @@ -96,6 +98,75 @@ def from_dict(cls, d: Dict[str, Any]) -> AwsKeyInfo: ) +@dataclass +class AzureKeyInfo: + disk_encryption_set_id: Optional[str] = None + """The Disk Encryption Set id that is used to represent the key info used for Managed Disk BYOK use + case""" + + key_access_configuration: Optional[KeyAccessConfiguration] = None + """The structure to store key access credential This is set if the Managed Identity is being used + to access the Azure Key Vault key.""" + + key_name: Optional[str] = None + """The name of the key in KeyVault.""" + + key_vault_uri: Optional[str] = None + """The base URI of the KeyVault.""" + + tenant_id: Optional[str] = None + """The tenant id where the KeyVault lives.""" + + version: Optional[str] = None + """The current key version.""" + + def as_dict(self) -> dict: + """Serializes the AzureKeyInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.disk_encryption_set_id is not None: + body["disk_encryption_set_id"] = self.disk_encryption_set_id + if self.key_access_configuration: + body["key_access_configuration"] = self.key_access_configuration.as_dict() + if self.key_name is not None: + body["key_name"] = self.key_name + if self.key_vault_uri is not None: + body["key_vault_uri"] = self.key_vault_uri + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.version is not None: + body["version"] = self.version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AzureKeyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disk_encryption_set_id is not None: + body["disk_encryption_set_id"] = self.disk_encryption_set_id + if self.key_access_configuration: + body["key_access_configuration"] = self.key_access_configuration + if self.key_name is not None: + body["key_name"] = self.key_name + if self.key_vault_uri is not None: + body["key_vault_uri"] = self.key_vault_uri + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.version is not None: + 
body["version"] = self.version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AzureKeyInfo: + """Deserializes the AzureKeyInfo from a dictionary.""" + return cls( + disk_encryption_set_id=d.get("disk_encryption_set_id", None), + key_access_configuration=_from_dict(d, "key_access_configuration", KeyAccessConfiguration), + key_name=d.get("key_name", None), + key_vault_uri=d.get("key_vault_uri", None), + tenant_id=d.get("tenant_id", None), + version=d.get("version", None), + ) + + @dataclass class AzureWorkspaceInfo: resource_group: Optional[str] = None @@ -130,8 +201,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureWorkspaceInfo: @dataclass class CloudResourceContainer: - """The general workspace configurations that are specific to cloud providers.""" - gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None def as_dict(self) -> dict: @@ -157,16 +226,18 @@ def from_dict(cls, d: Dict[str, Any]) -> CloudResourceContainer: @dataclass class CreateAwsKeyInfo: key_arn: str - """The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from - the ARN.""" + """The AWS KMS key's Amazon Resource Name (ARN).""" key_alias: Optional[str] = None """The AWS KMS key alias.""" + key_region: Optional[str] = None + """The AWS KMS key region.""" + reuse_key_for_cluster_volumes: Optional[bool] = None - """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` - or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key also for - encrypting EBS volumes, set this to `false`.""" + """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to true + or omitted, the key is also used to encrypt cluster EBS volumes. 
If you do not want to use this + key for encrypting EBS volumes, set to false.""" def as_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a dictionary suitable for use as a JSON request body.""" @@ -175,6 +246,8 @@ def as_dict(self) -> dict: body["key_alias"] = self.key_alias if self.key_arn is not None: body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region if self.reuse_key_for_cluster_volumes is not None: body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @@ -186,6 +259,8 @@ def as_shallow_dict(self) -> dict: body["key_alias"] = self.key_alias if self.key_arn is not None: body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region if self.reuse_key_for_cluster_volumes is not None: body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @@ -196,6 +271,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAwsKeyInfo: return cls( key_alias=d.get("key_alias", None), key_arn=d.get("key_arn", None), + key_region=d.get("key_region", None), reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), ) @@ -227,7 +303,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: @dataclass class CreateCredentialStsRole: role_arn: Optional[str] = None - """The Amazon Resource Name (ARN) of the cross account role.""" + """The Amazon Resource Name (ARN) of the cross account IAM role.""" def as_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a dictionary suitable for use as a JSON request body.""" @@ -252,7 +328,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: @dataclass class CreateGcpKeyInfo: kms_key_id: str - """The GCP KMS key's resource name""" + """Globally unique kms key resource id of the form + projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4""" def as_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" @@ -332,13 +409,18 @@ def from_dict(cls, d: Dict[str, Any]) -> Credential: ) +class CustomerFacingComputeMode(Enum): + """Corresponds to compute mode defined here: + https://src.dev.databricks.com/databricks/universe@9076536b18479afd639d1c1f9dd5a59f72215e69/-/blob/central/api/common.proto?L872 + """ + + HYBRID = "HYBRID" + SERVERLESS = "SERVERLESS" + + @dataclass class CustomerFacingGcpCloudResourceContainer: - """The general workspace configurations that are specific to Google Cloud.""" - project_id: Optional[str] = None - """The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your - workspace.""" def as_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a dictionary suitable for use as a JSON request body.""" @@ -360,6 +442,12 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerFacingGcpCloudResourceContainer return cls(project_id=d.get("project_id", None)) +class CustomerFacingStorageMode(Enum): + + CUSTOMER_HOSTED = "CUSTOMER_HOSTED" + DEFAULT_STORAGE = "DEFAULT_STORAGE" + + @dataclass class CustomerManagedKey: account_id: Optional[str] = None @@ -367,6 +455,8 @@ class CustomerManagedKey: aws_key_info: Optional[AwsKeyInfo] = None + azure_key_info: Optional[AzureKeyInfo] = None + creation_time: Optional[int] = None """Time in epoch milliseconds when the customer key was created.""" @@ -385,6 +475,8 @@ def as_dict(self) -> dict: body["account_id"] = self.account_id if 
self.aws_key_info: body["aws_key_info"] = self.aws_key_info.as_dict() + if self.azure_key_info: + body["azure_key_info"] = self.azure_key_info.as_dict() if self.creation_time is not None: body["creation_time"] = self.creation_time if self.customer_managed_key_id is not None: @@ -402,6 +494,8 @@ def as_shallow_dict(self) -> dict: body["account_id"] = self.account_id if self.aws_key_info: body["aws_key_info"] = self.aws_key_info + if self.azure_key_info: + body["azure_key_info"] = self.azure_key_info if self.creation_time is not None: body["creation_time"] = self.creation_time if self.customer_managed_key_id is not None: @@ -418,6 +512,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: return cls( account_id=d.get("account_id", None), aws_key_info=_from_dict(d, "aws_key_info", AwsKeyInfo), + azure_key_info=_from_dict(d, "azure_key_info", AzureKeyInfo), creation_time=d.get("creation_time", None), customer_managed_key_id=d.get("customer_managed_key_id", None), gcp_key_info=_from_dict(d, "gcp_key_info", GcpKeyInfo), @@ -425,37 +520,15 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: ) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - class EndpointUseCase(Enum): - """This enumeration represents the type of Databricks VPC [endpoint service] that was used when - creating this VPC endpoint. - - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" DATAPLANE_RELAY_ACCESS = "DATAPLANE_RELAY_ACCESS" WORKSPACE_ACCESS = "WORKSPACE_ACCESS" class ErrorType(Enum): - """The AWS resource associated with this error: credentials, VPC, subnet, security group, or - network ACL.""" + """ErrorType and WarningType are used to represent the type of error or warning by NetworkHealth + and NetworkWarning defined in central/api/accounts/accounts.proto""" CREDENTIALS = "credentials" NETWORK_ACL = "networkAcl" @@ -465,52 +538,49 @@ class ErrorType(Enum): @dataclass -class ExternalCustomerInfo: - authoritative_user_email: Optional[str] = None - """Email of the authoritative user.""" +class GcpCommonNetworkConfig: + """The shared network config for GCP workspace. This object has common network configurations that + are network attributions of a workspace. DEPRECATED. Use GkeConfig instead.""" - authoritative_user_full_name: Optional[str] = None - """The authoritative user full name.""" + gke_cluster_master_ip_range: Optional[str] = None + """The IP range that will be used to allocate GKE cluster master resources from. 
This field must + not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER.""" - customer_name: Optional[str] = None - """The legal entity name for the external workspace""" + gke_connectivity_type: Optional[GkeConfigConnectivityType] = None + """The type of network connectivity of the GKE cluster.""" def as_dict(self) -> dict: - """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.authoritative_user_email is not None: - body["authoritative_user_email"] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: - body["authoritative_user_full_name"] = self.authoritative_user_full_name - if self.customer_name is not None: - body["customer_name"] = self.customer_name + """Serializes the GcpCommonNetworkConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.gke_cluster_master_ip_range is not None: + body["gke_cluster_master_ip_range"] = self.gke_cluster_master_ip_range + if self.gke_connectivity_type is not None: + body["gke_connectivity_type"] = self.gke_connectivity_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.authoritative_user_email is not None: - body["authoritative_user_email"] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: - body["authoritative_user_full_name"] = self.authoritative_user_full_name - if self.customer_name is not None: - body["customer_name"] = self.customer_name + """Serializes the GcpCommonNetworkConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gke_cluster_master_ip_range is not None: + body["gke_cluster_master_ip_range"] = self.gke_cluster_master_ip_range + if self.gke_connectivity_type is not None: + body["gke_connectivity_type"] = self.gke_connectivity_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalCustomerInfo: - """Deserializes the ExternalCustomerInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GcpCommonNetworkConfig: + """Deserializes the GcpCommonNetworkConfig from a dictionary.""" return cls( - authoritative_user_email=d.get("authoritative_user_email", None), - authoritative_user_full_name=d.get("authoritative_user_full_name", None), - customer_name=d.get("customer_name", None), + gke_cluster_master_ip_range=d.get("gke_cluster_master_ip_range", None), + gke_connectivity_type=_enum(d, "gke_connectivity_type", GkeConfigConnectivityType), ) @dataclass class GcpKeyInfo: kms_key_id: str - """The GCP KMS key's resource name""" + """Globally unique kms key resource id of the form + projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4""" def as_dict(self) -> dict: """Serializes the GcpKeyInfo into a dictionary suitable for use as a JSON request body.""" @@ -534,37 +604,17 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpKeyInfo: @dataclass class GcpManagedNetworkConfig: - """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP - range configurations must be mutually exclusive. An attempt to create a workspace fails if - Databricks detects an IP range overlap. - - Specify custom IP ranges in CIDR format. 
The IP ranges for these fields must not overlap, and - all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - """ + """The network configuration for the workspace.""" gke_cluster_pod_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than - `/21`.""" + """The IP range that will be used to allocate GKE cluster Pods from.""" gke_cluster_service_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller - than `/27`.""" + """The IP range that will be used to allocate GKE cluster Services from.""" subnet_cidr: Optional[str] = None - """The IP range from which to allocate GKE cluster nodes. No bigger than `/9` and no smaller than - `/29`.""" + """The IP range which will be used to allocate GKE cluster nodes from. Note: Pods, services and + master IP range must be mutually exclusive.""" def as_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a dictionary suitable for use as a JSON request body.""" @@ -600,29 +650,24 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpManagedNetworkConfig: @dataclass class GcpNetworkInfo: - """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges).""" - network_project_id: str - """The Google Cloud project ID of the VPC network.""" + """The GCP project ID for network resources. This project is where the VPC and subnet resides.""" vpc_id: str - """The ID of the VPC associated with this network. VPC IDs can be used in multiple network - configurations.""" + """The customer-provided VPC ID.""" subnet_id: str - """The ID of the subnet associated with this network.""" + """The customer-provided Subnet ID that will be available to Clusters in Workspaces using this + Network.""" subnet_region: str - """The Google Cloud region of the workspace data plane (for example, `us-east4`).""" pod_ip_range_name: str - """The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP range - for its pods. This secondary IP range can be used by only one workspace.""" + """Name of the secondary range within the subnet that will be used by GKE as Pod IP range. This is + BYO VPC specific. DB VPC uses network.getGcpManagedNetworkConfig.getGkeClusterPodIpRange""" service_ip_range_name: str - """The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP - range for its services. 
This secondary IP range can be used by only one workspace.""" + """Name of the secondary range within the subnet that will be used by GKE as Service IP range.""" def as_dict(self) -> dict: """Serializes the GcpNetworkInfo into a dictionary suitable for use as a JSON request body.""" @@ -673,22 +718,15 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpNetworkInfo: @dataclass class GcpVpcEndpointInfo: - """The Google Cloud specific information for this Private Service Connect endpoint.""" - project_id: str - """The Google Cloud project ID of the VPC network where the PSC connection resides.""" psc_endpoint_name: str - """The name of the PSC endpoint in the Google Cloud project.""" endpoint_region: str - """Region of the PSC endpoint.""" psc_connection_id: Optional[str] = None - """The unique ID of this PSC connection.""" service_attachment_id: Optional[str] = None - """The service attachment this PSC connection connects to.""" def as_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a dictionary suitable for use as a JSON request body.""" @@ -734,22 +772,14 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpVpcEndpointInfo: @dataclass class GkeConfig: - """The configurations for the GKE cluster of a Databricks workspace.""" + """The configurations of the GKE cluster used by the GCP workspace.""" connectivity_type: Optional[GkeConfigConnectivityType] = None - """Specifies the network connectivity types for the GKE nodes and the GKE master network. - - Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes - will not have public IPs. - - Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster - have public IP addresses.""" + """The type of network connectivity of the GKE cluster.""" master_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster master resources. This field will be ignored if - GKE private cluster is not enabled. - - It must be exactly as big as `/28`.""" + """The IP range that will be used to allocate GKE cluster master resources from. 
This field must + not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER.""" def as_dict(self) -> dict: """Serializes the GkeConfig into a dictionary suitable for use as a JSON request body.""" @@ -791,10 +821,33 @@ class GkeConfigConnectivityType(Enum): PUBLIC_NODE_PUBLIC_MASTER = "PUBLIC_NODE_PUBLIC_MASTER" +@dataclass +class KeyAccessConfiguration: + """The credential ID that is used to access the key vault.""" + + credential_id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the KeyAccessConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the KeyAccessConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> KeyAccessConfiguration: + """Deserializes the KeyAccessConfiguration from a dictionary.""" + return cls(credential_id=d.get("credential_id", None)) + + class KeyUseCase(Enum): - """Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control - plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, - optionally, cluster EBS volumes.""" MANAGED_SERVICES = "MANAGED_SERVICES" STORAGE = "STORAGE" @@ -820,8 +873,12 @@ class Network: """The human-readable name of the network configuration.""" security_group_ids: Optional[List[str]] = None + """IDs of one to five security groups associated with this network. Security group IDs **cannot** + be used in multiple network configurations.""" subnet_ids: Optional[List[str]] = None + """IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in + multiple network configurations.""" vpc_endpoints: Optional[NetworkVpcEndpoints] = None @@ -952,18 +1009,13 @@ def from_dict(cls, d: Dict[str, Any]) -> NetworkHealth: @dataclass class NetworkVpcEndpoints: - """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - [AWS PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - - rest_api: List[str] - """The VPC endpoint ID used by this network to access the Databricks REST API.""" - - dataplane_relay: List[str] + dataplane_relay: Optional[List[str]] = None """The VPC endpoint ID used by this network to access the Databricks secure cluster connectivity relay.""" + rest_api: Optional[List[str]] = None + """The VPC endpoint ID used by this network to access the Databricks REST API.""" + def as_dict(self) -> dict: """Serializes the NetworkVpcEndpoints into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1020,9 +1072,6 @@ def from_dict(cls, d: Dict[str, Any]) -> NetworkWarning: class PricingTier(Enum): - """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing""" COMMUNITY_EDITION = "COMMUNITY_EDITION" DEDICATED = "DEDICATED" @@ -1033,11 +1082,6 @@ class PricingTier(Enum): class PrivateAccessLevel(Enum): - """The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. * `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. 
* `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. For details, see `allowed_vpc_endpoint_ids`.""" ACCOUNT = "ACCOUNT" ENDPOINT = "ENDPOINT" @@ -1045,13 +1089,26 @@ class PrivateAccessLevel(Enum): @dataclass class PrivateAccessSettings: + """*""" + account_id: Optional[str] = None - """The Databricks account ID that hosts the credential.""" + """The Databricks account ID that hosts the private access settings.""" allowed_vpc_endpoint_ids: Optional[List[str]] = None - """An array of Databricks VPC endpoint IDs.""" + """An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when + registering the VPC endpoint configuration in your Databricks account. This is not the ID of the + VPC endpoint in AWS. Only used when private_access_level is set to ENDPOINT. This is an allow + list of VPC endpoints that in your account that can connect to your workspace over AWS + PrivateLink. If hybrid access to your workspace is enabled by setting public_access_enabled to + true, this control only works for PrivateLink connections. To control how your workspace is + accessed via public internet, see IP access lists.""" private_access_level: Optional[PrivateAccessLevel] = None + """The private access level controls which VPC endpoints can connect to the UI or API of any + workspace that attaches this private access settings object. `ACCOUNT` level access (the + default) allows only VPC endpoints that are registered in your Databricks account connect to + your workspace. `ENDPOINT` level access allows only specified VPC endpoints connect to your + workspace. For details, see allowed_vpc_endpoint_ids.""" private_access_settings_id: Optional[str] = None """Databricks private access settings ID.""" @@ -1061,12 +1118,11 @@ class PrivateAccessSettings: public_access_enabled: Optional[bool] = None """Determines if the workspace can be accessed over public internet. For fully private workspaces, - you can optionally specify `false`, but only if you implement both the front-end and the - back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is - enabled.""" + you can optionally specify false, but only if you implement both the front-end and the back-end + PrivateLink connections. 
Otherwise, specify true, which means that public access is enabled.""" region: Optional[str] = None - """The cloud region for workspaces attached to this private access settings object.""" + """The AWS region for workspaces attached to this private access settings object.""" def as_dict(self) -> dict: """Serializes the PrivateAccessSettings into a dictionary suitable for use as a JSON request body.""" @@ -1120,30 +1176,10 @@ def from_dict(cls, d: Dict[str, Any]) -> PrivateAccessSettings: ) -@dataclass -class ReplaceResponse: - def as_dict(self) -> dict: - """Serializes the ReplaceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: - """Deserializes the ReplaceResponse from a dictionary.""" - return cls() - - @dataclass class RootBucketInfo: - """Root S3 bucket information.""" - bucket_name: Optional[str] = None - """The name of the S3 bucket.""" + """Name of the S3 bucket""" def as_dict(self) -> dict: """Serializes the RootBucketInfo into a dictionary suitable for use as a JSON request body.""" @@ -1168,12 +1204,20 @@ def from_dict(cls, d: Dict[str, Any]) -> RootBucketInfo: @dataclass class StorageConfiguration: account_id: Optional[str] = None - """The Databricks account ID that hosts the credential.""" + """The Databricks account ID associated with this storage configuration.""" creation_time: Optional[int] = None """Time in epoch milliseconds when the storage configuration was created.""" + role_arn: Optional[str] = None + """Optional IAM role that is used to access the workspace catalog which is created during workspace + creation for UC by Default. If a storage configuration with this field populated is used to + create a workspace, then a workspace catalog is created together with the workspace. 
The + workspace catalog shares the root bucket with internal workspace storage (including DBFS root) + but uses a dedicated bucket path prefix.""" + root_bucket_info: Optional[RootBucketInfo] = None + """The root bucket information for the storage configuration.""" storage_configuration_id: Optional[str] = None """Databricks storage configuration ID.""" @@ -1188,6 +1232,8 @@ def as_dict(self) -> dict: body["account_id"] = self.account_id if self.creation_time is not None: body["creation_time"] = self.creation_time + if self.role_arn is not None: + body["role_arn"] = self.role_arn if self.root_bucket_info: body["root_bucket_info"] = self.root_bucket_info.as_dict() if self.storage_configuration_id is not None: @@ -1203,6 +1249,8 @@ def as_shallow_dict(self) -> dict: body["account_id"] = self.account_id if self.creation_time is not None: body["creation_time"] = self.creation_time + if self.role_arn is not None: + body["role_arn"] = self.role_arn if self.root_bucket_info: body["root_bucket_info"] = self.root_bucket_info if self.storage_configuration_id is not None: @@ -1217,6 +1265,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration: return cls( account_id=d.get("account_id", None), creation_time=d.get("creation_time", None), + role_arn=d.get("role_arn", None), root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), storage_configuration_id=d.get("storage_configuration_id", None), storage_configuration_name=d.get("storage_configuration_name", None), @@ -1225,18 +1274,12 @@ def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration: @dataclass class StsRole: - external_id: Optional[str] = None - """The external ID that needs to be trusted by the cross-account role. This is always your - Databricks account ID.""" - role_arn: Optional[str] = None - """The Amazon Resource Name (ARN) of the cross account role.""" + """The Amazon Resource Name (ARN) of the cross account IAM role.""" def as_dict(self) -> dict: """Serializes the StsRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id if self.role_arn is not None: body["role_arn"] = self.role_arn return body @@ -1244,8 +1287,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the StsRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id if self.role_arn is not None: body["role_arn"] = self.role_arn return body @@ -1253,31 +1294,16 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> StsRole: """Deserializes the StsRole from a dictionary.""" - return cls(external_id=d.get("external_id", None), role_arn=d.get("role_arn", None)) - - -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + return cls(role_arn=d.get("role_arn", None)) @dataclass class VpcEndpoint: + """*""" + account_id: Optional[str] = None - """The Databricks account ID that hosts the VPC endpoint configuration.""" + """The Databricks account ID that hosts the VPC 
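A read-side sketch of the new role_arn field on StorageConfiguration above, using the existing account storage list call.

    # Sketch only: show which storage configurations carry a UC-by-default role ARN.
    from databricks.sdk import AccountClient

    a = AccountClient()
    for sc in a.storage.list():
        print(sc.storage_configuration_name, sc.role_arn)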
endpoint configuration. TODO - This may signal an + OpenAPI diff; it does not show up in the generated spec""" aws_account_id: Optional[str] = None """The AWS Account in which the VPC endpoint object exists.""" @@ -1294,6 +1320,7 @@ class VpcEndpoint: """The ID of the VPC endpoint object in AWS.""" gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None + """The cloud info of this vpc endpoint. Info for a GCP vpc endpoint.""" region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" @@ -1305,6 +1332,11 @@ class VpcEndpoint: [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html""" use_case: Optional[EndpointUseCase] = None + """This enumeration represents the type of Databricks VPC endpoint service that was used when + creating this VPC endpoint. If the VPC endpoint connects to the Databricks control plane for + either the front-end connection or the back-end REST API connection, the value is + WORKSPACE_ACCESS. If the VPC endpoint connects to the Databricks workspace for the back-end + secure cluster connectivity relay, the value is DATAPLANE_RELAY_ACCESS.""" vpc_endpoint_id: Optional[str] = None """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not @@ -1381,8 +1413,6 @@ def from_dict(cls, d: Dict[str, Any]) -> VpcEndpoint: class VpcStatus(Enum): - """The status of this network configuration object in terms of its use in a workspace: * - `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.""" BROKEN = "BROKEN" UNATTACHED = "UNATTACHED" @@ -1391,7 +1421,6 @@ class VpcStatus(Enum): class WarningType(Enum): - """The AWS resource associated with this warning: a subnet or a security group.""" SECURITY_GROUP = "securityGroup" SUBNET = "subnet" @@ -1403,7 +1432,6 @@ class Workspace: """Databricks account ID.""" aws_region: Optional[str] = None - """The AWS region of the workspace data plane (for example, `us-west-2`).""" azure_workspace_info: Optional[AzureWorkspaceInfo] = None @@ -1412,6 +1440,9 @@ class Workspace: cloud_resource_container: Optional[CloudResourceContainer] = None + compute_mode: Optional[CustomerFacingComputeMode] = None + """The compute mode of the workspace.""" + creation_time: Optional[int] = None """Time in epoch milliseconds when the workspace was created.""" @@ -1424,22 +1455,11 @@ class Workspace: characters. The key can be of maximum length of 127 characters, and cannot be empty.""" deployment_name: Optional[str] = None - """The deployment name defines part of the subdomain for the workspace. The workspace URL for web - application and REST APIs is `.cloud.databricks.com`. - - This value must be unique across all non-deleted deployments across all AWS regions.""" - - external_customer_info: Optional[ExternalCustomerInfo] = None - """If this workspace is for a external customer, then external_customer_info is populated. 
If this - workspace is not for a external customer, then external_customer_info is empty.""" gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None gke_config: Optional[GkeConfig] = None - is_no_public_ip_enabled: Optional[bool] = None - """Whether no public IP is enabled for the workspace.""" - location: Optional[str] = None """The Google Cloud region of the workspace data plane in your Google account (for example, `us-east4`).""" @@ -1447,9 +1467,17 @@ class Workspace: managed_services_customer_managed_key_id: Optional[str] = None """ID of the key configuration for encrypting managed services.""" + network: Optional[WorkspaceNetwork] = None + """The network configuration for the workspace. + + DEPRECATED. Use `network_id` instead.""" + + network_connectivity_config_id: Optional[str] = None + """The object ID of network connectivity config.""" + network_id: Optional[str] = None - """The network configuration ID that is attached to the workspace. This field is available only if - the network is a customer-managed network.""" + """If this workspace is BYO VPC, then the network_id will be populated. If this workspace is not + BYO VPC, then the network_id will be empty.""" pricing_tier: Optional[PricingTier] = None @@ -1469,6 +1497,9 @@ class Workspace: storage_customer_managed_key_id: Optional[str] = None """ID of the key configuration for encrypting workspace storage.""" + storage_mode: Optional[CustomerFacingStorageMode] = None + """The storage mode of the workspace.""" + workspace_id: Optional[int] = None """A unique integer ID for the workspace""" @@ -1476,6 +1507,7 @@ class Workspace: """The human-readable name of the workspace.""" workspace_status: Optional[WorkspaceStatus] = None + """The status of a workspace""" workspace_status_message: Optional[str] = None """Message describing the current workspace status.""" @@ -1493,6 +1525,8 @@ def as_dict(self) -> dict: body["cloud"] = self.cloud if self.cloud_resource_container: body["cloud_resource_container"] = self.cloud_resource_container.as_dict() + if self.compute_mode is not None: + body["compute_mode"] = self.compute_mode.value if self.creation_time is not None: body["creation_time"] = self.creation_time if self.credentials_id is not None: @@ -1501,18 +1535,18 @@ def as_dict(self) -> dict: body["custom_tags"] = self.custom_tags if self.deployment_name is not None: body["deployment_name"] = self.deployment_name - if self.external_customer_info: - body["external_customer_info"] = self.external_customer_info.as_dict() if self.gcp_managed_network_config: body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() if self.gke_config: body["gke_config"] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled if self.location is not None: body["location"] = self.location if self.managed_services_customer_managed_key_id is not None: body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network: + body["network"] = self.network.as_dict() + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id if self.network_id is not None: body["network_id"] = self.network_id if self.pricing_tier is not None: @@ -1523,6 +1557,8 @@ def as_dict(self) -> dict: body["storage_configuration_id"] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: body["storage_customer_managed_key_id"] = 
self.storage_customer_managed_key_id + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode.value if self.workspace_id is not None: body["workspace_id"] = self.workspace_id if self.workspace_name is not None: @@ -1546,6 +1582,8 @@ def as_shallow_dict(self) -> dict: body["cloud"] = self.cloud if self.cloud_resource_container: body["cloud_resource_container"] = self.cloud_resource_container + if self.compute_mode is not None: + body["compute_mode"] = self.compute_mode if self.creation_time is not None: body["creation_time"] = self.creation_time if self.credentials_id is not None: @@ -1554,18 +1592,18 @@ def as_shallow_dict(self) -> dict: body["custom_tags"] = self.custom_tags if self.deployment_name is not None: body["deployment_name"] = self.deployment_name - if self.external_customer_info: - body["external_customer_info"] = self.external_customer_info if self.gcp_managed_network_config: body["gcp_managed_network_config"] = self.gcp_managed_network_config if self.gke_config: body["gke_config"] = self.gke_config - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled if self.location is not None: body["location"] = self.location if self.managed_services_customer_managed_key_id is not None: body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network: + body["network"] = self.network + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id if self.network_id is not None: body["network_id"] = self.network_id if self.pricing_tier is not None: @@ -1576,6 +1614,8 @@ def as_shallow_dict(self) -> dict: body["storage_configuration_id"] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode if self.workspace_id is not None: body["workspace_id"] = self.workspace_id if self.workspace_name is not None: @@ -1595,21 +1635,23 @@ def from_dict(cls, d: Dict[str, Any]) -> Workspace: azure_workspace_info=_from_dict(d, "azure_workspace_info", AzureWorkspaceInfo), cloud=d.get("cloud", None), cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), + compute_mode=_enum(d, "compute_mode", CustomerFacingComputeMode), creation_time=d.get("creation_time", None), credentials_id=d.get("credentials_id", None), custom_tags=d.get("custom_tags", None), deployment_name=d.get("deployment_name", None), - external_customer_info=_from_dict(d, "external_customer_info", ExternalCustomerInfo), gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), gke_config=_from_dict(d, "gke_config", GkeConfig), - is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), location=d.get("location", None), managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), + network=_from_dict(d, "network", WorkspaceNetwork), + network_connectivity_config_id=d.get("network_connectivity_config_id", None), network_id=d.get("network_id", None), pricing_tier=_enum(d, "pricing_tier", PricingTier), private_access_settings_id=d.get("private_access_settings_id", None), storage_configuration_id=d.get("storage_configuration_id", None), storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), + storage_mode=_enum(d, "storage_mode", 
CustomerFacingStorageMode), workspace_id=d.get("workspace_id", None), workspace_name=d.get("workspace_name", None), workspace_status=_enum(d, "workspace_status", WorkspaceStatus), @@ -1617,9 +1659,65 @@ def from_dict(cls, d: Dict[str, Any]) -> Workspace: ) +@dataclass +class WorkspaceNetwork: + """The network configuration for workspaces.""" + + gcp_common_network_config: Optional[GcpCommonNetworkConfig] = None + """The shared network config for GCP workspace. This object has common network configurations that + are network attributions of a workspace. This object is input-only.""" + + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None + """The mutually exclusive network deployment modes. The option decides which network mode the + workspace will use. The network config for GCP workspace with Databricks managed network. This + object is input-only and will not be provided when listing workspaces. See + go/gcp-byovpc-alpha-design for interface decisions.""" + + network_id: Optional[str] = None + """The ID of the network object, if the workspace is a BYOVPC workspace. This should apply to + workspaces on all clouds in internal services. In accounts-rest-api, user will use + workspace.network_id for input and output instead. Currently (2021-06-19) the network ID is only + used by GCP.""" + + def as_dict(self) -> dict: + """Serializes the WorkspaceNetwork into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.gcp_common_network_config: + body["gcp_common_network_config"] = self.gcp_common_network_config.as_dict() + if self.gcp_managed_network_config: + body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() + if self.network_id is not None: + body["network_id"] = self.network_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceNetwork into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gcp_common_network_config: + body["gcp_common_network_config"] = self.gcp_common_network_config + if self.gcp_managed_network_config: + body["gcp_managed_network_config"] = self.gcp_managed_network_config + if self.network_id is not None: + body["network_id"] = self.network_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> WorkspaceNetwork: + """Deserializes the WorkspaceNetwork from a dictionary.""" + return cls( + gcp_common_network_config=_from_dict(d, "gcp_common_network_config", GcpCommonNetworkConfig), + gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), + network_id=d.get("network_id", None), + ) + + class WorkspaceStatus(Enum): - """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` - initially. Continue to check the status until the status is `RUNNING`.""" + """The different statuses of a workspace. The following represents the current set of valid + transitions from status to status: NOT_PROVISIONED -> PROVISIONING -> CANCELLED PROVISIONING -> + RUNNING -> FAILED -> CANCELLED (note that this transition is disallowed in the MultiWorkspace + Project) RUNNING -> PROVISIONING -> BANNED -> CANCELLED FAILED -> PROVISIONING -> CANCELLED + BANNED -> RUNNING -> CANCELLED Note that a transition from any state to itself is also valid. + TODO(PLAT-5867): add a transition from CANCELLED to some other value (e.g. 
RECOVERING)""" BANNED = "BANNED" CANCELLING = "CANCELLING" @@ -1671,29 +1769,30 @@ def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCred res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/credentials", body=body, headers=headers) return Credential.from_dict(res) - def delete(self, credentials_id: str): + def delete(self, credentials_id: str) -> Credential: """Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. :param credentials_id: str Databricks Account API credential configuration ID - + :returns: :class:`Credential` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers ) + return Credential.from_dict(res) def get(self, credentials_id: str) -> Credential: """Gets a Databricks credential configuration object for an account, both specified by ID. :param credentials_id: str - Databricks Account API credential configuration ID + Credential configuration ID :returns: :class:`Credential` """ @@ -1708,7 +1807,7 @@ def get(self, credentials_id: str) -> Credential: return Credential.from_dict(res) def list(self) -> Iterator[Credential]: - """Gets all Databricks credential configurations associated with an account specified by ID. + """List Databricks credential configuration objects for an account, specified by ID. :returns: Iterator over :class:`Credential` @@ -1785,25 +1884,26 @@ def create( ) return CustomerManagedKey.from_dict(res) - def delete(self, customer_managed_key_id: str): + def delete(self, customer_managed_key_id: str) -> CustomerManagedKey: """Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. :param customer_managed_key_id: str Databricks encryption key configuration ID. - + :returns: :class:`CustomerManagedKey` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", headers=headers, ) + return CustomerManagedKey.from_dict(res) def get(self, customer_managed_key_id: str) -> CustomerManagedKey: """Gets a customer-managed key configuration object for an account, specified by ID. This operation @@ -1837,16 +1937,7 @@ def get(self, customer_managed_key_id: str) -> CustomerManagedKey: return CustomerManagedKey.from_dict(res) def list(self) -> Iterator[CustomerManagedKey]: - """Gets all customer-managed key configuration objects for an account. If the key is specified as a - workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's - notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. - If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the - workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane. - - **Important**: Customer-managed keys are supported only for some deployment types, subscription types, - and AWS regions. - - This operation is available only if your account is on the E2 version of the platform. + """Lists Databricks customer-managed key configurations for an account. 
:returns: Iterator over :class:`CustomerManagedKey` @@ -1869,9 +1960,9 @@ def __init__(self, api_client): def create( self, - network_name: str, *, gcp_network_info: Optional[GcpNetworkInfo] = None, + network_name: Optional[str] = None, security_group_ids: Optional[List[str]] = None, subnet_ids: Optional[List[str]] = None, vpc_endpoints: Optional[NetworkVpcEndpoints] = None, @@ -1880,9 +1971,9 @@ def create( """Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. - :param network_name: str - The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) + :param network_name: str (optional) + The human-readable name of the network configuration. :param security_group_ids: List[str] (optional) IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations. @@ -1891,8 +1982,8 @@ def create( network configurations. :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) :param vpc_id: str (optional) - The ID of the VPC associated with this network. VPC IDs can be used in multiple network - configurations. + The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple + networks. :returns: :class:`Network` """ @@ -1917,7 +2008,7 @@ def create( res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/networks", body=body, headers=headers) return Network.from_dict(res) - def delete(self, network_id: str): + def delete(self, network_id: str) -> Network: """Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. @@ -1926,14 +2017,15 @@ def delete(self, network_id: str): :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` """ headers = { "Accept": "application/json", } - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + res = self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + return Network.from_dict(res) def get(self, network_id: str) -> Network: """Gets a Databricks network configuration, which represents a cloud VPC and its resources. @@ -1952,9 +2044,7 @@ def get(self, network_id: str) -> Network: return Network.from_dict(res) def list(self) -> Iterator[Network]: - """Gets a list of all Databricks network configurations for an account, specified by ID. - - This operation is available only if your account is on the E2 version of the platform. + """Lists Databricks network configurations for an account. :returns: Iterator over :class:`Network` @@ -1976,48 +2066,39 @@ def __init__(self, api_client): def create( self, - private_access_settings_name: str, - region: str, *, allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, + private_access_settings_name: Optional[str] = None, public_access_enabled: Optional[bool] = None, + region: Optional[str] = None, ) -> PrivateAccessSettings: - """Creates a private access settings object, which specifies how your workspace is accessed over [AWS - PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object - referenced by ID in the workspace's `private_access_settings_id` property. 
- - You can share one private access settings with multiple workspaces in a single account. However, - private access settings are specific to AWS regions, so only workspaces in the same AWS region can use - a given private access settings object. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Creates a private access settings configuration, which represents network access restrictions for + workspace resources. Private access settings configure whether workspaces can be accessed from the + public internet or only from private endpoints. - :param private_access_settings_name: str - The human-readable name of the private access settings object. - :param region: str - The cloud region for workspaces associated with this private access settings object. :param allowed_vpc_endpoint_ids: List[str] (optional) - An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering - the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in - AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints - that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via public - internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html + An array of Databricks VPC endpoint IDs. This is the Databricks ID returned when registering the VPC + endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. + Only used when private_access_level is set to ENDPOINT. This is an allow list of VPC endpoints + registered in your Databricks account that can connect to your workspace over AWS PrivateLink. Note: + If hybrid access to your workspace is enabled by setting public_access_enabled to true, this control + only works for PrivateLink connections. To control how your workspace is accessed via public + internet, see IP access lists. :param private_access_level: :class:`PrivateAccessLevel` (optional) + The private access level controls which VPC endpoints can connect to the UI or API of any workspace + that attaches this private access settings object. `ACCOUNT` level access (the default) allows only + VPC endpoints that are registered in your Databricks account connect to your workspace. `ENDPOINT` + level access allows only specified VPC endpoints connect to your workspace. For details, see + allowed_vpc_endpoint_ids. + :param private_access_settings_name: str (optional) + The human-readable name of the private access settings object. :param public_access_enabled: bool (optional) Determines if the workspace can be accessed over public internet. For fully private workspaces, you - can optionally specify `false`, but only if you implement both the front-end and the back-end - PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. + can optionally specify false, but only if you implement both the front-end and the back-end + PrivateLink connections. Otherwise, specify true, which means that public access is enabled. 
+ :param region: str (optional) + The AWS region for workspaces attached to this private access settings object. :returns: :class:`PrivateAccessSettings` """ @@ -2042,42 +2123,29 @@ def create( ) return PrivateAccessSettings.from_dict(res) - def delete(self, private_access_settings_id: str): - """Deletes a private access settings object, which determines how your workspace is accessed over [AWS - PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + def delete(self, private_access_settings_id: str) -> PrivateAccessSettings: + """Deletes a Databricks private access settings configuration, both specified by ID. :param private_access_settings_id: str - Databricks Account API private access settings ID. - + :returns: :class:`PrivateAccessSettings` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", headers=headers, ) + return PrivateAccessSettings.from_dict(res) def get(self, private_access_settings_id: str) -> PrivateAccessSettings: - """Gets a private access settings object, which specifies how your workspace is accessed over [AWS - PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Gets a Databricks private access settings configuration, both specified by ID. :param private_access_settings_id: str - Databricks Account API private access settings ID. :returns: :class:`PrivateAccessSettings` """ @@ -2094,7 +2162,7 @@ def get(self, private_access_settings_id: str) -> PrivateAccessSettings: return PrivateAccessSettings.from_dict(res) def list(self) -> Iterator[PrivateAccessSettings]: - """Gets a list of all private access settings objects for an account, specified by ID. + """Lists Databricks private access settings for an account. :returns: Iterator over :class:`PrivateAccessSettings` @@ -2108,82 +2176,39 @@ def list(self) -> Iterator[PrivateAccessSettings]: return [PrivateAccessSettings.from_dict(v) for v in res] def replace( - self, - private_access_settings_id: str, - private_access_settings_name: str, - region: str, - *, - allowed_vpc_endpoint_ids: Optional[List[str]] = None, - private_access_level: Optional[PrivateAccessLevel] = None, - public_access_enabled: Optional[bool] = None, - ): + self, private_access_settings_id: str, customer_facing_private_access_settings: PrivateAccessSettings + ) -> PrivateAccessSettings: """Updates an existing private access settings object, which specifies how your workspace is accessed - over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object - referenced by ID in the workspace's `private_access_settings_id` property. - - This operation completely overwrites your existing private access settings object attached to your - workspaces. All workspaces attached to the private access settings are affected by any change. 
If - `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of - these changes might take several minutes to propagate to the workspace API. - - You can share one private access settings object with multiple workspaces in a single account. - However, private access settings are specific to AWS regions, so only workspaces in the same AWS - region can use a given private access settings object. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + over AWS PrivateLink. To use AWS PrivateLink, a workspace must have a private access settings object + referenced by ID in the workspace's private_access_settings_id property. This operation completely + overwrites your existing private access settings object attached to your workspaces. All workspaces + attached to the private access settings are affected by any change. If public_access_enabled, + private_access_level, or allowed_vpc_endpoint_ids are updated, effects of these changes might take + several minutes to propagate to the workspace API. You can share one private access settings object + with multiple workspaces in a single account. However, private access settings are specific to AWS + regions, so only workspaces in the same AWS region can use a given private access settings object. + Before configuring PrivateLink, read the Databricks article about PrivateLink. :param private_access_settings_id: str - Databricks Account API private access settings ID. - :param private_access_settings_name: str - The human-readable name of the private access settings object. - :param region: str - The cloud region for workspaces associated with this private access settings object. - :param allowed_vpc_endpoint_ids: List[str] (optional) - An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering - the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in - AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints - that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via public - internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html - :param private_access_level: :class:`PrivateAccessLevel` (optional) - :param public_access_enabled: bool (optional) - Determines if the workspace can be accessed over public internet. For fully private workspaces, you - can optionally specify `false`, but only if you implement both the front-end and the back-end - PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - + Databricks private access settings ID. + :param customer_facing_private_access_settings: :class:`PrivateAccessSettings` + Properties of the new private access settings object. 
+ :returns: :class:`PrivateAccessSettings` """ - body = {} - if allowed_vpc_endpoint_ids is not None: - body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids] - if private_access_level is not None: - body["private_access_level"] = private_access_level.value - if private_access_settings_name is not None: - body["private_access_settings_name"] = private_access_settings_name - if public_access_enabled is not None: - body["public_access_enabled"] = public_access_enabled - if region is not None: - body["region"] = region + body = customer_facing_private_access_settings.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", body=body, headers=headers, ) + return PrivateAccessSettings.from_dict(res) class StorageAPI: @@ -2195,24 +2220,27 @@ class StorageAPI: def __init__(self, api_client): self._api = api_client - def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration: - """Creates new storage configuration for an account, specified by ID. Uploads a storage configuration - object that represents the root AWS S3 bucket in your account. Databricks stores related workspace - assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the - required bucket policy. - - For information about how to create a new workspace with this API, see [Create a new workspace using - the Account API] - - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html + def create( + self, storage_configuration_name: str, root_bucket_info: RootBucketInfo, *, role_arn: Optional[str] = None + ) -> StorageConfiguration: + """Creates a Databricks storage configuration for an account. :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` + Root S3 bucket information. + :param role_arn: str (optional) + Optional IAM role that is used to access the workspace catalog which is created during workspace + creation for UC by Default. If a storage configuration with this field populated is used to create a + workspace, then a workspace catalog is created together with the workspace. The workspace catalog + shares the root bucket with internal workspace storage (including DBFS root) but uses a dedicated + bucket path prefix. :returns: :class:`StorageConfiguration` """ body = {} + if role_arn is not None: + body["role_arn"] = role_arn if root_bucket_info is not None: body["root_bucket_info"] = root_bucket_info.as_dict() if storage_configuration_name is not None: @@ -2227,31 +2255,30 @@ def create(self, storage_configuration_name: str, root_bucket_info: RootBucketIn ) return StorageConfiguration.from_dict(res) - def delete(self, storage_configuration_id: str): + def delete(self, storage_configuration_id: str) -> StorageConfiguration: """Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. :param storage_configuration_id: str - Databricks Account API storage configuration ID. 
- + :returns: :class:`StorageConfiguration` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", headers=headers, ) + return StorageConfiguration.from_dict(res) def get(self, storage_configuration_id: str) -> StorageConfiguration: """Gets a Databricks storage configuration for an account, both specified by ID. :param storage_configuration_id: str - Databricks Account API storage configuration ID. :returns: :class:`StorageConfiguration` """ @@ -2268,7 +2295,7 @@ def get(self, storage_configuration_id: str) -> StorageConfiguration: return StorageConfiguration.from_dict(res) def list(self) -> Iterator[StorageConfiguration]: - """Gets a list of all Databricks storage configurations for your account, specified by ID. + """Lists Databricks storage configurations for an account, specified by ID. :returns: Iterator over :class:`StorageConfiguration` @@ -2290,11 +2317,11 @@ def __init__(self, api_client): def create( self, - vpc_endpoint_name: str, *, aws_vpc_endpoint_id: Optional[str] = None, gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, region: Optional[str] = None, + vpc_endpoint_name: Optional[str] = None, ) -> VpcEndpoint: """Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. @@ -2309,13 +2336,14 @@ def create( [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - :param vpc_endpoint_name: str - The human-readable name of the storage configuration. :param aws_vpc_endpoint_id: str (optional) The ID of the VPC endpoint object in AWS. :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional) + The cloud info of this vpc endpoint. :param region: str (optional) - The AWS region in which this VPC endpoint object exists. + The region in which this VPC endpoint object exists. + :param vpc_endpoint_name: str (optional) + The human-readable name of the storage configuration. :returns: :class:`VpcEndpoint` """ @@ -2338,29 +2366,23 @@ def create( ) return VpcEndpoint.from_dict(res) - def delete(self, vpc_endpoint_id: str): - """Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate - privately with Databricks over [AWS PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + def delete(self, vpc_endpoint_id: str) -> VpcEndpoint: + """Deletes a Databricks VPC endpoint configuration. You cannot delete a VPC endpoint configuration that + is associated with any workspace. :param vpc_endpoint_id: str - Databricks VPC endpoint ID. 
- + :returns: :class:`VpcEndpoint` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers ) + return VpcEndpoint.from_dict(res) def get(self, vpc_endpoint_id: str) -> VpcEndpoint: """Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate @@ -2385,11 +2407,7 @@ def get(self, vpc_endpoint_id: str) -> VpcEndpoint: return VpcEndpoint.from_dict(res) def list(self) -> Iterator[VpcEndpoint]: - """Gets a list of all VPC endpoints for an account, specified by ID. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Lists Databricks VPC endpoint configurations for an account. :returns: Iterator over :class:`VpcEndpoint` @@ -2448,7 +2466,6 @@ def wait_get_workspace_running( def create( self, - workspace_name: str, *, aws_region: Optional[str] = None, cloud: Optional[str] = None, @@ -2458,7 +2475,6 @@ def create( deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, - is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, @@ -2466,23 +2482,41 @@ def create( private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + workspace_name: Optional[str] = None, ) -> Wait[Workspace]: - """Creates a new workspace. + """Creates a new workspace using a credential configuration and a storage configuration, an optional + network configuration (if using a customer-managed VPC), an optional managed services key + configuration (if using customer-managed keys for managed services), and an optional storage key + configuration (if using customer-managed keys for storage). The key configurations used for managed + services and storage encryption can be the same or different. + + Important: This operation is asynchronous. A response with HTTP status code 200 means the request has + been accepted and is in progress, but does not mean that the workspace deployed successfully and is + running. The initial workspace status is typically PROVISIONING. Use the workspace ID (workspace_id) + field in the response to identify the new workspace and make repeated GET requests with the workspace + ID and check its status. The workspace becomes available when the status changes to RUNNING. + + You can share one customer-managed VPC with multiple workspaces in a single account. It is not + required to create a new VPC for each workspace. However, you cannot reuse subnets or Security Groups + between workspaces. If you plan to share one VPC with multiple workspaces, make sure you size your VPC + and subnets accordingly. Because a Databricks Account API network configuration encapsulates this + information, you cannot reuse a Databricks Account API network configuration across workspaces. + + For information about how to create a new workspace with this API including error handling, see + [Create a new workspace using the Account API]. + + Important: Customer-managed VPCs, PrivateLink, and customer-managed keys are supported on a limited + set of deployment and subscription types. 
If you have questions about availability, contact your + Databricks representative. - **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request - has been accepted and is in progress, but does not mean that the workspace deployed successfully and - is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID - (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests - with the workspace ID and check its status. The workspace becomes available when the status changes to - `RUNNING`. + This operation is available only if your account is on the E2 version of the platform or on a select + custom plan that allows multiple workspaces per account. + + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - :param workspace_name: str - The workspace's human-readable name. :param aws_region: str (optional) - The AWS region of the workspace's data plane. :param cloud: str (optional) - The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to - `gcp`. + The cloud name. This field always has the value `gcp`. :param cloud_resource_container: :class:`CloudResourceContainer` (optional) :param credentials_id: str (optional) ID of the workspace's credential configuration object. @@ -2492,55 +2526,49 @@ def create( key can be of maximum length of 127 characters, and cannot be empty. :param deployment_name: str (optional) The deployment name defines part of the subdomain for the workspace. The workspace URL for the web - application and REST APIs is `.cloud.databricks.com`. For example, if the - deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. + application and REST APIs is .cloud.databricks.com. For example, if the + deployment name is abcsales, your workspace URL will be https://abcsales.cloud.databricks.com. Hyphens are allowed. This property supports only the set of characters that are allowed in a - subdomain. - - To set this value, you must have a deployment name prefix. Contact your Databricks account team to - add an account deployment name prefix to your account. - - Workspace deployment names follow the account prefix and a hyphen. For example, if your account's - deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response - for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be - `acme-workspace-1.cloud.databricks.com`. - - You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment - name to only include the deployment prefix. For example, if your account's deployment prefix is - `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and - the workspace URL is `acme.cloud.databricks.com`. - - This value must be unique across all non-deleted deployments across all AWS regions. - - If a new workspace omits this property, the server generates a unique deployment name for you with - the pattern `dbc-xxxxxxxx-xxxx`. + subdomain. To set this value, you must have a deployment name prefix. Contact your Databricks + account team to add an account deployment name prefix to your account. Workspace deployment names + follow the account prefix and a hyphen. 
For example, if your account's deployment prefix is acme and + the workspace deployment name is workspace-1, the JSON response for the deployment_name field + becomes acme-workspace-1. The workspace URL would be acme-workspace-1.cloud.databricks.com. You can + also set the deployment_name to the reserved keyword EMPTY if you want the deployment name to only + include the deployment prefix. For example, if your account's deployment prefix is acme and the + workspace deployment name is EMPTY, the deployment_name becomes acme only and the workspace URL is + acme.cloud.databricks.com. This value must be unique across all non-deleted deployments across all + AWS regions. If a new workspace omits this property, the server generates a unique deployment name + for you with the pattern dbc-xxxxxxxx-xxxx. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) :param gke_config: :class:`GkeConfig` (optional) - :param is_no_public_ip_enabled: bool (optional) - Whether no public IP is enabled for the workspace. :param location: str (optional) - The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. + The Google Cloud region of the workspace data plane in your Google account (for example, + `us-east4`). :param managed_services_customer_managed_key_id: str (optional) The ID of the workspace's managed services encryption key configuration object. This is used to help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query - history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. + history. The provided key configuration object property use_cases must contain MANAGED_SERVICES. :param network_id: str (optional) + The ID of the workspace's network configuration object. To use AWS PrivateLink, this field is + required. :param pricing_tier: :class:`PricingTier` (optional) :param private_access_settings_id: str (optional) - ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be - specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), - back-end (data plane to control plane connection), or both connection types. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", + ID of the workspace's private access settings object. Only used for PrivateLink. You must specify + this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace connection), + back-end (data plane to control plane connection), or both connection types. Before configuring + PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html :param storage_configuration_id: str (optional) - The ID of the workspace's storage configuration object. + ID of the workspace's storage configuration object. :param storage_customer_managed_key_id: str (optional) The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The - provided key configuration object property `use_cases` must contain `STORAGE`. + provided key configuration object property use_cases must contain STORAGE. 
+ :param workspace_name: str (optional) + The human-readable name of the workspace. :returns: Long-running operation waiter for :class:`Workspace`. @@ -2563,8 +2591,6 @@ def create( body["gcp_managed_network_config"] = gcp_managed_network_config.as_dict() if gke_config is not None: body["gke_config"] = gke_config.as_dict() - if is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = is_no_public_ip_enabled if location is not None: body["location"] = location if managed_services_customer_managed_key_id is not None: @@ -2597,7 +2623,6 @@ def create( def create_and_wait( self, - workspace_name: str, *, aws_region: Optional[str] = None, cloud: Optional[str] = None, @@ -2607,7 +2632,6 @@ def create_and_wait( deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, - is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, @@ -2615,6 +2639,7 @@ def create_and_wait( private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + workspace_name: Optional[str] = None, timeout=timedelta(minutes=20), ) -> Workspace: return self.create( @@ -2626,7 +2651,6 @@ def create_and_wait( deployment_name=deployment_name, gcp_managed_network_config=gcp_managed_network_config, gke_config=gke_config, - is_no_public_ip_enabled=is_no_public_ip_enabled, location=location, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_id=network_id, @@ -2637,42 +2661,34 @@ def create_and_wait( workspace_name=workspace_name, ).result(timeout=timeout) - def delete(self, workspace_id: int): - """Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. - However, it might take a few minutes for all workspaces resources to be deleted, depending on the size - and number of workspace resources. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + def delete(self, workspace_id: int) -> Workspace: + """Deletes a Databricks workspace, both specified by ID. :param workspace_id: int - Workspace ID. - + :returns: :class:`Workspace` """ headers = { "Accept": "application/json", } - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers) + res = self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers + ) + return Workspace.from_dict(res) def get(self, workspace_id: int) -> Workspace: """Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace - becomes available when the status changes to `RUNNING`. - - For information about how to create a new workspace with this API **including error handling**, see - [Create a new workspace using the Account API]. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + becomes available when the status changes to `RUNNING`. 
For information about how to create a new + workspace with this API **including error handling**, see [Create a new workspace using the Account + API]. [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html :param workspace_id: int - Workspace ID. :returns: :class:`Workspace` """ @@ -2687,10 +2703,7 @@ def get(self, workspace_id: int) -> Workspace: return Workspace.from_dict(res) def list(self) -> Iterator[Workspace]: - """Gets a list of all workspaces associated with an account, specified by ID. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + """Lists Databricks workspaces for an account. :returns: Iterator over :class:`Workspace` @@ -2704,202 +2717,58 @@ def list(self) -> Iterator[Workspace]: return [Workspace.from_dict(v) for v in res] def update( - self, - workspace_id: int, - *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, + self, workspace_id: int, customer_facing_workspace: Workspace, *, update_mask: Optional[str] = None ) -> Wait[Workspace]: - """Updates a workspace configuration for either a running workspace or a failed workspace. The elements - that can be updated varies between these two use cases. - - ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace - deployment for some fields, but not all fields. For a failed workspace, this request supports updates - to the following fields only: - Credential configuration ID - Storage configuration ID - Network - configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a - failed workspace only, you can convert a workspace with Databricks-managed VPC to use a - customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC - to be a Databricks-managed VPC. You can update the network configuration for a failed or running - workspace to add PrivateLink support, though you must also add a private access settings object. - Key - configuration ID for managed services (control plane storage, such as notebook source and Databricks - SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID - for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use - customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running - state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID - for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update - the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both - types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink - support on a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - applied. - Network connectivity configuration ID to add serverless stable IP support. 
You can add or - update the network connectivity configuration ID to ensure the workspace uses the same set of stable - IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from - the workspace once attached, you can only switch to another one. - - After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` - requests with the workspace ID and check the workspace status. The workspace is successful if the - status changes to `RUNNING`. - - For information about how to create a new workspace with this API **including error handling**, see - [Create a new workspace using the Account API]. - - ### Update a running workspace You can update a Databricks workspace configuration for running - workspaces for some fields, but not all fields. For a running workspace, this request supports - updating the following fields only: - Credential configuration ID - Network configuration ID. Used - only if you already use a customer-managed VPC. You cannot convert a running workspace from a - Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this - API for a failed or running workspace to add support for PrivateLink, although you also need to add a - private access settings object. - Key configuration ID for managed services (control plane storage, - such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data - with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) - that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to - encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK - for managed services, adding this ID enables managed services encryption for new or updated data. - Existing managed services data that existed before adding the key remains not encrypted with the DEK - until it is modified. If the workspace already has customer-managed keys for managed services, this - request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key - configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this - only if the workspace does not already have a customer-managed key configuration for workspace - storage. - Private access settings ID to add PrivateLink support. You can add or update the private - access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of - connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on - a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be applied. - - Network connectivity configuration ID to add serverless stable IP support. You can add or update the - network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR - blocks to access your resources. You cannot remove a network connectivity configuration from the - workspace once attached, you can only switch to another one. - - **Important**: To update a running workspace, your workspace must have no running compute resources - that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose - clusters, job clusters, pools with running clusters, and Classic SQL warehouses. 
If you do not - terminate all cluster instances in the workspace before calling this API, the request will fail. - - ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace - configuration, make repeated `GET` requests with the workspace ID and check the workspace status and - the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes - `PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the - workspace status changes to `RUNNING`. Note that you can also check the workspace status in the - [Account Console]. However, you cannot use or create clusters for another 20 minutes after that status - change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create - or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could - cause other unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status - stays at status `RUNNING` and the VPC change happens immediately. A change to the storage - customer-managed key configuration ID might take a few minutes to update, so continue to check the - workspace until you observe that it has been updated. If the update fails, the workspace might revert - silently to its original configuration. After the workspace has been updated, you cannot use or create - clusters for another 20 minutes. If you create or use clusters before this time interval elapses, - clusters do not launch successfully, fail, or could cause other unexpected behavior. - - If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes - to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to - the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20 - minute wait. - - **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment - types and subscription types. If you have questions about availability, contact your Databricks - representative. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. - - [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html + """Updates a workspace. :param workspace_id: int - Workspace ID. - :param aws_region: str (optional) - The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available - only for updating failed workspaces. - :param credentials_id: str (optional) - ID of the workspace's credential configuration object. This parameter is available for updating both - failed and running workspaces. - :param custom_tags: Dict[str,str] (optional) - The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string - of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The - key can be of maximum length of 127 characters, and cannot be empty. - :param managed_services_customer_managed_key_id: str (optional) - The ID of the workspace's managed services encryption key configuration object. This parameter is - available only for updating failed workspaces. 
- :param network_connectivity_config_id: str (optional) - :param network_id: str (optional) - The ID of the workspace's network configuration object. Used only if you already use a - customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a - customer-managed VPC by updating the workspace to add a network configuration ID. - :param private_access_settings_id: str (optional) - The ID of the workspace's private access settings configuration object. This parameter is available - only for updating failed workspaces. - :param storage_configuration_id: str (optional) - The ID of the workspace's storage configuration object. This parameter is available only for - updating failed workspaces. - :param storage_customer_managed_key_id: str (optional) - The ID of the key configuration object for workspace storage. This parameter is available for - updating both failed and running workspaces. + A unique integer ID for the workspace + :param customer_facing_workspace: :class:`Workspace` + :param update_mask: str (optional) + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. 
""" - body = {} - if aws_region is not None: - body["aws_region"] = aws_region - if credentials_id is not None: - body["credentials_id"] = credentials_id - if custom_tags is not None: - body["custom_tags"] = custom_tags - if managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id - if network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = network_connectivity_config_id - if network_id is not None: - body["network_id"] = network_id - if private_access_settings_id is not None: - body["private_access_settings_id"] = private_access_settings_id - if storage_configuration_id is not None: - body["storage_configuration_id"] = storage_configuration_id - if storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = storage_customer_managed_key_id + body = customer_facing_workspace.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask headers = { "Accept": "application/json", "Content-Type": "application/json", } op_response = self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", body=body, headers=headers + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", + query=query, + body=body, + headers=headers, ) return Wait( - self.wait_get_workspace_running, response=UpdateResponse.from_dict(op_response), workspace_id=workspace_id + self.wait_get_workspace_running, + response=Workspace.from_dict(op_response), + workspace_id=op_response["workspace_id"], ) def update_and_wait( self, workspace_id: int, + customer_facing_workspace: Workspace, *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, + update_mask: Optional[str] = None, timeout=timedelta(minutes=20), ) -> Workspace: return self.update( - aws_region=aws_region, - credentials_id=credentials_id, - custom_tags=custom_tags, - managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, - network_connectivity_config_id=network_connectivity_config_id, - network_id=network_id, - private_access_settings_id=private_access_settings_id, - storage_configuration_id=storage_configuration_id, - storage_customer_managed_key_id=storage_customer_managed_key_id, - workspace_id=workspace_id, + customer_facing_workspace=customer_facing_workspace, update_mask=update_mask, workspace_id=workspace_id ).result(timeout=timeout) diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index a6fab7023..3f51ae81a 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -17,6 +17,9 @@ @dataclass class AnomalyDetectionConfig: + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + last_run_id: Optional[str] 
= None """Run id of the last run of the workflow""" @@ -26,6 +29,8 @@ class AnomalyDetectionConfig: def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type.value if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -35,6 +40,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -45,11 +52,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" return cls( + job_type=_enum(d, "job_type", AnomalyDetectionJobType), last_run_id=d.get("last_run_id", None), latest_run_status=_enum(d, "latest_run_status", AnomalyDetectionRunStatus), ) +class AnomalyDetectionJobType(Enum): + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + + class AnomalyDetectionRunStatus(Enum): """Status of Anomaly Detection Job Run""" diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index f707aadf7..20a6087ba 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -13,8 +13,10 @@ import requests +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -1034,24 +1036,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DataframeSplitInput: return cls(columns=d.get("columns", None), data=d.get("data", None), index=d.get("index", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class EmailNotifications: on_update_failure: Optional[List[str]] = None @@ -1139,15 +1123,15 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum): @dataclass class EndpointCoreConfigInput: + name: str + """The name of the serving endpoint to update. This field is required.""" + auto_capture_config: Optional[AutoCaptureConfigInput] = None """Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - name: Optional[str] = None - """The name of the serving endpoint to update. 
This field is required.""" - served_entities: Optional[List[ServedEntityInput]] = None """The list of served entities under the serving endpoint config.""" diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 3004f17da..ce2b25e1d 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -1694,24 +1695,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePersonalComputeSettingResponse: return cls(etag=d.get("etag", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteRestrictWorkspaceAdminsSettingResponse: """The etag is returned.""" @@ -4548,24 +4531,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: ) -@dataclass -class ReplaceResponse: - def as_dict(self) -> dict: - """Serializes the ReplaceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: - """Deserializes the ReplaceResponse from a dictionary.""" - return cls() - - @dataclass class RestrictWorkspaceAdminsMessage: status: RestrictWorkspaceAdminsMessageStatus @@ -4664,24 +4629,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenResponse: return cls() -@dataclass -class SetStatusResponse: - def as_dict(self) -> dict: - """Serializes the SetStatusResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: - """Deserializes the SetStatusResponse from a dictionary.""" - return cls() - - @dataclass class SlackConfig: channel_id: Optional[str] = None @@ -5210,24 +5157,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: ) -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - WorkspaceConf = Dict[str, str] diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py index babfb1a09..322ffd47c 100755 --- a/databricks/sdk/service/settingsv2.py +++ 
b/databricks/sdk/service/settingsv2.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index ad791cc15..b6e491138 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -55,24 +56,6 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeltaSharingDependency: """Represents a UC dependency.""" @@ -1142,6 +1125,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. + + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[str]] = None """The set of privileges to remove.""" @@ -1152,6 +1144,8 @@ def as_dict(self) -> dict: body["add"] = [v for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v for v in self.remove] return body @@ -1163,6 +1157,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -1170,7 +1166,12 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: """Deserializes the PermissionsChange from a dictionary.""" - return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None)) + return cls( + add=d.get("add", None), + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + remove=d.get("remove", None), + ) class Privilege(Enum): @@ -1228,6 +1229,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. 
For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -1236,6 +1241,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -1245,6 +1252,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -1252,7 +1261,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -1875,6 +1888,10 @@ class ShareInfo: owner: Optional[str] = None """Username of current owner of share.""" + serverless_budget_policy_id: Optional[str] = None + """Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN]""" + storage_location: Optional[str] = None """Storage Location URL (full path) for the share.""" @@ -1902,6 +1919,8 @@ def as_dict(self) -> dict: body["objects"] = [v.as_dict() for v in self.objects] if self.owner is not None: body["owner"] = self.owner + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1927,6 +1946,8 @@ def as_shallow_dict(self) -> dict: body["objects"] = self.objects if self.owner is not None: body["owner"] = self.owner + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1947,6 +1968,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ShareInfo: name=d.get("name", None), objects=_repeated_dict(d, "objects", SharedDataObject), owner=d.get("owner", None), + serverless_budget_policy_id=d.get("serverless_budget_policy_id", None), storage_location=d.get("storage_location", None), storage_root=d.get("storage_root", None), updated_at=d.get("updated_at", None), @@ -3310,7 +3332,14 @@ class SharesAPI: def __init__(self, api_client): self._api = api_client - def create(self, name: str, *, comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: + def create( + self, + name: str, + *, + comment: Optional[str] = None, + serverless_budget_policy_id: Optional[str] = None, + storage_root: Optional[str] = None, + ) -> ShareInfo: """Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. @@ -3318,6 +3347,9 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti Name of the share. :param comment: str (optional) User-provided free-form text description. 
+ :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. @@ -3328,6 +3360,8 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti body["comment"] = comment if name is not None: body["name"] = name + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root headers = { @@ -3420,7 +3454,7 @@ def share_permissions( owner of the share. :param name: str - The name of the share. + The name of the Recipient. :param max_results: int (optional) Maximum number of permissions to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum @@ -3454,6 +3488,7 @@ def update( comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + serverless_budget_policy_id: Optional[str] = None, storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None, ) -> ShareInfo: @@ -3481,6 +3516,9 @@ def update( New name for the share. :param owner: str (optional) Username of current owner of share. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) @@ -3495,6 +3533,8 @@ def update( body["new_name"] = new_name if owner is not None: body["owner"] = owner + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root if updates is not None: diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 33b80c3c9..dcc8c88c6 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict, _repeated_enum) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -1080,9 +1082,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2Subscription: @dataclass class BaseChunkInfo: - """Describes metadata for a particular chunk, within a result set; this structure is used both - within a manifest, and when fetching individual chunk data or links.""" - byte_count: Optional[int] = None """The number of bytes in the result chunk. 
This field is not available when using `INLINE` disposition.""" @@ -1133,24 +1132,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseChunkInfo: ) -@dataclass -class CancelExecutionResponse: - def as_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelExecutionResponse: - """Deserializes the CancelExecutionResponse from a dictionary.""" - return cls() - - @dataclass class Channel: """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be @@ -1686,8 +1667,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization class CreateWarehouseRequestWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2251,8 +2230,6 @@ class Disposition(Enum): class EditWarehouseRequestWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2341,6 +2318,7 @@ class EndpointHealth: """Deprecated. split into summary and details for security""" status: Optional[Status] = None + """Health status of the endpoint.""" summary: Optional[str] = None """A short summary of the health status in case of degraded/failed warehouses.""" @@ -2463,8 +2441,10 @@ class EndpointInfo: """ODBC parameters for the SQL warehouse""" spot_instance_policy: Optional[SpotInstancePolicy] = None + """Configurations whether the endpoint should use spot instances.""" state: Optional[State] = None + """state of the endpoint""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -2594,8 +2574,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: class EndpointInfoWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2725,6 +2703,9 @@ class ExternalLink: which point a new `external_link` must be requested.""" external_link: Optional[str] = None + """A URL pointing to a chunk of result data, hosted by an external service, with a short expiration + time (<= 15 minutes). As this URL contains a temporary credential, it should be considered + sensitive and the client should not expose this URL in a log.""" http_headers: Optional[Dict[str, str]] = None """HTTP headers that must be included with a GET request to the `external_link`. Each header is @@ -2735,7 +2716,7 @@ class ExternalLink: next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a - :method:statementexecution/getStatementResultChunkN request.""" + :method:statementexecution/getstatementresultchunkn request.""" next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. 
If absent, indicates there are no more @@ -3077,8 +3058,10 @@ class GetWarehouseResponse: """ODBC parameters for the SQL warehouse""" spot_instance_policy: Optional[SpotInstancePolicy] = None + """Configurations whether the endpoint should use spot instances.""" state: Optional[State] = None + """state of the endpoint""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -3087,6 +3070,8 @@ class GetWarehouseResponse: Supported values: - Number of tags < 45.""" warehouse_type: Optional[GetWarehouseResponseWarehouseType] = None + """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`.""" def as_dict(self) -> dict: """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body.""" @@ -3206,8 +3191,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWarehouseResponse: class GetWarehouseResponseWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -3226,6 +3209,9 @@ class GetWorkspaceWarehouseConfigResponse: """Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K""" + enable_serverless_compute: Optional[bool] = None + """Enable Serverless compute for SQL warehouses""" + enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be @@ -3240,7 +3226,8 @@ class GetWorkspaceWarehouseConfigResponse: """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" instance_profile_arn: Optional[str] = None - """AWS Only: Instance profile used to pass IAM role to the cluster""" + """AWS Only: The instance profile used to pass an IAM role to the SQL warehouses. 
This + configuration is also applied to the workspace's serverless compute for notebooks and jobs.""" security_policy: Optional[GetWorkspaceWarehouseConfigResponseSecurityPolicy] = None """Security policy for warehouses""" @@ -3257,6 +3244,8 @@ def as_dict(self) -> dict: body["config_param"] = self.config_param.as_dict() if self.data_access_config: body["data_access_config"] = [v.as_dict() for v in self.data_access_config] + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute if self.enabled_warehouse_types: body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] if self.global_param: @@ -3280,6 +3269,8 @@ def as_shallow_dict(self) -> dict: body["config_param"] = self.config_param if self.data_access_config: body["data_access_config"] = self.data_access_config + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute if self.enabled_warehouse_types: body["enabled_warehouse_types"] = self.enabled_warehouse_types if self.global_param: @@ -3301,6 +3292,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse: channel=_from_dict(d, "channel", Channel), config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), + enable_serverless_compute=d.get("enable_serverless_compute", None), enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair), global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), google_service_account=d.get("google_service_account", None), @@ -3311,7 +3303,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse: class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum): - """Security policy for warehouses""" + """Security policy to be used for warehouses""" DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" NONE = "NONE" @@ -4264,12 +4256,18 @@ def from_dict(cls, d: Dict[str, Any]) -> ListVisualizationsForQueryResponse: @dataclass class ListWarehousesResponse: + next_page_token: Optional[str] = None + """A token, which can be sent as `page_token` to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + warehouses: Optional[List[EndpointInfo]] = None """A list of warehouses and their configurations.""" def as_dict(self) -> dict: """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token if self.warehouses: body["warehouses"] = [v.as_dict() for v in self.warehouses] return body @@ -4277,6 +4275,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token if self.warehouses: body["warehouses"] = self.warehouses return body @@ -4284,7 +4284,9 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListWarehousesResponse: """Deserializes the ListWarehousesResponse from a dictionary.""" - return cls(warehouses=_repeated_dict(d, "warehouses", EndpointInfo)) + return cls( + next_page_token=d.get("next_page_token", None), warehouses=_repeated_dict(d, "warehouses", EndpointInfo) + ) @dataclass @@ -5551,6 +5553,12 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreResponse: @dataclass class ResultData: + """Contains the result data of a single chunk when using `INLINE` disposition. When using + `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide URLs to the + result data in cloud storage. Exactly one of these alternatives is used. (While the + `external_links` array prepares the API to return multiple links in a single response. Currently + only a single link is returned.)""" + byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" @@ -5567,7 +5575,7 @@ class ResultData: next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a - :method:statementexecution/getStatementResultChunkN request.""" + :method:statementexecution/getstatementresultchunkn request.""" next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more @@ -5855,7 +5863,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SetResponse: class SetWorkspaceWarehouseConfigRequestSecurityPolicy(Enum): - """Security policy for warehouses""" + """Security policy to be used for warehouses""" DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" NONE = "NONE" @@ -5881,7 +5889,20 @@ def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigResponse: class SpotInstancePolicy(Enum): - """Configurations whether the warehouse should use spot instances.""" + """EndpointSpotInstancePolicy configures whether the endpoint should use spot instances. 
+ + The breakdown of how the EndpointSpotInstancePolicy converts to per cloud configurations is: + + +-------+--------------------------------------+--------------------------------+ | Cloud | + COST_OPTIMIZED | RELIABILITY_OPTIMIZED | + +-------+--------------------------------------+--------------------------------+ | AWS | On + Demand Driver with Spot Executors | On Demand Driver and Executors | | AZURE | On Demand Driver + and Executors | On Demand Driver and Executors | + +-------+--------------------------------------+--------------------------------+ + + While including "spot" in the enum name may limit the the future extensibility of this field + because it limits this enum to denoting "spot or not", this is the field that PM recommends + after discussion with customers per SC-48783.""" COST_OPTIMIZED = "COST_OPTIMIZED" POLICY_UNSPECIFIED = "POLICY_UNSPECIFIED" @@ -5907,7 +5928,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StartWarehouseResponse: class State(Enum): - """State of the warehouse""" + """* State of a warehouse.""" DELETED = "DELETED" DELETING = "DELETING" @@ -6011,11 +6032,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StatementResponse: class StatementState(Enum): - """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution - failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; - can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: - execution successful, and statement closed; result no longer available for fetch""" CANCELED = "CANCELED" CLOSED = "CLOSED" @@ -6032,6 +6048,11 @@ class StatementStatus: error: Optional[ServiceError] = None state: Optional[StatementState] = None + """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - + `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution + failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; + can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: + execution successful, and statement closed; result no longer available for fetch""" def as_dict(self) -> dict: """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body.""" @@ -6058,12 +6079,10 @@ def from_dict(cls, d: Dict[str, Any]) -> StatementStatus: class Status(Enum): - """Health status of the warehouse.""" DEGRADED = "DEGRADED" FAILED = "FAILED" HEALTHY = "HEALTHY" - STATUS_UNSPECIFIED = "STATUS_UNSPECIFIED" @dataclass @@ -6214,20 +6233,35 @@ def from_dict(cls, d: Dict[str, Any]) -> TerminationReason: class TerminationReasonCode(Enum): - """status code indicating why the cluster was terminated""" + """The status code indicating why the cluster was terminated""" ABUSE_DETECTED = "ABUSE_DETECTED" + ACCESS_TOKEN_FAILURE = "ACCESS_TOKEN_FAILURE" + ALLOCATION_TIMEOUT = "ALLOCATION_TIMEOUT" + ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = "ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY" + ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS" + ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS" + ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS" + ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_READY_CLUSTERS" + ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = 
"ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS" + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS" ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" + AWS_INACCESSIBLE_KMS_KEY_FAILURE = "AWS_INACCESSIBLE_KMS_KEY_FAILURE" + AWS_INSTANCE_PROFILE_UPDATE_FAILURE = "AWS_INSTANCE_PROFILE_UPDATE_FAILURE" AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + AWS_INVALID_KEY_PAIR = "AWS_INVALID_KEY_PAIR" + AWS_INVALID_KMS_KEY_STATE = "AWS_INVALID_KMS_KEY_STATE" AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" + AWS_RESOURCE_QUOTA_EXCEEDED = "AWS_RESOURCE_QUOTA_EXCEEDED" AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = "AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE" AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING" @@ -6236,65 +6270,148 @@ class TerminationReasonCode(Enum): AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG" + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED" + BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE" + CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE" + CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED" CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = "CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED" CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG" CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG" CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" + CLUSTER_OPERATION_THROTTLED = "CLUSTER_OPERATION_THROTTLED" + CLUSTER_OPERATION_TIMEOUT = "CLUSTER_OPERATION_TIMEOUT" COMMUNICATION_LOST = "COMMUNICATION_LOST" CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = "CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG" DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" + DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" + DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" + DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = 
"DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" + DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" + DRIVER_EVICTION = "DRIVER_EVICTION" + DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" + DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" + DRIVER_OUT_OF_DISK = "DRIVER_OUT_OF_DISK" + DRIVER_OUT_OF_MEMORY = "DRIVER_OUT_OF_MEMORY" + DRIVER_POD_CREATION_FAILURE = "DRIVER_POD_CREATION_FAILURE" + DRIVER_UNEXPECTED_FAILURE = "DRIVER_UNEXPECTED_FAILURE" + DRIVER_UNHEALTHY = "DRIVER_UNHEALTHY" DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = "DYNAMIC_SPARK_CONF_SIZE_EXCEEDED" + EOS_SPARK_IMAGE = "EOS_SPARK_IMAGE" EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" + EXECUTOR_POD_UNSCHEDULED = "EXECUTOR_POD_UNSCHEDULED" + GCP_API_RATE_QUOTA_EXCEEDED = "GCP_API_RATE_QUOTA_EXCEEDED" + GCP_DENIED_BY_ORG_POLICY = "GCP_DENIED_BY_ORG_POLICY" + GCP_FORBIDDEN = "GCP_FORBIDDEN" + GCP_IAM_TIMEOUT = "GCP_IAM_TIMEOUT" + GCP_INACCESSIBLE_KMS_KEY_FAILURE = "GCP_INACCESSIBLE_KMS_KEY_FAILURE" + GCP_INSUFFICIENT_CAPACITY = "GCP_INSUFFICIENT_CAPACITY" + GCP_IP_SPACE_EXHAUSTED = "GCP_IP_SPACE_EXHAUSTED" + GCP_KMS_KEY_PERMISSION_DENIED = "GCP_KMS_KEY_PERMISSION_DENIED" + GCP_NOT_FOUND = "GCP_NOT_FOUND" GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" + GCP_RESOURCE_QUOTA_EXCEEDED = "GCP_RESOURCE_QUOTA_EXCEEDED" + GCP_SERVICE_ACCOUNT_ACCESS_DENIED = "GCP_SERVICE_ACCOUNT_ACCESS_DENIED" GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" + GCP_SERVICE_ACCOUNT_NOT_FOUND = "GCP_SERVICE_ACCOUNT_NOT_FOUND" + GCP_SUBNET_NOT_READY = "GCP_SUBNET_NOT_READY" + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = "GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED" + GKE_BASED_CLUSTER_TERMINATION = "GKE_BASED_CLUSTER_TERMINATION" GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" INACTIVITY = "INACTIVITY" + INIT_CONTAINER_NOT_FINISHED = "INIT_CONTAINER_NOT_FINISHED" INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" + INSTANCE_POOL_MAX_CAPACITY_REACHED = "INSTANCE_POOL_MAX_CAPACITY_REACHED" + INSTANCE_POOL_NOT_FOUND = "INSTANCE_POOL_NOT_FOUND" INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = "INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG" + INTERNAL_CAPACITY_FAILURE = "INTERNAL_CAPACITY_FAILURE" INTERNAL_ERROR = "INTERNAL_ERROR" INVALID_ARGUMENT = "INVALID_ARGUMENT" + INVALID_AWS_PARAMETER = "INVALID_AWS_PARAMETER" + INVALID_INSTANCE_PLACEMENT_PROTOCOL = "INVALID_INSTANCE_PLACEMENT_PROTOCOL" INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" + INVALID_WORKER_IMAGE_FAILURE = "INVALID_WORKER_IMAGE_FAILURE" + IN_PENALTY_BOX = "IN_PENALTY_BOX" IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" JOB_FINISHED = "JOB_FINISHED" K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT" + MAINTENANCE_MODE = "MAINTENANCE_MODE" METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" + NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT" + NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE" + NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE" + 
NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE" + NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE" + NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE" + NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" + NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" + NO_MATCHED_K8S = "NO_MATCHED_K8S" + NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE" + POD_ASSIGNMENT_FAILURE = "POD_ASSIGNMENT_FAILURE" + POD_SCHEDULING_FAILURE = "POD_SCHEDULING_FAILURE" REQUEST_REJECTED = "REQUEST_REJECTED" REQUEST_THROTTLED = "REQUEST_THROTTLED" + RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED" + SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" + SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" + SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES" SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD" SPARK_ERROR = "SPARK_ERROR" SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE" + SPARK_IMAGE_DOWNLOAD_THROTTLED = "SPARK_IMAGE_DOWNLOAD_THROTTLED" + SPARK_IMAGE_NOT_FOUND = "SPARK_IMAGE_NOT_FOUND" SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE" SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION" + SSH_BOOTSTRAP_FAILURE = "SSH_BOOTSTRAP_FAILURE" STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE" + STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = "STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG" + STORAGE_DOWNLOAD_FAILURE_SLOW = "STORAGE_DOWNLOAD_FAILURE_SLOW" + STORAGE_DOWNLOAD_FAILURE_THROTTLED = "STORAGE_DOWNLOAD_FAILURE_THROTTLED" STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE" SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE" TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" TRIAL_EXPIRED = "TRIAL_EXPIRED" UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE" + UNEXPECTED_POD_RECREATION = "UNEXPECTED_POD_RECREATION" UNKNOWN = "UNKNOWN" UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" + USAGE_POLICY_ENTITLEMENT_DENIED = "USAGE_POLICY_ENTITLEMENT_DENIED" + USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION" USER_REQUEST = "USER_REQUEST" WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR" WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR" + WORKSPACE_UPDATE = "WORKSPACE_UPDATE" class TerminationReasonType(Enum): @@ -7008,12 +7125,14 @@ def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsDescription: @dataclass class WarehouseTypePair: + """* Configuration values to enable or disable the access to specific warehouse types in the + workspace.""" + enabled: Optional[bool] = None """If set to false the specific warehouse type will not be be allowed as a value for warehouse_type in CreateWarehouse and EditWarehouse""" warehouse_type: Optional[WarehouseTypePairWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`.""" def as_dict(self) -> 
dict: """Serializes the WarehouseTypePair into a dictionary suitable for use as a JSON request body.""" @@ -7042,7 +7161,6 @@ def from_dict(cls, d: Dict[str, Any]) -> WarehouseTypePair: class WarehouseTypePairWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -8714,10 +8832,10 @@ def delete(self, id: str): def update( self, - id: str, *, created_at: Optional[str] = None, description: Optional[str] = None, + id: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[LegacyQuery] = None, @@ -8731,11 +8849,11 @@ def update( [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - :param id: str - The UUID for this visualization. :param created_at: str (optional) :param description: str (optional) A short description of this visualization. This is not displayed in the UI. + :param id: str (optional) + The UUID for this visualization. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. :param options: Any (optional) @@ -8753,6 +8871,8 @@ def update( body["created_at"] = created_at if description is not None: body["description"] = description + if id is not None: + body["id"] = id if name is not None: body["name"] = name if options is not None: @@ -8819,17 +8939,17 @@ class StatementExecutionAPI: the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement. - In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 - seconds; if the statement execution finishes within this time, the result data is returned directly in the - response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns - with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call - doesn't wait for the statement to finish but returns directly with a statement ID. The status of the - statement execution can be polled by issuing :method:statementexecution/getStatement with the statement - ID. Once the execution has succeeded, this call also returns the result and metadata in the response. - - Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10 - seconds; if the statement execution finishes within this time, the result data is returned directly in the - response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can - be used to fetch status and results in the same way as in the asynchronous mode. + In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The call waits up to + 30 seconds; if the statement execution finishes within this time, the result data is returned directly in + the response. If the execution takes longer than 30 seconds, the execution is canceled and the call + returns with a `CANCELED` state. - **Asynchronous mode** (`wait_timeout=0s` and `on_wait_timeout` is + ignored): The call doesn't wait for the statement to finish but returns directly with a statement ID. The + status of the statement execution can be polled by issuing :method:statementexecution/getStatement with + the statement ID. Once the execution has succeeded, this call also returns the result and metadata in the + response. 
- **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits + for up to 10 seconds; if the statement execution finishes within this time, the result data is returned + directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The + statement ID can be used to fetch status and results in the same way as in the asynchronous mode. Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest @@ -8884,7 +9004,7 @@ def __init__(self, api_client): def cancel_execution(self, statement_id: str): """Requests that an executing statement be canceled. Callers must poll for status to see the terminal - state. + state. The cancel response is empty; receiving a response indicates successful receipt of the request. :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required @@ -8912,7 +9032,52 @@ def execute_statement( schema: Optional[str] = None, wait_timeout: Optional[str] = None, ) -> StatementResponse: - """Execute a SQL statement + """Execute a SQL statement and optionally await its results for a specified time. + + **Use case: small result sets with INLINE + JSON_ARRAY** + + For flows that generate small and predictable result sets (<= 25 MiB), `INLINE` responses of + `JSON_ARRAY` result data are typically the simplest way to execute and fetch result data. + + **Use case: large result sets with EXTERNAL_LINKS** + + Using `EXTERNAL_LINKS` to fetch result data allows you to fetch large result sets efficiently. The + main differences from using `INLINE` disposition are that the result data is accessed with URLs, and + that there are 3 supported formats: `JSON_ARRAY`, `ARROW_STREAM` and `CSV` compared to only + `JSON_ARRAY` with `INLINE`. + + **URLs** + + External links point to data stored within your workspace's internal storage, in the form of a URL. + The URLs are valid for only a short period, <= 15 minutes. Alongside each `external_link` is an + expiration field indicating the time at which the URL is no longer valid. In `EXTERNAL_LINKS` mode, + chunks can be resolved and fetched multiple times and in parallel. + + ---- + + ### **Warning: Databricks strongly recommends that you protect the URLs that are returned by the + `EXTERNAL_LINKS` disposition.** + + When you use the `EXTERNAL_LINKS` disposition, a short-lived URL is generated, which can be used to + download the results directly from cloud storage. As a short-lived credential is embedded in this URL, you should + protect the URL. + + Because the URLs are already generated with embedded temporary credentials, you must not set an `Authorization` + header in the download requests. + + The `EXTERNAL_LINKS` disposition can be disabled upon request by creating a support case. + + See also [Security best practices]. + + ---- + + StatementResponse contains `statement_id` and `status`; other fields might be absent or present + depending on context. If the SQL warehouse fails to execute the provided statement, a 200 response is + returned with `status.state` set to `FAILED` (in contrast to a failure when accepting the request, + which results in a non-200 response). Details of the error can be found at `status.error` in case of + execution failures. + + [Security best practices]: https://docs.databricks.com/sql/admin/sql-execution-tutorial.html#security-best-practices :param statement: str The SQL statement to execute.
The statement can optionally be parameterized, see `parameters`. The @@ -8926,12 +9091,32 @@ def execute_statement( representations and might not match the final size in the requested `format`. If the result was truncated due to the byte limit, then `truncated` in the response is set to `true`. When using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not - explcitly set. + explicitly set. :param catalog: str (optional) Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) + The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. + + Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` + format, in a series of chunks. If a given statement produces a result set with a size larger than 25 + MiB, that statement execution is aborted, and no result set will be available. + + **NOTE** Byte limits are computed based upon internal representations of the result set data, and + might not match the sizes visible in JSON responses. + + Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: + URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition + allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The + resulting links have two important properties: + + 1. They point to resources _external_ to the Databricks compute; therefore any associated + authentication information (typically a personal access token, OAuth token, or similar) _must be + removed_ when fetching from these links. + + 2. These are URLs with a specific expiration, indicated in the response. The behavior when + attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -8982,13 +9167,13 @@ def execute_statement( For example, the following statement contains two parameters, `my_name` and `my_date`: - SELECT * FROM my_table WHERE name = :my_name AND date = :my_date + ``` SELECT * FROM my_table WHERE name = :my_name AND date = :my_date ``` The parameters can be passed in the request body as follows: - { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", + ` { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": - "2020-01-01", "type": "DATE" } ] } + "2020-01-01", "type": "DATE" } ] } ` Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. @@ -9049,15 +9234,16 @@ def execute_statement( "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/sql/statements/", body=body, headers=headers) + res = self._api.do("POST", "/api/2.0/sql/statements", body=body, headers=headers) return StatementResponse.from_dict(res) def get_statement(self, statement_id: str) -> StatementResponse: - """This request can be used to poll for the statement's status. When the `status.state` field is - `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. 
When the - statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the - state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and - further calls will receive an HTTP 404 response. + """This request can be used to poll for the statement's status. StatementResponse contains `statement_id` + and `status`; other fields might be absent or present depending on context. When the `status.state` + field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. + When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 + with the state set. After at least 12 hours in terminal state, the statement is removed from the + warehouse and further calls will receive an HTTP 404 response. **NOTE** This call currently might take up to 5 seconds to get the latest status and result. @@ -9082,6 +9268,7 @@ def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> R can be used to fetch subsequent chunks. The response structure is identical to the nested `result` element described in the :method:statementexecution/getStatement request, and similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set. + Depending on `disposition`, the response returns chunks of data either inline, or as links. :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required @@ -9192,8 +9379,7 @@ def create( The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for - non-serverless warehouses - 0 indicates no autostop. + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins :param channel: :class:`Channel` (optional) @@ -9234,12 +9420,15 @@ def create( Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) + Configures whether the endpoint should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set it to `PRO` and + also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. @@ -9378,7 +9567,7 @@ def edit( Defaults to false. :param enable_serverless_compute: bool (optional) - Configures whether the warehouse should use serverless compute. + Configures whether the warehouse should use serverless compute :param instance_profile_arn: str (optional) Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) @@ -9400,12 +9589,15 @@ def edit( Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) + Configures whether the endpoint should use spot instances. 
:param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set it to `PRO` and + also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. @@ -9444,7 +9636,7 @@ def edit( } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/edit", body=body, headers=headers) - return Wait(self.wait_get_warehouse_running, response=EditWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_running, id=id) def edit_and_wait( self, @@ -9545,26 +9737,45 @@ def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: res = self._api.do("GET", "/api/2.0/sql/config/warehouses", headers=headers) return GetWorkspaceWarehouseConfigResponse.from_dict(res) - def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None, run_as_user_id: Optional[int] = None + ) -> Iterator[EndpointInfo]: """Lists all SQL warehouses that a user has access to. + :param page_size: int (optional) + The max number of warehouses to return. + :param page_token: str (optional) + A page token, received from a previous `ListWarehouses` call. Provide this to retrieve the + subsequent page; otherwise the first page will be retrieved. + + When paginating, all other parameters provided to `ListWarehouses` must match the call that provided + the page token. :param run_as_user_id: int (optional) - Service Principal which will be used to fetch the list of warehouses. If not specified, the user - from the session header is used. + Service Principal which will be used to fetch the list of endpoints. If not specified, SQL Gateway + will use the user from the session header. 
:returns: Iterator over :class:`EndpointInfo` """ query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token if run_as_user_id is not None: query["run_as_user_id"] = run_as_user_id headers = { "Accept": "application/json", } - json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers) - parsed = ListWarehousesResponse.from_dict(json).warehouses - return parsed if parsed is not None else [] + while True: + json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers) + if "warehouses" in json: + for v in json["warehouses"]: + yield EndpointInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] def set_permissions( self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None @@ -9595,6 +9806,7 @@ def set_workspace_warehouse_config( channel: Optional[Channel] = None, config_param: Optional[RepeatedEndpointConfPairs] = None, data_access_config: Optional[List[EndpointConfPair]] = None, + enable_serverless_compute: Optional[bool] = None, enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None, global_param: Optional[RepeatedEndpointConfPairs] = None, google_service_account: Optional[str] = None, @@ -9610,6 +9822,8 @@ def set_workspace_warehouse_config( Deprecated: Use sql_configuration_parameters :param data_access_config: List[:class:`EndpointConfPair`] (optional) Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K + :param enable_serverless_compute: bool (optional) + Enable Serverless compute for SQL warehouses :param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional) List of Warehouse Types allowed in this workspace (limits allowed value of the type field in CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be @@ -9621,7 +9835,8 @@ def set_workspace_warehouse_config( :param google_service_account: str (optional) GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage :param instance_profile_arn: str (optional) - AWS Only: Instance profile used to pass IAM role to the cluster + AWS Only: The instance profile used to pass an IAM role to the SQL warehouses. This configuration is + also applied to the workspace's serverless compute for notebooks and jobs. 
:param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional) Security policy for warehouses :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional) @@ -9636,6 +9851,8 @@ def set_workspace_warehouse_config( body["config_param"] = config_param.as_dict() if data_access_config is not None: body["data_access_config"] = [v.as_dict() for v in data_access_config] + if enable_serverless_compute is not None: + body["enable_serverless_compute"] = enable_serverless_compute if enabled_warehouse_types is not None: body["enabled_warehouse_types"] = [v.as_dict() for v in enabled_warehouse_types] if global_param is not None: @@ -9671,7 +9888,7 @@ def start(self, id: str) -> Wait[GetWarehouseResponse]: } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/start", headers=headers) - return Wait(self.wait_get_warehouse_running, response=StartWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_running, id=id) def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.start(id=id).result(timeout=timeout) @@ -9692,7 +9909,7 @@ def stop(self, id: str) -> Wait[GetWarehouseResponse]: } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/stop", headers=headers) - return Wait(self.wait_get_warehouse_stopped, response=StopWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_stopped, id=id) def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.stop(id=id).result(timeout=timeout) diff --git a/databricks/sdk/service/tags.py b/databricks/sdk/service/tags.py index 9fa90681a..b59c81740 100755 --- a/databricks/sdk/service/tags.py +++ b/databricks/sdk/service/tags.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from typing import Any, Dict, Iterator, List, Optional -from ._internal import _repeated_dict +from databricks.sdk.service._internal import _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -14,6 +14,40 @@ # all definitions in this file are in alphabetical order +@dataclass +class ListTagAssignmentsResponse: + next_page_token: Optional[str] = None + """Pagination token to request the next page of tag assignments""" + + tag_assignments: Optional[List[TagAssignment]] = None + + def as_dict(self) -> dict: + """Serializes the ListTagAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tag_assignments: + body["tag_assignments"] = [v.as_dict() for v in self.tag_assignments] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListTagAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tag_assignments: + body["tag_assignments"] = self.tag_assignments + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListTagAssignmentsResponse: + """Deserializes the ListTagAssignmentsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + tag_assignments=_repeated_dict(d, "tag_assignments", TagAssignment), + ) + + @dataclass class ListTagPoliciesResponse: next_page_token: Optional[str] = None @@ -46,6 +80,57 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTagPoliciesResponse: ) +@dataclass +class TagAssignment: + entity_type: str + 
"""The type of entity to which the tag is assigned. Allowed value is dashboards""" + + entity_id: str + """The identifier of the entity to which the tag is assigned""" + + tag_key: str + """The key of the tag. The characters , . : / - = and leading/trailing spaces are not allowed""" + + tag_value: Optional[str] = None + """The value of the tag""" + + def as_dict(self) -> dict: + """Serializes the TagAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_id is not None: + body["entity_id"] = self.entity_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.tag_key is not None: + body["tag_key"] = self.tag_key + if self.tag_value is not None: + body["tag_value"] = self.tag_value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TagAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_id is not None: + body["entity_id"] = self.entity_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.tag_key is not None: + body["tag_key"] = self.tag_key + if self.tag_value is not None: + body["tag_value"] = self.tag_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TagAssignment: + """Deserializes the TagAssignment from a dictionary.""" + return cls( + entity_id=d.get("entity_id", None), + entity_type=d.get("entity_type", None), + tag_key=d.get("tag_key", None), + tag_value=d.get("tag_value", None), + ) + + @dataclass class TagPolicy: tag_key: str @@ -133,6 +218,152 @@ def from_dict(cls, d: Dict[str, Any]) -> Value: return cls(name=d.get("name", None)) +class TagAssignmentsAPI: + """Manage tag assignments on workspace-scoped objects.""" + + def __init__(self, api_client): + self._api = api_client + + def create_tag_assignment(self, tag_assignment: TagAssignment) -> TagAssignment: + """Create a tag assignment + + :param tag_assignment: :class:`TagAssignment` + + :returns: :class:`TagAssignment` + """ + body = tag_assignment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/entity-tag-assignments", body=body, headers=headers) + return TagAssignment.from_dict(res) + + def delete_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str): + """Delete a tag assignment + + :param entity_type: str + The type of entity to which the tag is assigned. Allowed value is dashboards + :param entity_id: str + The identifier of the entity to which the tag is assigned + :param tag_key: str + The key of the tag. The characters , . : / - = and leading/trailing spaces are not allowed + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", headers=headers + ) + + def get_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str) -> TagAssignment: + """Get a tag assignment + + :param entity_type: str + The type of entity to which the tag is assigned. Allowed value is dashboards + :param entity_id: str + The identifier of the entity to which the tag is assigned + :param tag_key: str + The key of the tag. The characters , . 
: / - = and leading/trailing spaces are not allowed + + :returns: :class:`TagAssignment` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", headers=headers + ) + return TagAssignment.from_dict(res) + + def list_tag_assignments( + self, entity_type: str, entity_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[TagAssignment]: + """List the tag assignments for an entity + + :param entity_type: str + The type of entity to which the tag is assigned. Allowed value is dashboards + :param entity_id: str + The identifier of the entity to which the tag is assigned + :param page_size: int (optional) + Optional. Maximum number of tag assignments to return in a single page + :param page_token: str (optional) + Pagination token to go to the next page of tag assignments. Requests first page if absent. + + :returns: Iterator over :class:`TagAssignment` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags", query=query, headers=headers + ) + if "tag_assignments" in json: + for v in json["tag_assignments"]: + yield TagAssignment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_tag_assignment( + self, entity_type: str, entity_id: str, tag_key: str, tag_assignment: TagAssignment, update_mask: str + ) -> TagAssignment: + """Update a tag assignment + + :param entity_type: str + The type of entity to which the tag is assigned. Allowed value is dashboards + :param entity_id: str + The identifier of the entity to which the tag is assigned + :param tag_key: str + The key of the tag. The characters , . : / - = and leading/trailing spaces are not allowed + :param tag_assignment: :class:`TagAssignment` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`TagAssignment` + """ + body = tag_assignment.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", + query=query, + body=body, + headers=headers, + ) + return TagAssignment.from_dict(res) + + class TagPoliciesAPI: """The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions for tag policies can be managed using the [Account Access Control Proxy API]. 
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 8e706ccd6..f4e958999 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -10,8 +10,10 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service._internal import (Wait, _enum, _from_dict, + _repeated_dict) + from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -192,6 +194,11 @@ class DeltaSyncVectorIndexSpecRequest: columns from the source table are synced with the index. The primary key column and embedding source column or embedding vector column are always synced.""" + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -216,6 +223,10 @@ def as_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = [v for v in self.columns_to_sync] + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -233,6 +244,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = self.columns_to_sync + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -250,6 +265,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" return cls( columns_to_sync=d.get("columns_to_sync", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -260,6 +277,11 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: @dataclass class DeltaSyncVectorIndexSpecResponse: + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -285,6 +307,10 @@ class DeltaSyncVectorIndexSpecResponse: def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is 
not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -302,6 +328,10 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -320,6 +350,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" return cls( + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -1103,6 +1135,24 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsResponse: return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), name=d.get("name", None)) +@dataclass +class UpdateVectorIndexUsagePolicyResponse: + def as_dict(self) -> dict: + """Serializes the UpdateVectorIndexUsagePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateVectorIndexUsagePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateVectorIndexUsagePolicyResponse: + """Deserializes the UpdateVectorIndexUsagePolicyResponse from a dictionary.""" + return cls() + + @dataclass class UpsertDataResult: failed_primary_keys: Optional[List[str]] = None @@ -1412,7 +1462,12 @@ def wait_get_endpoint_vector_search_endpoint_online( raise TimeoutError(f"timed out after {timeout}: {status_message}") def create_endpoint( - self, name: str, endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None + self, + name: str, + endpoint_type: EndpointType, + *, + budget_policy_id: Optional[str] = None, + usage_policy_id: Optional[str] = None, ) -> Wait[EndpointInfo]: """Create a new endpoint. @@ -1422,6 +1477,8 @@ def create_endpoint( Type of endpoint :param budget_policy_id: str (optional) The budget policy id to be applied + :param usage_policy_id: str (optional) + The usage policy id to be applied once we've migrated to usage policies :returns: Long-running operation waiter for :class:`EndpointInfo`. 
@@ -1434,6 +1491,8 @@ def create_endpoint( body["endpoint_type"] = endpoint_type.value if name is not None: body["name"] = name + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -1452,11 +1511,12 @@ def create_endpoint_and_wait( endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None, + usage_policy_id: Optional[str] = None, timeout=timedelta(minutes=20), ) -> EndpointInfo: - return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result( - timeout=timeout - ) + return self.create_endpoint( + budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name, usage_policy_id=usage_policy_id + ).result(timeout=timeout) def delete_endpoint(self, endpoint_name: str): """Delete a vector search endpoint. @@ -1860,6 +1920,22 @@ def sync_index(self, index_name: str): self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers) + def update_index_budget_policy(self, index_name: str) -> UpdateVectorIndexUsagePolicyResponse: + """Update the budget policy of an index + + :param index_name: str + Name of the vector search index + + :returns: :class:`UpdateVectorIndexUsagePolicyResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/vector-search/indexes/{index_name}/usage-policy", headers=headers) + return UpdateVectorIndexUsagePolicyResponse.from_dict(res) + def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse: """Handles the upserting of data into a specified vector index. diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index cab860d9c..9de4820b8 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -227,24 +227,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: ) -@dataclass -class CreateScopeResponse: - def as_dict(self) -> dict: - """Serializes the CreateScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateScopeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateScopeResponse: - """Deserializes the CreateScopeResponse from a dictionary.""" - return cls() - - @dataclass class CredentialInfo: credential_id: int @@ -305,24 +287,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: ) -@dataclass -class DeleteAclResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAclResponse: - """Deserializes the DeleteAclResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialsResponse: def as_dict(self) -> dict: @@ -377,24 +341,6 @@ def from_dict(cls, d: 
Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteScopeResponse: - def as_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: - """Deserializes the DeleteScopeResponse from a dictionary.""" - return cls() - - @dataclass class DeleteSecretResponse: def as_dict(self) -> dict: @@ -425,6 +371,12 @@ class ExportFormat(Enum): SOURCE = "SOURCE" +class ExportOutputs(Enum): + + ALL = "ALL" + NONE = "NONE" + + @dataclass class ExportResponse: """The request field `direct_download` determines whether a JSON response or binary contents are @@ -993,42 +945,6 @@ class ObjectType(Enum): REPO = "REPO" -@dataclass -class PutAclResponse: - def as_dict(self) -> dict: - """Serializes the PutAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAclResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: - """Deserializes the PutAclResponse from a dictionary.""" - return cls() - - -@dataclass -class PutSecretResponse: - def as_dict(self) -> dict: - """Serializes the PutSecretResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutSecretResponse: - """Deserializes the PutSecretResponse from a dictionary.""" - return cls() - - @dataclass class RepoAccessControlRequest: group_name: Optional[str] = None @@ -2638,7 +2554,9 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers) - def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse: + def export( + self, path: str, *, format: Optional[ExportFormat] = None, outputs: Optional[ExportOutputs] = None + ) -> ExportResponse: """Exports an object or the contents of an entire directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -2660,6 +2578,11 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type. Directory exports will include notebooks and workspace files. + :param outputs: :class:`ExportOutputs` (optional) + This specifies which cell outputs should be included in the export (if the export format allows it). + If not specified, the behavior is determined by the format. For JUPYTER format, the default is to + include all outputs. 
This is a public endpoint, but only ALL or NONE is documented publicly; + DATABRICKS is internal only :returns: :class:`ExportResponse` """ @@ -2667,6 +2590,8 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR query = {} if format is not None: query["format"] = format.value + if outputs is not None: + query["outputs"] = outputs.value if path is not None: query["path"] = path headers = { diff --git a/tests/databricks/sdk/service/common.py b/tests/databricks/sdk/service/common.py new file mode 100755 index 000000000..2fcd455c5 --- /dev/null +++ b/tests/databricks/sdk/service/common.py @@ -0,0 +1,232 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Optional + +from databricks.sdk.service._internal import _enum, _from_dict + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class DatabricksServiceExceptionWithDetailsProto: + """Serialization format for DatabricksServiceException with error details. This message doesn't + work for ScalaPB-04 as google.protobuf.Any is only available to ScalaPB-09. Note the definition + of this message should be in sync with DatabricksServiceExceptionProto defined in + /api-base/proto/legacy/databricks.proto except the latter one doesn't have the error details + field defined.""" + + details: Optional[List[dict]] = None + """@pbjson-skip""" + + error_code: Optional[ErrorCode] = None + + message: Optional[str] = None + + stack_trace: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the DatabricksServiceExceptionWithDetailsProto into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.details: + body["details"] = [v for v in self.details] + if self.error_code is not None: + body["error_code"] = self.error_code.value + if self.message is not None: + body["message"] = self.message + if self.stack_trace is not None: + body["stack_trace"] = self.stack_trace + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksServiceExceptionWithDetailsProto into a shallow dictionary of its immediate attributes.""" + body = {} + if self.details: + body["details"] = self.details + if self.error_code is not None: + body["error_code"] = self.error_code + if self.message is not None: + body["message"] = self.message + if self.stack_trace is not None: + body["stack_trace"] = self.stack_trace + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabricksServiceExceptionWithDetailsProto: + """Deserializes the DatabricksServiceExceptionWithDetailsProto from a dictionary.""" + return cls( + details=d.get("details", None), + error_code=_enum(d, "error_code", ErrorCode), + message=d.get("message", None), + stack_trace=d.get("stack_trace", None), + ) + + +class ErrorCode(Enum): + """Legacy definition of the ErrorCode enum. Please keep in sync with + api-base/proto/error_code.proto (except status code mapping annotations as this file doesn't + have them). 
Will be removed eventually, pending the ScalaPB 0.4 cleanup.""" + + ABORTED = "ABORTED" + ALREADY_EXISTS = "ALREADY_EXISTS" + BAD_REQUEST = "BAD_REQUEST" + CANCELLED = "CANCELLED" + CATALOG_ALREADY_EXISTS = "CATALOG_ALREADY_EXISTS" + CATALOG_DOES_NOT_EXIST = "CATALOG_DOES_NOT_EXIST" + CATALOG_NOT_EMPTY = "CATALOG_NOT_EMPTY" + COULD_NOT_ACQUIRE_LOCK = "COULD_NOT_ACQUIRE_LOCK" + CUSTOMER_UNAUTHORIZED = "CUSTOMER_UNAUTHORIZED" + DAC_ALREADY_EXISTS = "DAC_ALREADY_EXISTS" + DAC_DOES_NOT_EXIST = "DAC_DOES_NOT_EXIST" + DATA_LOSS = "DATA_LOSS" + DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED" + DEPLOYMENT_TIMEOUT = "DEPLOYMENT_TIMEOUT" + DIRECTORY_NOT_EMPTY = "DIRECTORY_NOT_EMPTY" + DIRECTORY_PROTECTED = "DIRECTORY_PROTECTED" + DRY_RUN_FAILED = "DRY_RUN_FAILED" + ENDPOINT_NOT_FOUND = "ENDPOINT_NOT_FOUND" + EXTERNAL_LOCATION_ALREADY_EXISTS = "EXTERNAL_LOCATION_ALREADY_EXISTS" + EXTERNAL_LOCATION_DOES_NOT_EXIST = "EXTERNAL_LOCATION_DOES_NOT_EXIST" + FEATURE_DISABLED = "FEATURE_DISABLED" + GIT_CONFLICT = "GIT_CONFLICT" + GIT_REMOTE_ERROR = "GIT_REMOTE_ERROR" + GIT_SENSITIVE_TOKEN_DETECTED = "GIT_SENSITIVE_TOKEN_DETECTED" + GIT_UNKNOWN_REF = "GIT_UNKNOWN_REF" + GIT_URL_NOT_ON_ALLOW_LIST = "GIT_URL_NOT_ON_ALLOW_LIST" + INSECURE_PARTNER_RESPONSE = "INSECURE_PARTNER_RESPONSE" + INTERNAL_ERROR = "INTERNAL_ERROR" + INVALID_PARAMETER_VALUE = "INVALID_PARAMETER_VALUE" + INVALID_STATE = "INVALID_STATE" + INVALID_STATE_TRANSITION = "INVALID_STATE_TRANSITION" + IO_ERROR = "IO_ERROR" + IPYNB_FILE_IN_REPO = "IPYNB_FILE_IN_REPO" + MALFORMED_PARTNER_RESPONSE = "MALFORMED_PARTNER_RESPONSE" + MALFORMED_REQUEST = "MALFORMED_REQUEST" + MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST = "MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST" + MAX_BLOCK_SIZE_EXCEEDED = "MAX_BLOCK_SIZE_EXCEEDED" + MAX_CHILD_NODE_SIZE_EXCEEDED = "MAX_CHILD_NODE_SIZE_EXCEEDED" + MAX_LIST_SIZE_EXCEEDED = "MAX_LIST_SIZE_EXCEEDED" + MAX_NOTEBOOK_SIZE_EXCEEDED = "MAX_NOTEBOOK_SIZE_EXCEEDED" + MAX_READ_SIZE_EXCEEDED = "MAX_READ_SIZE_EXCEEDED" + METASTORE_ALREADY_EXISTS = "METASTORE_ALREADY_EXISTS" + METASTORE_DOES_NOT_EXIST = "METASTORE_DOES_NOT_EXIST" + METASTORE_NOT_EMPTY = "METASTORE_NOT_EMPTY" + NOT_FOUND = "NOT_FOUND" + NOT_IMPLEMENTED = "NOT_IMPLEMENTED" + PARTIAL_DELETE = "PARTIAL_DELETE" + PERMISSION_DENIED = "PERMISSION_DENIED" + PERMISSION_NOT_PROPAGATED = "PERMISSION_NOT_PROPAGATED" + PRINCIPAL_DOES_NOT_EXIST = "PRINCIPAL_DOES_NOT_EXIST" + PROJECTS_OPERATION_TIMEOUT = "PROJECTS_OPERATION_TIMEOUT" + PROVIDER_ALREADY_EXISTS = "PROVIDER_ALREADY_EXISTS" + PROVIDER_DOES_NOT_EXIST = "PROVIDER_DOES_NOT_EXIST" + PROVIDER_SHARE_NOT_ACCESSIBLE = "PROVIDER_SHARE_NOT_ACCESSIBLE" + QUOTA_EXCEEDED = "QUOTA_EXCEEDED" + RECIPIENT_ALREADY_EXISTS = "RECIPIENT_ALREADY_EXISTS" + RECIPIENT_DOES_NOT_EXIST = "RECIPIENT_DOES_NOT_EXIST" + REQUEST_LIMIT_EXCEEDED = "REQUEST_LIMIT_EXCEEDED" + RESOURCE_ALREADY_EXISTS = "RESOURCE_ALREADY_EXISTS" + RESOURCE_CONFLICT = "RESOURCE_CONFLICT" + RESOURCE_DOES_NOT_EXIST = "RESOURCE_DOES_NOT_EXIST" + RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED" + RESOURCE_LIMIT_EXCEEDED = "RESOURCE_LIMIT_EXCEEDED" + SCHEMA_ALREADY_EXISTS = "SCHEMA_ALREADY_EXISTS" + SCHEMA_DOES_NOT_EXIST = "SCHEMA_DOES_NOT_EXIST" + SCHEMA_NOT_EMPTY = "SCHEMA_NOT_EMPTY" + SEARCH_QUERY_TOO_LONG = "SEARCH_QUERY_TOO_LONG" + SEARCH_QUERY_TOO_SHORT = "SEARCH_QUERY_TOO_SHORT" + SERVICE_UNDER_MAINTENANCE = "SERVICE_UNDER_MAINTENANCE" + SHARE_ALREADY_EXISTS = "SHARE_ALREADY_EXISTS" + SHARE_DOES_NOT_EXIST = "SHARE_DOES_NOT_EXIST" + STORAGE_CREDENTIAL_ALREADY_EXISTS = "STORAGE_CREDENTIAL_ALREADY_EXISTS" 
+ STORAGE_CREDENTIAL_DOES_NOT_EXIST = "STORAGE_CREDENTIAL_DOES_NOT_EXIST" + TABLE_ALREADY_EXISTS = "TABLE_ALREADY_EXISTS" + TABLE_DOES_NOT_EXIST = "TABLE_DOES_NOT_EXIST" + TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" + UNAUTHENTICATED = "UNAUTHENTICATED" + UNAVAILABLE = "UNAVAILABLE" + UNKNOWN = "UNKNOWN" + UNPARSEABLE_HTTP_ERROR = "UNPARSEABLE_HTTP_ERROR" + WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE" + + +@dataclass +class Operation: + """This resource represents a long-running operation that is the result of a network API call.""" + + done: Optional[bool] = None + """If the value is `false`, it means the operation is still in progress. If `true`, the operation + is completed, and either `error` or `response` is available.""" + + error: Optional[DatabricksServiceExceptionWithDetailsProto] = None + """The error result of the operation in case of failure or cancellation.""" + + metadata: Optional[dict] = None + """Service-specific metadata associated with the operation. It typically contains progress + information and common metadata such as create time. Some services might not provide such + metadata. Any method that returns a long-running operation should document the metadata type, if + any.""" + + name: Optional[str] = None + """The server-assigned name, which is only unique within the same service that originally returns + it. If you use the default HTTP mapping, the `name` should be a resource name ending with + `operations/{unique_id}`. + + Note: multi-segment resource names are not yet supported in the RPC framework and SDK/TF. Until + that support is added, `name` must be string without internal `/` separators.""" + + response: Optional[dict] = None + """The normal, successful response of the operation. If the original method returns no data on + success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is + standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the + response should have the type `XxxResponse`, where `Xxx` is the original method name. 
For + example, if the original method name is `TakeSnapshot()`, the inferred response type is + `TakeSnapshotResponse`.""" + + def as_dict(self) -> dict: + """Serializes the Operation into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.done is not None: + body["done"] = self.done + if self.error: + body["error"] = self.error.as_dict() + if self.metadata: + body["metadata"] = self.metadata + if self.name is not None: + body["name"] = self.name + if self.response: + body["response"] = self.response + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Operation into a shallow dictionary of its immediate attributes.""" + body = {} + if self.done is not None: + body["done"] = self.done + if self.error: + body["error"] = self.error + if self.metadata: + body["metadata"] = self.metadata + if self.name is not None: + body["name"] = self.name + if self.response: + body["response"] = self.response + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Operation: + """Deserializes the Operation from a dictionary.""" + return cls( + done=d.get("done", None), + error=_from_dict(d, "error", DatabricksServiceExceptionWithDetailsProto), + metadata=d.get("metadata", None), + name=d.get("name", None), + response=d.get("response", None), + ) diff --git a/tests/databricks/sdk/service/httpcallv2.py b/tests/databricks/sdk/service/httpcallv2.py new file mode 100755 index 000000000..efbe0c0d4 --- /dev/null +++ b/tests/databricks/sdk/service/httpcallv2.py @@ -0,0 +1,244 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any, Dict, List, Optional + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class ComplexQueryParam: + nested_optional_query_param: Optional[str] = None + + nested_repeated_query_param: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the ComplexQueryParam into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.nested_optional_query_param is not None: + body["nested_optional_query_param"] = self.nested_optional_query_param + if self.nested_repeated_query_param: + body["nested_repeated_query_param"] = [v for v in self.nested_repeated_query_param] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ComplexQueryParam into a shallow dictionary of its immediate attributes.""" + body = {} + if self.nested_optional_query_param is not None: + body["nested_optional_query_param"] = self.nested_optional_query_param + if self.nested_repeated_query_param: + body["nested_repeated_query_param"] = self.nested_repeated_query_param + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ComplexQueryParam: + """Deserializes the ComplexQueryParam from a dictionary.""" + return cls( + nested_optional_query_param=d.get("nested_optional_query_param", None), + nested_repeated_query_param=d.get("nested_repeated_query_param", None), + ) + + +@dataclass +class Resource: + any_field: Optional[dict] = None + + body_field: Optional[str] = None + + nested_path_param_bool: Optional[bool] = None + + nested_path_param_int: Optional[int] = None + + nested_path_param_string: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the Resource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.any_field: + 
body["any_field"] = self.any_field + if self.body_field is not None: + body["body_field"] = self.body_field + if self.nested_path_param_bool is not None: + body["nested_path_param_bool"] = self.nested_path_param_bool + if self.nested_path_param_int is not None: + body["nested_path_param_int"] = self.nested_path_param_int + if self.nested_path_param_string is not None: + body["nested_path_param_string"] = self.nested_path_param_string + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Resource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.any_field: + body["any_field"] = self.any_field + if self.body_field is not None: + body["body_field"] = self.body_field + if self.nested_path_param_bool is not None: + body["nested_path_param_bool"] = self.nested_path_param_bool + if self.nested_path_param_int is not None: + body["nested_path_param_int"] = self.nested_path_param_int + if self.nested_path_param_string is not None: + body["nested_path_param_string"] = self.nested_path_param_string + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Resource: + """Deserializes the Resource from a dictionary.""" + return cls( + any_field=d.get("any_field", None), + body_field=d.get("body_field", None), + nested_path_param_bool=d.get("nested_path_param_bool", None), + nested_path_param_int=d.get("nested_path_param_int", None), + nested_path_param_string=d.get("nested_path_param_string", None), + ) + + +class HttpCallV2API: + """Lorem Ipsum""" + + def __init__(self, api_client): + self._api = api_client + + def create_resource( + self, path_param_string: str, path_param_int: int, path_param_bool: bool, *, body_field: Optional[str] = None + ) -> Resource: + """This mimics "old" style post requests which have the resource inlined. 
+ + :param path_param_string: str + :param path_param_int: int + :param path_param_bool: bool + :param body_field: str (optional) + Body element + + :returns: :class:`Resource` + """ + body = {} + if body_field is not None: + body["body_field"] = body_field + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/http-call/{path_param_string}/{path_param_int}/{path_param_bool}", + body=body, + headers=headers, + ) + return Resource.from_dict(res) + + def get_resource( + self, + path_param_string: str, + path_param_int: int, + path_param_bool: bool, + *, + field_mask: Optional[str] = None, + optional_complex_query_param: Optional[ComplexQueryParam] = None, + query_param_bool: Optional[bool] = None, + query_param_int: Optional[int] = None, + query_param_string: Optional[str] = None, + repeated_complex_query_param: Optional[List[ComplexQueryParam]] = None, + repeated_query_param: Optional[List[str]] = None, + ) -> Resource: + + query = {} + if field_mask is not None: + query["field_mask"] = field_mask + if optional_complex_query_param is not None: + query["optional_complex_query_param"] = optional_complex_query_param.as_dict() + if query_param_bool is not None: + query["query_param_bool"] = query_param_bool + if query_param_int is not None: + query["query_param_int"] = query_param_int + if query_param_string is not None: + query["query_param_string"] = query_param_string + if repeated_complex_query_param is not None: + query["repeated_complex_query_param"] = [v.as_dict() for v in repeated_complex_query_param] + if repeated_query_param is not None: + query["repeated_query_param"] = [v for v in repeated_query_param] + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/http-call/{path_param_string}/{path_param_int}/{path_param_bool}", + query=query, + headers=headers, + ) + return Resource.from_dict(res) + + def update_resource( + self, + nested_path_param_string: str, + nested_path_param_int: int, + nested_path_param_bool: bool, + resource: Resource, + *, + field_mask: Optional[str] = None, + optional_complex_query_param: Optional[ComplexQueryParam] = None, + query_param_bool: Optional[bool] = None, + query_param_int: Optional[int] = None, + query_param_string: Optional[str] = None, + repeated_complex_query_param: Optional[List[ComplexQueryParam]] = None, + repeated_query_param: Optional[List[str]] = None, + ) -> Resource: + """This mimics "new" style post requests which have a body field. + + :param nested_path_param_string: str + :param nested_path_param_int: int + :param nested_path_param_bool: bool + :param resource: :class:`Resource` + Body element + :param field_mask: str (optional) + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. 
+ :param optional_complex_query_param: :class:`ComplexQueryParam` (optional) + :param query_param_bool: bool (optional) + :param query_param_int: int (optional) + :param query_param_string: str (optional) + :param repeated_complex_query_param: List[:class:`ComplexQueryParam`] (optional) + :param repeated_query_param: List[str] (optional) + + :returns: :class:`Resource` + """ + body = resource.as_dict() + query = {} + if field_mask is not None: + query["field_mask"] = field_mask + if optional_complex_query_param is not None: + query["optional_complex_query_param"] = optional_complex_query_param.as_dict() + if query_param_bool is not None: + query["query_param_bool"] = query_param_bool + if query_param_int is not None: + query["query_param_int"] = query_param_int + if query_param_string is not None: + query["query_param_string"] = query_param_string + if repeated_complex_query_param is not None: + query["repeated_complex_query_param"] = [v.as_dict() for v in repeated_complex_query_param] + if repeated_query_param is not None: + query["repeated_query_param"] = [v for v in repeated_query_param] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/http-call/{nested_path_param_string}/{nested_path_param_int}/{nested_path_param_bool}", + query=query, + body=body, + headers=headers, + ) + return Resource.from_dict(res) diff --git a/tests/databricks/sdk/service/jsonmarshallv2.py b/tests/databricks/sdk/service/jsonmarshallv2.py new file mode 100755 index 000000000..996e7ce68 --- /dev/null +++ b/tests/databricks/sdk/service/jsonmarshallv2.py @@ -0,0 +1,478 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Optional + +from databricks.sdk.service._internal import (_enum, _from_dict, + _repeated_dict, _repeated_enum) + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class NestedMessage: + optional_duration: Optional[str] = None + + optional_string: Optional[str] = None + + optional_timestamp: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the NestedMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.optional_duration is not None: + body["optional_duration"] = self.optional_duration + if self.optional_string is not None: + body["optional_string"] = self.optional_string + if self.optional_timestamp is not None: + body["optional_timestamp"] = self.optional_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NestedMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.optional_duration is not None: + body["optional_duration"] = self.optional_duration + if self.optional_string is not None: + body["optional_string"] = self.optional_string + if self.optional_timestamp is not None: + body["optional_timestamp"] = self.optional_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NestedMessage: + """Deserializes the NestedMessage from a dictionary.""" + return cls( + optional_duration=d.get("optional_duration", None), + optional_string=d.get("optional_string", None), + optional_timestamp=d.get("optional_timestamp", None), + ) + + +@dataclass +class OptionalFields: + duration: Optional[str] = None + + field_mask: Optional[str] 
= None + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names.""" + + legacy_duration: Optional[str] = None + """Legacy Well Known types""" + + legacy_field_mask: Optional[str] = None + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names.""" + + legacy_timestamp: Optional[str] = None + + list_value: Optional[List[any]] = None + + map: Optional[Dict[str, str]] = None + """Lint disable reason: This is a dummy field used to test SDK Generation logic.""" + + optional_bool: Optional[bool] = None + + optional_int32: Optional[int] = None + + optional_int64: Optional[int] = None + + optional_message: Optional[NestedMessage] = None + + optional_string: Optional[str] = None + + struct: Optional[Dict[str, any]] = None + + test_enum: Optional[TestEnum] = None + + timestamp: Optional[str] = None + + value: Optional[any] = None + + def as_dict(self) -> dict: + """Serializes the OptionalFields into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.duration is not None: + body["duration"] = self.duration + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.legacy_duration is not None: + body["legacy_duration"] = self.legacy_duration + if self.legacy_field_mask is not None: + body["legacy_field_mask"] = self.legacy_field_mask + if self.legacy_timestamp is not None: + body["legacy_timestamp"] = self.legacy_timestamp + if self.list_value: + body["list_value"] = [v for v in self.list_value] + if self.map: + body["map"] = self.map + if self.optional_bool is not None: + body["optional_bool"] = self.optional_bool + if self.optional_int32 is not None: + body["optional_int32"] = self.optional_int32 + if self.optional_int64 is not None: + body["optional_int64"] = self.optional_int64 + if self.optional_message: + body["optional_message"] = self.optional_message.as_dict() + if self.optional_string is not None: + body["optional_string"] = self.optional_string + if self.struct: + body["struct"] = self.struct + if self.test_enum is not None: + body["test_enum"] = self.test_enum.value + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OptionalFields into a shallow dictionary of its immediate attributes.""" + body = {} + if self.duration is not None: + body["duration"] = self.duration + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.legacy_duration is not None: + body["legacy_duration"] = self.legacy_duration + if self.legacy_field_mask is not None: + body["legacy_field_mask"] = self.legacy_field_mask + if self.legacy_timestamp is not None: + body["legacy_timestamp"] = self.legacy_timestamp + if self.list_value: + body["list_value"] = self.list_value + if self.map: + body["map"] = self.map + if 
self.optional_bool is not None: + body["optional_bool"] = self.optional_bool + if self.optional_int32 is not None: + body["optional_int32"] = self.optional_int32 + if self.optional_int64 is not None: + body["optional_int64"] = self.optional_int64 + if self.optional_message: + body["optional_message"] = self.optional_message + if self.optional_string is not None: + body["optional_string"] = self.optional_string + if self.struct: + body["struct"] = self.struct + if self.test_enum is not None: + body["test_enum"] = self.test_enum + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OptionalFields: + """Deserializes the OptionalFields from a dictionary.""" + return cls( + duration=d.get("duration", None), + field_mask=d.get("field_mask", None), + legacy_duration=d.get("legacy_duration", None), + legacy_field_mask=d.get("legacy_field_mask", None), + legacy_timestamp=d.get("legacy_timestamp", None), + list_value=d.get("list_value", None), + map=d.get("map", None), + optional_bool=d.get("optional_bool", None), + optional_int32=d.get("optional_int32", None), + optional_int64=d.get("optional_int64", None), + optional_message=_from_dict(d, "optional_message", NestedMessage), + optional_string=d.get("optional_string", None), + struct=d.get("struct", None), + test_enum=_enum(d, "test_enum", TestEnum), + timestamp=d.get("timestamp", None), + value=d.get("value", None), + ) + + +@dataclass +class RepeatedFields: + repeated_bool: Optional[List[bool]] = None + + repeated_duration: Optional[List[str]] = None + + repeated_field_mask: Optional[List[str]] = None + + repeated_int32: Optional[List[int]] = None + + repeated_int64: Optional[List[int]] = None + + repeated_list_value: Optional[List[List[any]]] = None + + repeated_message: Optional[List[NestedMessage]] = None + + repeated_string: Optional[List[str]] = None + + repeated_struct: Optional[List[Dict[str, any]]] = None + + repeated_timestamp: Optional[List[str]] = None + + repeated_value: Optional[List[any]] = None + + test_repeated_enum: Optional[List[TestEnum]] = None + + def as_dict(self) -> dict: + """Serializes the RepeatedFields into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.repeated_bool: + body["repeated_bool"] = [v for v in self.repeated_bool] + if self.repeated_duration: + body["repeated_duration"] = [v for v in self.repeated_duration] + if self.repeated_field_mask: + body["repeated_field_mask"] = [v for v in self.repeated_field_mask] + if self.repeated_int32: + body["repeated_int32"] = [v for v in self.repeated_int32] + if self.repeated_int64: + body["repeated_int64"] = [v for v in self.repeated_int64] + if self.repeated_list_value: + body["repeated_list_value"] = [v for v in self.repeated_list_value] + if self.repeated_message: + body["repeated_message"] = [v.as_dict() for v in self.repeated_message] + if self.repeated_string: + body["repeated_string"] = [v for v in self.repeated_string] + if self.repeated_struct: + body["repeated_struct"] = [v for v in self.repeated_struct] + if self.repeated_timestamp: + body["repeated_timestamp"] = [v for v in self.repeated_timestamp] + if self.repeated_value: + body["repeated_value"] = [v for v in self.repeated_value] + if self.test_repeated_enum: + body["test_repeated_enum"] = [v.value for v in self.test_repeated_enum] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RepeatedFields into a shallow dictionary of 
its immediate attributes.""" + body = {} + if self.repeated_bool: + body["repeated_bool"] = self.repeated_bool + if self.repeated_duration: + body["repeated_duration"] = self.repeated_duration + if self.repeated_field_mask: + body["repeated_field_mask"] = self.repeated_field_mask + if self.repeated_int32: + body["repeated_int32"] = self.repeated_int32 + if self.repeated_int64: + body["repeated_int64"] = self.repeated_int64 + if self.repeated_list_value: + body["repeated_list_value"] = self.repeated_list_value + if self.repeated_message: + body["repeated_message"] = self.repeated_message + if self.repeated_string: + body["repeated_string"] = self.repeated_string + if self.repeated_struct: + body["repeated_struct"] = self.repeated_struct + if self.repeated_timestamp: + body["repeated_timestamp"] = self.repeated_timestamp + if self.repeated_value: + body["repeated_value"] = self.repeated_value + if self.test_repeated_enum: + body["test_repeated_enum"] = self.test_repeated_enum + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RepeatedFields: + """Deserializes the RepeatedFields from a dictionary.""" + return cls( + repeated_bool=d.get("repeated_bool", None), + repeated_duration=d.get("repeated_duration", None), + repeated_field_mask=d.get("repeated_field_mask", None), + repeated_int32=d.get("repeated_int32", None), + repeated_int64=d.get("repeated_int64", None), + repeated_list_value=d.get("repeated_list_value", None), + repeated_message=_repeated_dict(d, "repeated_message", NestedMessage), + repeated_string=d.get("repeated_string", None), + repeated_struct=d.get("repeated_struct", None), + repeated_timestamp=d.get("repeated_timestamp", None), + repeated_value=d.get("repeated_value", None), + test_repeated_enum=_repeated_enum(d, "test_repeated_enum", TestEnum), + ) + + +@dataclass +class RequiredFields: + required_string: str + + required_int32: int + + required_int64: int + + required_bool: bool + + required_message: NestedMessage + + test_required_enum: TestEnum + + required_duration: str + + required_field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. 
Field names must exactly match the + resource field names.""" + + required_timestamp: str + + required_value: any + + required_list_value: List[any] + + required_struct: Dict[str, any] + + def as_dict(self) -> dict: + """Serializes the RequiredFields into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.required_bool is not None: + body["required_bool"] = self.required_bool + if self.required_duration is not None: + body["required_duration"] = self.required_duration + if self.required_field_mask is not None: + body["required_field_mask"] = self.required_field_mask + if self.required_int32 is not None: + body["required_int32"] = self.required_int32 + if self.required_int64 is not None: + body["required_int64"] = self.required_int64 + if self.required_list_value: + body["required_list_value"] = [v for v in self.required_list_value] + if self.required_message: + body["required_message"] = self.required_message.as_dict() + if self.required_string is not None: + body["required_string"] = self.required_string + if self.required_struct: + body["required_struct"] = self.required_struct + if self.required_timestamp is not None: + body["required_timestamp"] = self.required_timestamp + if self.required_value: + body["required_value"] = self.required_value + if self.test_required_enum is not None: + body["test_required_enum"] = self.test_required_enum.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RequiredFields into a shallow dictionary of its immediate attributes.""" + body = {} + if self.required_bool is not None: + body["required_bool"] = self.required_bool + if self.required_duration is not None: + body["required_duration"] = self.required_duration + if self.required_field_mask is not None: + body["required_field_mask"] = self.required_field_mask + if self.required_int32 is not None: + body["required_int32"] = self.required_int32 + if self.required_int64 is not None: + body["required_int64"] = self.required_int64 + if self.required_list_value: + body["required_list_value"] = self.required_list_value + if self.required_message: + body["required_message"] = self.required_message + if self.required_string is not None: + body["required_string"] = self.required_string + if self.required_struct: + body["required_struct"] = self.required_struct + if self.required_timestamp is not None: + body["required_timestamp"] = self.required_timestamp + if self.required_value: + body["required_value"] = self.required_value + if self.test_required_enum is not None: + body["test_required_enum"] = self.test_required_enum + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RequiredFields: + """Deserializes the RequiredFields from a dictionary.""" + return cls( + required_bool=d.get("required_bool", None), + required_duration=d.get("required_duration", None), + required_field_mask=d.get("required_field_mask", None), + required_int32=d.get("required_int32", None), + required_int64=d.get("required_int64", None), + required_list_value=d.get("required_list_value", None), + required_message=_from_dict(d, "required_message", NestedMessage), + required_string=d.get("required_string", None), + required_struct=d.get("required_struct", None), + required_timestamp=d.get("required_timestamp", None), + required_value=d.get("required_value", None), + test_required_enum=_enum(d, "test_required_enum", TestEnum), + ) + + +@dataclass +class Resource: + """We separate this into 3 submessages to simplify test cases. 
E.g., any required top level field + needs to be included in the expected json for each test case.""" + + optional_fields: Optional[OptionalFields] = None + + repeated_fields: Optional[RepeatedFields] = None + + required_fields: Optional[RequiredFields] = None + + def as_dict(self) -> dict: + """Serializes the Resource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.optional_fields: + body["optional_fields"] = self.optional_fields.as_dict() + if self.repeated_fields: + body["repeated_fields"] = self.repeated_fields.as_dict() + if self.required_fields: + body["required_fields"] = self.required_fields.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Resource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.optional_fields: + body["optional_fields"] = self.optional_fields + if self.repeated_fields: + body["repeated_fields"] = self.repeated_fields + if self.required_fields: + body["required_fields"] = self.required_fields + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Resource: + """Deserializes the Resource from a dictionary.""" + return cls( + optional_fields=_from_dict(d, "optional_fields", OptionalFields), + repeated_fields=_from_dict(d, "repeated_fields", RepeatedFields), + required_fields=_from_dict(d, "required_fields", RequiredFields), + ) + + +class TestEnum(Enum): + + TEST_ENUM_ONE = "TEST_ENUM_ONE" + TEST_ENUM_TWO = "TEST_ENUM_TWO" + + +class JsonMarshallV2API: + """Lorem Ipsum""" + + def __init__(self, api_client): + self._api = api_client + + def get_resource(self, name: str, resource: Resource) -> Resource: + + query = {} + if resource is not None: + query["resource"] = resource.as_dict() + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/json-marshall/{name}", query=query, headers=headers) + return Resource.from_dict(res) diff --git a/tests/databricks/sdk/service/lrotesting.py b/tests/databricks/sdk/service/lrotesting.py new file mode 100755 index 000000000..1e8f23a59 --- /dev/null +++ b/tests/databricks/sdk/service/lrotesting.py @@ -0,0 +1,103 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any, Dict, Optional + +from databricks.sdk.service import common  # provides common.Operation used below; import path assumed from the package layout + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class TestResource: + """Test resource for LRO operations""" + + id: Optional[str] = None + """Unique identifier for the resource""" + + name: Optional[str] = None + """Name of the resource""" + + def as_dict(self) -> dict: + """Serializes the TestResource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TestResource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TestResource: + """Deserializes the TestResource from a dictionary.""" + return cls(id=d.get("id", None), name=d.get("name", None)) + + +class LroTestingAPI: + """Test service for Long Running Operations""" + + def __init__(self, api_client): + self._api = api_client + + def cancel_operation(self, name: str): + + headers = { + "Accept": "application/json", + } + + self._api.do("POST", f"/api/2.0/lro-testing/operations/{name}/cancel", headers=headers) + + def create_test_resource(self, resource: TestResource) -> common.Operation: + """Simple method to create test resource for LRO testing + + :param resource: :class:`TestResource` + The resource to create + + :returns: :class:`common.Operation` + """ + body = resource.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/lro-testing/resources", body=body, headers=headers) + return common.Operation.from_dict(res) + + def get_operation(self, name: str) -> common.Operation: + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/lro-testing/operations/{name}", headers=headers) + return common.Operation.from_dict(res) + + def get_test_resource(self, resource_id: str) -> TestResource: + """Simple method to get test resource + + :param resource_id: str + Resource ID to get + + :returns: :class:`TestResource` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/lro-testing/resources/{resource_id}", headers=headers) + return TestResource.from_dict(res) diff --git a/tests/generated/test_json_marshall.py b/tests/generated/test_json_marshall.py new file mode 100755 index 000000000..5ca9fe397 --- /dev/null +++ b/tests/generated/test_json_marshall.py @@ -0,0 +1,419 @@ +# Code generated by Databricks SDK Generator. DO NOT EDIT.
+ +import json +from typing import Any + +import pytest + +from tests.databricks.sdk.service.jsonmarshallv2 import (NestedMessage, + OptionalFields, + RepeatedFields, + RequiredFields, + TestEnum) + + +@pytest.mark.parametrize( + "from_dict_method,instance,expected_json", + [ + ( + OptionalFields.from_dict, + OptionalFields( + optional_string="test", + ), + """{ + "optional_string": "test" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_int32=42, + ), + """{ + "optional_int32": 42 + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_int64=9223372036854775807, + ), + """{ + "optional_int64": 9223372036854775807 + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_bool=True, + ), + """{ + "optional_bool": true + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + test_enum=TestEnum.TEST_ENUM_ONE, + ), + """{ + "test_enum": "TEST_ENUM_ONE" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_message=NestedMessage( + optional_string="nested_value", + ), + ), + """{ + "optional_message": { + "optional_string": "nested_value" + } + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + map={ + "key": "test_key", + "value": "test_value", + }, + ), + """{ + "map": { + "key": "test_key", + "value": "test_value" + } + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + duration="3600s", + ), + """{ + "duration": "3600s" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + field_mask="optional_string,optional_int32", + ), + """{ + "field_mask": "optional_string,optional_int32" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + timestamp="2023-01-01T00:00:00Z", + ), + """{ + "timestamp": "2023-01-01T00:00:00Z" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_bool=True, + optional_int32=42, + optional_string="test", + ), + """{ + "optional_string":"test", + "optional_int32":42, + "optional_bool":true + }""", + ), + ( + RequiredFields.from_dict, + RequiredFields( + required_bool=False, + required_duration="0s", + required_field_mask="", + required_int32=0, + required_int64=0, + required_list_value=[], + required_message=NestedMessage(), + required_string="", + required_struct={}, + required_timestamp="1970-01-01T00:00:00Z", + required_value=json.loads("{}"), + test_required_enum=TestEnum.TEST_ENUM_ONE, + ), + """{ + "required_string": "", + "required_int32": 0, + "required_int64": 0, + "required_bool": false, + "required_message": {}, + "test_required_enum": "TEST_ENUM_ONE", + "required_duration": "0s", + "required_field_mask": "", + "required_timestamp": "1970-01-01T00:00:00Z" + }""", + ), + ( + RequiredFields.from_dict, + RequiredFields( + required_bool=True, + required_duration="7200s", + required_field_mask="required_string,required_int32", + required_int32=42, + required_int64=1234567890123456789, + required_list_value=[], + required_message=NestedMessage(), + required_string="non_default_string", + required_struct={}, + required_timestamp="2023-12-31T23:59:59Z", + required_value=json.loads("{}"), + test_required_enum=TestEnum.TEST_ENUM_TWO, + ), + """{ + "required_string": "non_default_string", + "required_int32": 42, + "required_int64": 1234567890123456789, + "required_bool": true, + "required_message": {}, + "test_required_enum": "TEST_ENUM_TWO", + "required_duration": "7200s", + "required_field_mask": "required_string,required_int32", + "required_timestamp": "2023-12-31T23:59:59Z" + }""", + ), + ( + RepeatedFields.from_dict, + 
RepeatedFields( + repeated_string=[ + "item1", + "item2", + "item3", + ], + ), + """{ + "repeated_string": ["item1", "item2", "item3"] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_int32=[ + 1, + 2, + 3, + 4, + 5, + ], + ), + """{ + "repeated_int32": [1, 2, 3, 4, 5] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_int64=[ + 1000000000000000000, + 2000000000000000000, + ], + ), + """{ + "repeated_int64": [1000000000000000000, 2000000000000000000] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_bool=[ + True, + False, + True, + ], + ), + """{ + "repeated_bool": [true, false, true] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + test_repeated_enum=[ + TestEnum.TEST_ENUM_ONE, + TestEnum.TEST_ENUM_TWO, + ], + ), + """{ + "test_repeated_enum": ["TEST_ENUM_ONE", "TEST_ENUM_TWO"] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_message=[ + NestedMessage( + optional_string="nested1", + ), + NestedMessage( + optional_string="nested2", + ), + ], + ), + """{ + "repeated_message": [ + { + "optional_string": "nested1" + }, + { + "optional_string": "nested2" + } + ] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_duration=[ + "60s", + "120s", + "180s", + ], + ), + """{ + "repeated_duration": ["60s", "120s", "180s"] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_field_mask=[ + "field1", + "field2,field3", + ], + ), + """{ + "repeated_field_mask": ["field1", "field2,field3"] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_timestamp=[ + "2023-01-01T00:00:00Z", + "2023-01-02T00:00:00Z", + ], + ), + """{ + "repeated_timestamp": ["2023-01-01T00:00:00Z", "2023-01-02T00:00:00Z"] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_bool=[ + True, + False, + ], + repeated_int32=[ + 10, + 20, + 30, + ], + repeated_string=[ + "a", + "b", + "c", + ], + ), + """{ + "repeated_string": ["a", "b", "c"], + "repeated_int32": [10, 20, 30], + "repeated_bool": [true, false] + }""", + ), + ( + RepeatedFields.from_dict, + RepeatedFields( + repeated_string=[], + ), + """{}""", + ), + ( + OptionalFields.from_dict, + OptionalFields(), + """{}""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + optional_bool=False, + optional_int32=0, + optional_int64=0, + optional_string="", + ), + """{ + "optional_int32": 0, + "optional_int64": 0, + "optional_bool": false, + "optional_string": "" + }""", + ), + ( + OptionalFields.from_dict, + OptionalFields( + legacy_duration="1s", + legacy_field_mask="legacy_duration,legacy_timestamp", + legacy_timestamp="2023-01-01T00:00:00Z", + ), + """{ + "legacy_duration": "1s", + "legacy_timestamp": "2023-01-01T00:00:00Z", + "legacy_field_mask": "legacy_duration,legacy_timestamp" + }""", + ), + ], + ids=[ + "OptionalString", + "OptionalInt32", + "OptionalInt64", + "OptionalBool", + "OptionalEnum", + "OptionalNestedMessage", + "OptionalMap", + "OptionalDuration", + "OptionalFieldMask", + "OptionalTimestamp", + "MultipleOptionalFields", + "RequiredFieldsExplicitDefaults", + "RequiredFieldsNonDefaults", + "RepeatedString", + "RepeatedInt32", + "RepeatedInt64", + "RepeatedBool", + "RepeatedEnum", + "RepeatedNestedMessage", + "RepeatedDuration", + "RepeatedFieldMask", + "RepeatedTimestamp", + "MultipleRepeatedFields", + "EmptyRepeatedFields", + "OptionalFieldsNoInput", + "OptionalFieldsZeroValues", + "LegacyWellKnownTypes", + ], +) +def test_python_marshall(from_dict_method: any, 
instance: Any, expected_json: str): + """Test Python object to dict conversion""" + + result = instance.as_dict() + expected_dict = json.loads(expected_json) + + assert result == expected_dict, f"Expected {expected_dict}, but got {result}" + + recreated = from_dict_method(result) + + final_dict = recreated.as_dict() + + assert final_dict == expected_dict, f"Expected {expected_dict}, but got {final_dict}"
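
The generated test above drives every case through the same as_dict()/from_dict() roundtrip. A minimal standalone sketch of that roundtrip, assuming the repository root is on sys.path so the tests package is importable (the field values here are illustrative, not taken from the spec):

# Sketch of the serialization roundtrip exercised by test_python_marshall.
import json

from tests.databricks.sdk.service.jsonmarshallv2 import (NestedMessage,
                                                          OptionalFields,
                                                          TestEnum)

original = OptionalFields(
    optional_string="test",
    optional_int32=42,
    test_enum=TestEnum.TEST_ENUM_ONE,
    optional_message=NestedMessage(optional_string="nested_value"),
)

# Serialize to a plain dict; fields left as None are omitted from the body.
body = original.as_dict()
assert body == {
    "optional_string": "test",
    "optional_int32": 42,
    "test_enum": "TEST_ENUM_ONE",
    "optional_message": {"optional_string": "nested_value"},
}

# Deserialize from JSON and re-serialize; the roundtrip must reproduce the same dict.
roundtripped = OptionalFields.from_dict(json.loads(json.dumps(body)))
assert roundtripped.as_dict() == body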