From c0d7ee0d75628e4e2078b24ad332450af4abfa45 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 16:11:00 -0700 Subject: [PATCH 01/12] merge upstream branch into fork --- .codegen.json | 17 +- .codegen/__init__.py.tmpl | 194 -- .codegen/_openapi_sha | 2 +- .codegen/error_mapping.py.tmpl | 20 - .codegen/error_overrides.py.tmpl | 20 - .codegen/example.py.tmpl | 112 - .codegen/lib.tmpl | 12 - .codegen/service.py.tmpl | 419 --- .gitattributes | 1 + .github/PULL_REQUEST_TEMPLATE.md | 34 +- .github/workflows/external-message.yml | 59 + .github/workflows/integration-tests.yml | 90 + .github/workflows/push.yml | 34 +- .github/workflows/release-test.yml | 7 +- .github/workflows/release.yml | 7 +- CHANGELOG.md | 291 ++ databricks/sdk/__init__.py | 593 ++-- databricks/sdk/_base_client.py | 92 +- databricks/sdk/config.py | 18 + databricks/sdk/credentials_provider.py | 174 +- databricks/sdk/data_plane.py | 2 +- databricks/sdk/mixins/files.py | 185 +- databricks/sdk/mixins/jobs.py | 84 + databricks/sdk/mixins/open_ai_client.py | 60 +- databricks/sdk/retries.py | 6 +- databricks/sdk/service/apps.py | 421 ++- databricks/sdk/service/billing.py | 627 +++- databricks/sdk/service/catalog.py | 3025 +++++++++++++++-- databricks/sdk/service/cleanrooms.py | 1283 +++++++ databricks/sdk/service/compute.py | 2123 +++++++++++- databricks/sdk/service/dashboards.py | 1228 +++++-- databricks/sdk/service/files.py | 173 +- databricks/sdk/service/iam.py | 515 ++- databricks/sdk/service/jobs.py | 1814 +++++++++- databricks/sdk/service/marketplace.py | 689 ++++ databricks/sdk/service/ml.py | 1047 +++++- databricks/sdk/service/oauth2.py | 769 ++++- databricks/sdk/service/pipelines.py | 713 +++- databricks/sdk/service/provisioning.py | 455 +++ databricks/sdk/service/serving.py | 1262 +++++-- databricks/sdk/service/settings.py | 2025 ++++++++++- databricks/sdk/service/sharing.py | 1062 +++--- databricks/sdk/service/sql.py | 1481 +++++++- databricks/sdk/service/vectorsearch.py | 290 ++ 
databricks/sdk/service/workspace.py | 462 ++- databricks/sdk/useragent.py | 54 + databricks/sdk/version.py | 2 +- docs/account/billing/budget_policy.rst | 88 + docs/account/billing/budgets.rst | 2 +- docs/account/billing/index.rst | 1 + .../account/oauth2/custom_app_integration.rst | 14 +- docs/account/oauth2/federation_policy.rst | 105 + docs/account/oauth2/index.rst | 2 + .../service_principal_federation_policy.rst | 115 + .../oauth2/service_principal_secrets.rst | 9 +- docs/account/provisioning/workspaces.rst | 13 +- .../settings/csp_enablement_account.rst | 12 +- .../settings/disable_legacy_features.rst | 12 +- .../settings/enable_ip_access_lists.rst | 63 + .../settings/esm_enablement_account.rst | 12 +- docs/account/settings/index.rst | 1 + docs/account/settings/personal_compute.rst | 12 +- docs/account/settings/settings.rst | 6 + docs/dbdataclasses/apps.rst | 12 - docs/dbdataclasses/billing.rst | 33 + docs/dbdataclasses/catalog.rst | 204 +- docs/dbdataclasses/cleanrooms.rst | 158 + docs/dbdataclasses/compute.rst | 37 +- docs/dbdataclasses/dashboards.rst | 124 +- docs/dbdataclasses/iam.rst | 26 + docs/dbdataclasses/index.rst | 1 + docs/dbdataclasses/jobs.rst | 110 +- docs/dbdataclasses/marketplace.rst | 3 + docs/dbdataclasses/oauth2.rst | 16 +- docs/dbdataclasses/pipelines.rst | 33 + docs/dbdataclasses/provisioning.rst | 4 + docs/dbdataclasses/serving.rst | 85 +- docs/dbdataclasses/settings.rst | 141 + docs/dbdataclasses/sharing.rst | 138 +- docs/dbdataclasses/sql.rst | 8 +- docs/dbdataclasses/workspace.rst | 3 + docs/gen-client-docs.py | 32 +- docs/workspace/apps/apps.rst | 39 +- docs/workspace/catalog/catalogs.rst | 4 +- docs/workspace/catalog/credentials.rst | 193 ++ docs/workspace/catalog/external_locations.rst | 1 - docs/workspace/catalog/index.rst | 1 + docs/workspace/catalog/online_tables.rst | 19 +- .../workspace/catalog/storage_credentials.rst | 1 - docs/workspace/catalog/tables.rst | 5 +- .../cleanrooms/clean_room_assets.rst | 94 + 
.../cleanrooms/clean_room_task_runs.rst | 25 + docs/workspace/cleanrooms/clean_rooms.rst | 95 + docs/workspace/cleanrooms/index.rst | 12 + docs/workspace/compute/cluster_policies.rst | 3 +- docs/workspace/compute/clusters.rst | 105 +- docs/workspace/compute/instance_pools.rst | 3 +- docs/workspace/dashboards/genie.rst | 19 + docs/workspace/dashboards/index.rst | 4 +- docs/workspace/dashboards/lakeview.rst | 75 +- .../dashboards/lakeview_embedded.rst | 19 + docs/workspace/dashboards/query_execution.rst | 46 + docs/workspace/files/files.rst | 13 +- docs/workspace/iam/access_control.rst | 23 + docs/workspace/iam/index.rst | 1 + docs/workspace/iam/permissions.rst | 3 +- docs/workspace/iam/users.rst | 3 +- docs/workspace/index.rst | 1 + docs/workspace/jobs/jobs.rst | 77 +- docs/workspace/ml/experiments.rst | 3 +- docs/workspace/ml/model_registry.rst | 4 +- docs/workspace/pipelines/pipelines.rst | 27 +- docs/workspace/provisioning/credentials.rst | 123 + docs/workspace/provisioning/index.rst | 10 + docs/workspace/serving/serving_endpoints.rst | 58 +- ...aibi_dashboard_embedding_access_policy.rst | 64 + ...i_dashboard_embedding_approved_domains.rst | 65 + .../settings/automatic_cluster_update.rst | 12 +- .../settings/compliance_security_profile.rst | 12 +- docs/workspace/settings/default_namespace.rst | 12 +- .../settings/disable_legacy_access.rst | 12 +- .../settings/disable_legacy_dbfs.rst | 12 +- .../settings/enhanced_security_monitoring.rst | 12 +- docs/workspace/settings/index.rst | 2 + .../settings/notification_destinations.rst | 1 + .../settings/restrict_workspace_admins.rst | 12 +- docs/workspace/settings/settings.rst | 12 + docs/workspace/settings/token_management.rst | 5 +- docs/workspace/sharing/index.rst | 1 - docs/workspace/sharing/providers.rst | 6 +- docs/workspace/sharing/recipients.rst | 20 +- docs/workspace/sql/alerts.rst | 12 +- docs/workspace/sql/dashboards.rst | 4 +- docs/workspace/sql/index.rst | 1 + docs/workspace/sql/queries.rst | 12 +- 
docs/workspace/sql/query_visualizations.rst | 12 +- docs/workspace/sql/redash_config.rst | 14 + docs/workspace/sql/statement_execution.rst | 9 +- docs/workspace/sql/warehouses.rst | 3 +- docs/workspace/workspace/repos.rst | 5 +- docs/workspace/workspace/workspace.rst | 5 +- setup.py | 85 +- tests/integration/test_auth.py | 19 +- tests/integration/test_clusters.py | 4 +- tests/integration/test_dbutils.py | 25 +- tests/integration/test_jobs.py | 25 +- tests/test_base_client.py | 224 +- tests/test_config.py | 5 + tests/test_core.py | 29 +- tests/test_credentials_provider.py | 145 + tests/test_data_plane.py | 2 +- tests/test_files.py | 340 ++ tests/test_jobs_mixin.py | 263 ++ tests/test_model_serving_auth.py | 73 +- tests/test_open_ai_mixin.py | 21 + tests/test_user_agent.py | 44 + 156 files changed, 24634 insertions(+), 3804 deletions(-) delete mode 100644 .codegen/__init__.py.tmpl delete mode 100644 .codegen/error_mapping.py.tmpl delete mode 100644 .codegen/error_overrides.py.tmpl delete mode 100644 .codegen/example.py.tmpl delete mode 100644 .codegen/lib.tmpl delete mode 100644 .codegen/service.py.tmpl create mode 100644 .github/workflows/external-message.yml create mode 100644 .github/workflows/integration-tests.yml create mode 100644 databricks/sdk/mixins/jobs.py create mode 100755 databricks/sdk/service/cleanrooms.py create mode 100644 docs/account/billing/budget_policy.rst create mode 100644 docs/account/oauth2/federation_policy.rst create mode 100644 docs/account/oauth2/service_principal_federation_policy.rst create mode 100644 docs/account/settings/enable_ip_access_lists.rst create mode 100644 docs/dbdataclasses/cleanrooms.rst create mode 100644 docs/workspace/catalog/credentials.rst create mode 100644 docs/workspace/cleanrooms/clean_room_assets.rst create mode 100644 docs/workspace/cleanrooms/clean_room_task_runs.rst create mode 100644 docs/workspace/cleanrooms/clean_rooms.rst create mode 100644 docs/workspace/cleanrooms/index.rst create mode 100644 
docs/workspace/dashboards/lakeview_embedded.rst create mode 100644 docs/workspace/dashboards/query_execution.rst create mode 100644 docs/workspace/iam/access_control.rst create mode 100644 docs/workspace/provisioning/credentials.rst create mode 100644 docs/workspace/provisioning/index.rst create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst create mode 100644 docs/workspace/sql/redash_config.rst create mode 100644 tests/test_credentials_provider.py create mode 100644 tests/test_files.py create mode 100644 tests/test_jobs_mixin.py diff --git a/.codegen.json b/.codegen.json index a1886bd80..3a880d1a9 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1,20 +1,6 @@ { - "formatter": "yapf -pri $FILENAMES && autoflake -i $FILENAMES && isort $FILENAMES", + "mode": "py_v0", "changelog_config": ".codegen/changelog_config.yml", - "template_libraries": [ - ".codegen/lib.tmpl" - ], - "packages": { - ".codegen/service.py.tmpl": "databricks/sdk/service/{{.Name}}.py" - }, - "batch": { - ".codegen/__init__.py.tmpl": "databricks/sdk/__init__.py", - ".codegen/error_mapping.py.tmpl": "databricks/sdk/errors/platform.py", - ".codegen/error_overrides.py.tmpl": "databricks/sdk/errors/overrides.py" - }, - "samples": { - ".codegen/example.py.tmpl": "examples/{{if .IsAccount}}account{{else}}workspace{{end}}/{{.Service.SnakeName}}/{{.Method.SnakeName}}_{{.SnakeName}}.py" - }, "version": { "databricks/sdk/version.py": "__version__ = '$VERSION'" }, @@ -28,6 +14,7 @@ "pip install '.[dev]'" ], "post_generate": [ + "make fmt", "pytest -m 'not integration' --cov=databricks --cov-report html tests", "pip install .", "python docs/gen-client-docs.py" diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl deleted file mode 100644 index d54e9dfff..000000000 --- a/.codegen/__init__.py.tmpl +++ /dev/null @@ -1,194 +0,0 @@ -import databricks.sdk.core as client -import 
databricks.sdk.dbutils as dbutils -from databricks.sdk.credentials_provider import CredentialsStrategy - -from databricks.sdk.mixins.files import DbfsExt -from databricks.sdk.mixins.compute import ClustersExt -from databricks.sdk.mixins.workspace import WorkspaceExt -from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt -{{- range .Services}} -from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}} -from databricks.sdk.service.provisioning import Workspace -from databricks.sdk import azure -from typing import Optional - -{{$args := list "host" "account_id" "username" "password" "client_id" "client_secret" - "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret" - "azure_client_id" "azure_tenant_id" "azure_environment" "auth_type" "cluster_id" - "google_credentials" "google_service_account" }} - -{{- define "api" -}} - {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsAPI" "ServingEndpointsExt" -}} - {{- $genApi := concat .PascalName "API" -}} - {{- getOrDefault $mixins $genApi $genApi -}} -{{- end -}} - -def _make_dbutils(config: client.Config): - # We try to directly check if we are in runtime, instead of - # trying to import from databricks.sdk.runtime. This is to prevent - # remote dbutils from being created without the config, which is both - # expensive (will need to check all credential providers) and can - # throw errors (when no env vars are set). - try: - from dbruntime import UserNamespaceInitializer - except ImportError: - return dbutils.RemoteDbUtils(config) - - # We are in runtime, so we can use the runtime dbutils - from databricks.sdk.runtime import dbutils as runtime_dbutils - return runtime_dbutils - - -class WorkspaceClient: - """ - The WorkspaceClient is a client for the workspace-level Databricks REST API. 
- """ - def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - config: Optional[client.Config] = None): - if not config: - config = client.Config({{range $args}}{{.}}={{.}}, {{end}} - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version) - self._config = config.copy() - self._dbutils = _make_dbutils(self._config) - self._api_client = client.ApiClient(self._config) - - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} - {{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} - - {{- range .Services}} - {{- if and (not .IsAccounts) (not .HasParent)}} - {{- if .IsDataPlane}} - self._{{.SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) - {{- else if .HasDataPlaneAPI}} - self._{{.SnakeName}} = {{.SnakeName}} - {{- else}} - self._{{.SnakeName}} = {{template "api" .}}(self._api_client) - {{- end -}} - {{- end -}} - {{end}} - - @property - def config(self) -> client.Config: - return self._config - - @property - def api_client(self) -> client.ApiClient: - return self._api_client - - @property - def dbutils(self) -> dbutils.RemoteDbUtils: - return self._dbutils - - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent)}} - @property - def {{.SnakeName}}(self) -> {{template "api" .}}: - {{if .Description}}"""{{.Summary}}"""{{end}} - return self._{{.SnakeName}} - {{end -}}{{end}} - - def get_workspace_id(self) -> int: - """Get the workspace ID of the workspace that this client is connected to.""" - response = 
self._api_client.do("GET", - "/api/2.0/preview/scim/v2/Me", - response_headers=['X-Databricks-Org-Id']) - return int(response["X-Databricks-Org-Id"]) - - def __repr__(self): - return f"WorkspaceClient(host='{self._config.host}', auth_type='{self._config.auth_type}', ...)" - -class AccountClient: - """ - The AccountClient is a client for the account-level Databricks REST API. - """ - - def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - config: Optional[client.Config] = None): - if not config: - config = client.Config({{range $args}}{{.}}={{.}}, {{end}} - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version) - self._config = config.copy() - self._api_client = client.ApiClient(self._config) - - {{- range .Services}}{{if and .IsAccounts (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} - {{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} - - {{- range .Services}} - {{- if and .IsAccounts (not .HasParent)}} - {{- if .IsDataPlane}} - self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) - {{- else if .HasDataPlaneAPI}} - self._{{(.TrimPrefix "account").SnakeName}} = {{(.TrimPrefix "account").SnakeName}} - {{- else}} - self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client) - {{- end -}} - {{- end -}} - {{end}} - - @property - def config(self) -> client.Config: - return self._config - - @property - def api_client(self) -> client.ApiClient: - return self._api_client - - {{- range 
.Services}}{{if and .IsAccounts (not .HasParent)}} - @property - def {{(.TrimPrefix "account").SnakeName}}(self) -> {{template "api" .}}:{{if .Description}} - """{{.Summary}}"""{{end}} - return self._{{(.TrimPrefix "account").SnakeName}} - {{end -}}{{end}} - - def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: - """Constructs a ``WorkspaceClient`` for the given workspace. - - Returns a ``WorkspaceClient`` that is configured to use the same - credentials as this ``AccountClient``. The underlying config is - copied from this ``AccountClient``, but the ``host`` and - ``azure_workspace_resource_id`` are overridden to match the - given workspace, and the ``account_id`` field is cleared. - - Usage: - - .. code-block:: - - wss = list(a.workspaces.list()) - if len(wss) == 0: - pytest.skip("no workspaces") - w = a.get_workspace_client(wss[0]) - assert w.current_user.me().active - - :param workspace: The workspace to construct a client for. - :return: A ``WorkspaceClient`` for the given workspace. - """ - config = self._config.deep_copy() - config.host = config.environment.deployment_url(workspace.deployment_name) - config.azure_workspace_resource_id = azure.get_azure_resource_id(workspace) - config.account_id = None - config.init_auth() - return WorkspaceClient(config=config) - - def __repr__(self): - return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)" diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 2d9cb6d86..2a9a021e0 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -cf9c61453990df0f9453670f2fe68e1b128647a2 \ No newline at end of file +99f644e72261ef5ecf8d74db20f4b7a1e09723cc diff --git a/.codegen/error_mapping.py.tmpl b/.codegen/error_mapping.py.tmpl deleted file mode 100644 index b3cc8cea6..000000000 --- a/.codegen/error_mapping.py.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -from .base import DatabricksError - -{{range .ExceptionTypes}} -class {{.PascalName}}({{if .Inherit -}} - {{.Inherit.PascalName}} - {{- else -}} - DatabricksError - {{- end -}}): - """{{.Comment " " 100 | trimSuffix "\"" }}""" -{{end}} - -STATUS_CODE_MAPPING = { {{range .ErrorStatusCodeMapping}} - {{.StatusCode}}: {{.PascalName}},{{- end}} -} - -ERROR_CODE_MAPPING = { {{range .ErrorCodeMapping}} - '{{.ErrorCode}}': {{.PascalName}},{{- end}} -} diff --git a/.codegen/error_overrides.py.tmpl b/.codegen/error_overrides.py.tmpl deleted file mode 100644 index adcfea555..000000000 --- a/.codegen/error_overrides.py.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -from .base import _ErrorOverride -from .platform import * -import re - - -_ALL_OVERRIDES = [ - {{ range .ErrorOverrides -}} - _ErrorOverride( - debug_name="{{.Name}}", - path_regex=re.compile(r'{{.PathRegex}}'), - verb="{{.Verb}}", - status_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .StatusCodeMatcher}}'), - error_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .ErrorCodeMatcher}}'), - message_matcher=re.compile(r'{{replaceAll "'" "\\'" .MessageMatcher}}'), - custom_error={{.OverrideErrorCode.PascalName}}, - ), -{{- end }} -] diff --git a/.codegen/example.py.tmpl b/.codegen/example.py.tmpl deleted file mode 100644 index dba71d9bf..000000000 --- a/.codegen/example.py.tmpl +++ /dev/null @@ -1,112 +0,0 @@ -from databricks.sdk import {{if .IsAccount}}AccountClient{{else}}WorkspaceClient{{end}} -from databricks.sdk.service import _internal{{range .Suite.ServiceToPackage}}, {{.}}{{end}} -import time, base64, os - -{{$example := .}} -{{if .IsAccount}}a{{else}}w{{end}} = {{if .IsAccount}}Account{{else}}Workspace{{end}}Client() - -{{range .Init}} -{{.SnakeName}} = {{template "expr" .Value}} -{{end}} - -{{range .Calls}} -{{if .Service -}} - {{template "svc-call" .}} -{{- else -}} - {{with .Assign}}{{.SnakeName}} = {{end}}{{template "expr" .}} 
-{{- end}} -{{end}} - -{{with .Cleanup}} -# cleanup -{{range . -}} - {{template "svc-call" .}} -{{end}} -{{end}} - -{{define "svc-call" -}} - {{with .Assign}}{{.SnakeName}} = {{end}}{{if .IsAccount}}a{{else}}w{{end}}.{{.Service.SnakeName}}.{{.Original.SnakeName}}{{if eq .Original.SnakeName "import"}}_{{end}}({{template "method-args" .}}) - {{- if .IsWait}}.result(){{end}} -{{- end}} - -{{define "expr" -}} -{{- if eq .Type "binary" -}} - {{template "expr" .Left}} {{.Op}} {{template "expr" .Right}} -{{- else if eq .Type "index" -}} - {{template "expr" .Left}}[{{template "expr" .Right}}] -{{- else if eq .Type "boolean" -}} - {{if .Value}}True{{else}}False{{end}} -{{- else if eq .Type "heredoc" -}} -"""{{.Value}}""" -{{- else if eq .Type "literal" -}} - {{.Value}} -{{- else if eq .Type "lookup" -}} - {{template "expr" .X}}.{{.Field.SnakeName}} -{{- else if eq .Type "enum" -}} - {{.Package}}.{{.Entity.PascalName}}.{{.ConstantName}} -{{- else if eq .Type "variable" -}} - {{if eq .SnakeName "true"}}True - {{- else if eq .SnakeName "false"}}False - {{else}}{{.SnakeName}}{{end}} -{{- else if eq .Type "entity" -}} - {{.Package}}.{{.PascalName}}({{template "kwargs" .FieldValues}}) -{{- else if eq .Type "call" -}} - {{template "call" .}} -{{- else if eq .Type "map" -}} - { {{range .Pairs}}{{template "expr" .Key}}: {{template "expr" .Value}},{{end}} } -{{- else if eq .Type "array" -}} - [ {{range $i, $x := .Values}}{{if $i}}, {{end}}{{template "expr" .}}{{end}} ] -{{- else -}} - /* UNKNOWN: {{.Type}} */ -{{- end -}} -{{- end}} - -{{define "method-args" -}} - {{with .Request -}} - {{template "kwargs" .}} - {{- else -}} - {{template "args" .}} - {{- end}} -{{- end}} - -{{define "kwargs" -}} - {{range $i, $x := . 
-}} - {{if $i}}, {{end}}{{.SnakeName}}={{template "expr" .Value}} - {{- end}} -{{- end}} - -{{define "args" -}} - {{range $i, $x := .Args -}} - {{if $i}}, {{end}}{{template "expr" .}} - {{- end}} -{{- end}} - -{{define "call" -}} -{{- if eq .PascalName "GetEnvOrSkipTest" -}} -os.environ[{{template "args" .}}] -{{- else if eq .PascalName "Dir" -}} -os.path.dirname({{template "args" .}}) -{{- else if eq .PascalName "Sprintf" -}} -{{range $i, $x := .Args}}{{if eq $i 0}}{{template "expr" .}} % ({{else}} {{if gt $i 1}}, {{end}} {{template "expr" .}} {{end}}{{end}}) -{{- else if eq .PascalName "MustParseInt64" -}} -{{template "args" .}} -{{- else if eq .PascalName "RandomEmail" -}} -f'sdk-{time.time_ns()}@example.com' -{{- else if eq .PascalName "RandomName" -}} -f'sdk-{time.time_ns()}' -{{- else if eq .PascalName "RandomHex" -}} -hex(time.time_ns())[2:] -{{- else if eq .PascalName "EncodeToString" -}} -base64.b64encode({{template "args" .}}.encode()).decode() -{{- else if eq .PascalName "CanonicalHostName" -}} -w.config.host -{{- else if eq .PascalName "SharedRunningCluster" -}} -w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] -{{- else if eq .PascalName "DltNotebook" -}} -"CREATE LIVE TABLE dlt_sample AS SELECT 1" -{{- else if eq .PascalName "MyNotebookPath" -}} -f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' -{{- else -}} -{{.SnakeName}}({{range $i, $x := .Args}}{{if $i}}, {{end}}{{template "expr" .}}{{end}}) -{{- end -}} -{{- end}} diff --git a/.codegen/lib.tmpl b/.codegen/lib.tmpl deleted file mode 100644 index 50233ca08..000000000 --- a/.codegen/lib.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -{{ define "safe-name" -}} - {{/* https://docs.python.org/3/reference/lexical_analysis.html#keywords */}} - {{- $keywords := list "False" "await" "else" "import" "pass" "None" "break" "except" "in" "raise" - "True" "class" "finally" "is" "return" "and" "continue" "for" "lambda" "try" - "as" "def" "from" 
"nonlocal" "while" "assert" "del" "global" "not" "with" - "async" "elif" "if" "or" "yield" -}} - {{.}}{{ if in $keywords . }}_{{ end }} -{{- end}} - -{{ define "safe-snake-name" -}} - {{ template "safe-name" .SnakeName }} -{{- end}} diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl deleted file mode 100644 index 4307e0913..000000000 --- a/.codegen/service.py.tmpl +++ /dev/null @@ -1,419 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -from __future__ import annotations -from dataclasses import dataclass -from datetime import timedelta -from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests - -from ..data_plane import DataPlaneService -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token - -_LOG = logging.getLogger('databricks.sdk') - -{{range .ImportedEntities}} -from databricks.sdk.service import {{.Package.Name}}{{end}} - -# all definitions in this file are in alphabetical order -{{range .Types}} -{{if or .Fields .IsEmpty -}}{{if not .IsRequest}}@dataclass -class {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:{{if .Description}} - """{{.Comment " " 100}}""" - {{end}} - {{- range .RequiredFields}} - {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{if .Description}} - """{{.Comment " " 100 | trimSuffix "\""}}"""{{end}} - {{end}} - {{- range .NonRequiredFields}} - {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{if .Description}} - """{{.Comment " " 100 | trimSuffix "\""}}"""{{end}} - {{end}} - {{if or .IsEmpty .HasJsonField .HasHeaderField .HasByteStreamField -}} - def as_dict(self) -> dict: - """Serializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} into a dictionary suitable for use 
as a JSON request body.""" - body = {} - {{range .Fields}}if self.{{template "safe-snake-name" .}}{{with .Entity.IsPrimitive}} is not None{{end}}: body['{{.Name}}'] = {{template "as_request_type" .}} - {{end -}} - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}: - """Deserializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} from a dictionary.""" - return cls({{range $i, $f := .Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" $f}}={{template "from_dict_type" $f}}{{end}}) - {{end}} -{{end}} -{{else if .ArrayValue}}type {{.PascalName}} []{{template "type" .ArrayValue}} -{{else if .MapValue}}{{.PascalName}} = {{template "type" .}} -{{else if .Enum}}class {{.PascalName}}(Enum): - {{if .Description}}"""{{.Comment " " 100 | trimSuffix "\"" }}"""{{end}} - {{range .Enum }} - {{.ConstantName}} = '{{.Content}}'{{end}}{{end}} -{{end}} -{{- define "from_dict_type" -}} - {{- if not .Entity }}None - {{- else if .Entity.ArrayValue }} - {{- if (or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal) }}_repeated_dict(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}}) - {{- else if .Entity.ArrayValue.Enum }}_repeated_enum(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}}) - {{- else}}d.get('{{.Name}}', None){{- end -}} - {{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}_from_dict(d, '{{.Name}}', {{template "type" .Entity}}) - {{- else if .Entity.Enum }}_enum(d, '{{.Name}}', {{template "type" .Entity}}) - {{- else if and .IsHeader (or .Entity.IsInt64 .Entity.IsInt) }} int(d.get('{{.Name}}', None)) - {{- else}}d.get('{{.Name}}', None){{- end -}} -{{- end -}} -{{- define "as_request_type" -}} - {{- if not .Entity }}None # ERROR: No Type - {{- /* This should be done recursively, but recursion in text templates is not supported. 
*/ -}} - {{- else if .Entity.ArrayValue }}[{{if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal}}v.as_dict(){{ else if .Entity.ArrayValue.Enum }}v.value{{else}}v{{end}} for v in self.{{template "safe-snake-name" .}}] - {{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}self.{{template "safe-snake-name" .}}.as_dict() - {{- else if .Entity.Enum }}self.{{template "safe-snake-name" .}}.value - {{- else}}self.{{template "safe-snake-name" .}}{{- end -}} -{{- end -}} -{{- define "type" -}} - {{- if not . }}any # ERROR: No Type - {{- else if .IsExternal }}{{.Package.Name}}.{{.PascalName}} - {{- else if .ArrayValue }}List[{{template "type" .ArrayValue}}] - {{- else if .MapValue }}Dict[str,{{template "type" .MapValue}}] - {{- else if .IsObject }}{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} - {{- else if .Enum }}{{.PascalName}} - {{- else if .IsString}}str - {{- else if .IsAny}}Any - {{- else if .IsEmpty}}{{.PascalName}} - {{- else if .IsBool}}bool - {{- else if .IsInt64}}int - {{- else if .IsFloat64}}float - {{- else if .IsInt}}int - {{- else if .IsByteStream}}BinaryIO - {{- else}}any /* MISSING TYPE */ - {{- end -}} -{{- end -}} - -{{- define "type-doc" -}} - {{- if .IsExternal }}:class:`{{.PascalName}}` - {{- else if .IsEmpty}}:class:`{{template "type" .}}` - {{- else if .ArrayValue }}List[{{template "type-doc" .ArrayValue}}] - {{- else if .MapValue }}Dict[str,{{template "type-doc" .MapValue}}] - {{- else if .IsObject }}:class:`{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}` - {{- else if .Enum }}:class:`{{.PascalName}}` - {{- else}}{{template "type" . 
}} - {{- end -}} -{{- end -}} - -{{range .Services}} -class {{.PascalName}}API:{{if .Description}} - """{{.Comment " " 110}}""" - {{end}} - def __init__(self, api_client{{if .IsDataPlane}}, control_plane{{end}}): - self._api = api_client - {{if .IsDataPlane -}} - self._control_plane = control_plane - self._data_plane_service = DataPlaneService() - {{end -}} - {{range .Subservices}} - self._{{.SnakeName}} = {{.PascalName}}API(self._api){{end}} - - {{range .Subservices}} - @property - def {{.SnakeName}}(self) -> {{.PascalName}}API: - {{if .Description}}"""{{.Summary}}"""{{end}} - return self._{{.SnakeName}} - {{end}} - - {{range .Waits}} - def {{template "safe-snake-name" .}}(self{{range .Binding}}, {{template "safe-snake-name" .PollField}}: {{template "type" .PollField.Entity}}{{end}}, - timeout=timedelta(minutes={{.Timeout}}), callback: Optional[Callable[[{{.Poll.Response.PascalName}}], None]] = None) -> {{.Poll.Response.PascalName}}: - deadline = time.time() + timeout.total_seconds() - target_states = ({{range .Success}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{if .Failure}} - failure_states = ({{range .Failure}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{end}} - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.{{template "safe-snake-name" .Poll}}({{range $i, $b := .Binding}}{{if $i}}, {{end}}{{template "safe-snake-name" .PollField}}={{template "safe-snake-name" .PollField}}{{- end}}) - status = poll{{range .StatusPath}}.{{template "safe-snake-name" .}}{{end}} - {{if .ComplexMessagePath -}} - status_message = f'current status: {status}' - if poll.{{template "safe-snake-name" .MessagePathHead}}: - status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}} - {{- else if .MessagePath -}} - status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}} - {{- else -}} - status_message = f'current status: {status}' - {{- end}} - if status in target_states: - return poll - if callback: - callback(poll) - {{if .Failure -}} - if status in failure_states: - msg = f'failed to reach {{range $i, $e := .Success}}{{if $i}} or {{end}}{{$e.Content}}{{end}}, got {status}: {status_message}' - raise OperationFailed(msg) - {{end}}prefix = f"{{range $i, $b := .Binding}}{{if $i}}, {{end -}} - {{template "safe-snake-name" .PollField}}={{"{"}}{{template "safe-snake-name" .PollField}}{{"}"}} - {{- end}}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - {{end}} - - {{range .Methods}} - def {{template "safe-snake-name" .}}({{ template "method-parameters" . 
}}){{template "method-return-type" .}}: - {{if .Description}}"""{{.Comment " " 110 | trimSuffix "\"" }} - {{with .Request}}{{range .RequiredFields}} - :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}}{{if .Description}} - {{.Comment " " 110 | trimSuffix "\"" }}{{end}} - {{- end}}{{range .NonRequiredFields}} - :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}} (optional){{if .Description}} - {{.Comment " " 110 | trimSuffix "\"" }}{{end}} - {{- end}} - {{end}} - {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}} - :returns: - Long-running operation waiter for {{template "type-doc" .Wait.Poll.Response}}. - See :method:{{template "safe-snake-name" .Wait}} for more details. - {{- else if not .Response.IsEmpty }}:returns: {{if .Response.ArrayValue -}} - Iterator over {{template "type-doc" .Response.ArrayValue}} - {{- else if .Pagination -}} - Iterator over {{template "type-doc" .Pagination.Entity}} - {{- else -}} - {{template "type-doc" .Response}} - {{- end}}{{end}} - """{{end}} - {{if .Request -}} - {{template "method-serialize" .}} - {{- end}} - {{- if .Service.IsDataPlane}} - {{template "data-plane" .}} - {{- end}} - {{template "method-headers" . }} - {{if .Response.HasHeaderField -}} - {{template "method-response-headers" . }} - {{- end}} - {{template "method-call" .}} - - {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }} - def {{.SnakeName}}_and_wait({{ template "method-parameters" . 
}}, - timeout=timedelta(minutes={{.Wait.Timeout}})) -> {{.Wait.Poll.Response.PascalName}}: - return self.{{template "safe-snake-name" .}}({{range $i, $x := .Request.Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" .}}={{template "safe-snake-name" .}}{{end}}).result(timeout=timeout) - {{end}} - {{end -}} -{{- end}} - -{{define "data-plane" -}} - def info_getter(): - response = self._control_plane.{{.Service.DataPlaneInfoMethod.SnakeName}}( - {{- range .Service.DataPlaneInfoMethod.Request.Fields }} - {{.SnakeName}} = {{.SnakeName}}, - {{- end}} - ) - if response.{{(index .DataPlaneInfoFields 0).SnakeName}} is None: - raise Exception("Resource does not support direct Data Plane access") - return response{{range .DataPlaneInfoFields}}.{{.SnakeName}}{{end}} - - get_params = [{{- range .Service.DataPlaneInfoMethod.Request.Fields }}{{.SnakeName}},{{end}}] - data_plane_details = self._data_plane_service.get_data_plane_details('{{.SnakeName}}', get_params, info_getter, self._api.get_oauth_token) - token = data_plane_details.token - - def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: - authorization = f"{token.token_type} {token.access_token}" - r.headers["Authorization"] = authorization - return r -{{- end}} - -{{define "method-parameters" -}} - self{{if .Request}} - {{- if .Request.MapValue }}, contents: {{template "type" .Request }}{{ end }} - {{range .Request.RequiredFields}}, {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{end}} - {{if .Request.NonRequiredFields}}, * - {{range .Request.NonRequiredFields}}, {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{end}} - {{- end}} - {{- end}} -{{- end}} - -{{define "method-serialize" -}} - {{if or .Request.HasJsonField .Request.HasQueryField -}} - {{if .Request.HasJsonField}}body = {}{{end}}{{if .Request.HasQueryField}} - query = {}{{end}} - {{- range .Request.Fields}}{{ if and (not .IsPath) (not .IsHeader) }} - {{- if .IsQuery }} - if {{template 
"safe-snake-name" .}} is not None: query['{{.Name}}'] = {{template "method-param-bind" .}}{{end}} - {{- if .IsJson }} - if {{template "safe-snake-name" .}} is not None: body['{{.Name}}'] = {{template "method-param-bind" .}}{{end}} - {{- end}} - {{- end}} - {{- end}} -{{- end}} - -{{ define "method-headers" -}} - headers = { - {{- range $k, $v := .FixedRequestHeaders}}'{{ $k }}': '{{ $v }}',{{ end -}} - } -{{- end }} - -{{ define "method-response-headers" -}} - response_headers = [ - {{- range $h := .ResponseHeaders}}'{{ $h.Name }}',{{ end -}} - ] -{{- end }} - -{{- define "method-param-bind" -}} - {{- if not .Entity }}None # ERROR: No Type - {{- else if .Entity.ArrayValue }}[ - {{- if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal -}}v.as_dict() - {{- else if .Entity.ArrayValue.Enum -}}v.value - {{- else}}v{{end}} for v in {{template "safe-snake-name" .}}] - {{- else if .Entity.IsObject }}{{template "safe-snake-name" .}}.as_dict() - {{- else if .Entity.Enum }}{{template "safe-snake-name" .}}.value - {{- else}}{{template "safe-snake-name" .}}{{- end -}} -{{- end -}} - -{{define "method-call" -}} - {{if .Pagination -}}{{template "method-call-paginated" .}} - {{- else if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}{{template "method-call-retried" .}} - {{- else}}{{template "method-call-default" .}}{{end}} -{{- end}} - -{{define "method-call-retried" -}} - {{if .Response}}op_response = {{end}}{{template "method-do" .}} - return Wait(self.{{template "safe-snake-name" .Wait}} - {{if .Response}}, response = {{.Response.PascalName}}.from_dict(op_response){{end}} - {{range .Wait.Binding}}, {{template "safe-snake-name" .PollField}}={{if .IsResponseBind}}op_response['{{.Bind.Name}}']{{else}}{{template "safe-snake-name" .Bind}}{{end}} - {{- end}}) -{{- end}} - -{{define "method-call-paginated" -}} - {{if .Pagination.MultiRequest}} - {{if .NeedsOffsetDedupe -}} - # deduplicate items that may have been added during iteration - seen = set() 
- {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) }} - query['{{.Pagination.Offset.Name}}'] = - {{- if eq .Pagination.Increment 1 -}} - 1 - {{- else if contains .Path "/scim/v2/" -}} - 1 - {{- else -}} - 0 - {{- end}}{{end}}{{if and .Pagination.Limit (contains .Path "/scim/v2/")}} - if "{{.Pagination.Limit.Name}}" not in query: query['{{.Pagination.Limit.Name}}'] = 100 - {{- end}} - while True: - json = {{template "method-do" .}} - if '{{.Pagination.Results.Name}}' in json: - for v in json['{{.Pagination.Results.Name}}']: - {{if .NeedsOffsetDedupe -}} - i = v['{{.IdentifierField.Name}}'] - if i in seen: - continue - seen.add(i) - {{end -}} - yield {{.Pagination.Entity.PascalName}}.from_dict(v) - {{ if .Pagination.Token -}} - if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']: - return - {{if or (eq "GET" .Verb) (eq "HEAD" .Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}'] - {{- else if eq .Path "/api/2.1/clusters/events" -}} - if 'next_page' not in json or not json['next_page']: - return - body = json['next_page'] - {{- else -}} - if '{{.Pagination.Results.Name}}' not in json or not json['{{.Pagination.Results.Name}}']: - return - {{ if eq .Pagination.Increment 1 -}} - query['{{.Pagination.Offset.Name}}'] += 1 - {{- else -}} - query['{{.Pagination.Offset.Name}}'] += len(json['{{.Pagination.Results.Name}}']) - {{- end}} - {{- end}} - {{else -}} - json = {{template "method-do" .}} - parsed = {{.Response.PascalName}}.from_dict(json).{{template "safe-snake-name" .Pagination.Results}} - return parsed if parsed is not None else [] - {{end}} -{{- end}} - -{{define "method-call-default" -}} - {{if not .Response.IsEmpty -}} - res = {{end}}{{template "method-do" .}} - {{if not .Response.IsEmpty -}} - {{- if .Response.ArrayValue -}} - return [{{.Response.ArrayValue.PascalName}}.from_dict(v) for v in res] - {{- else if .Response.MapValue 
-}} - return res - {{- else -}} - return {{template "type" .Response}}.from_dict(res) - {{- end}} - {{- end}} -{{- end}} - -{{define "method-do" -}} - self._api.do('{{.Verb}}', - {{- if .Service.IsDataPlane -}} - url=data_plane_details.endpoint_url - {{- else -}} - {{ template "path" . }} - {{- end -}} - {{if .Request}} - {{- if .Request.HasQueryField}}, query=query{{end}} - {{- if .Request.MapValue}}, body=contents - {{- else if .Request.HasJsonField}}, body=body{{end}} - {{end}} - , headers=headers - {{if .Response.HasHeaderField -}} - , response_headers=response_headers - {{- end}} - {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }} - {{- if .Service.IsDataPlane -}} - ,auth=auth - {{- end -}} - {{- if .IsResponseByteStream }}, raw=True{{ end }}) -{{- end}} - -{{- define "path" -}} -{{- if .PathParts -}} - f'{{range .PathParts -}} - {{- .Prefix -}} - {{- if .Field -}} - {{- "{" -}} - {{- if .Field.IsPathMultiSegment -}}_escape_multi_segment_path_parameter({{ template "path-parameter" . }}) - {{- else -}}{{ template "path-parameter" . 
}} - {{- end -}} - {{- "}" -}} - {{- else if .IsAccountId}} - {{- "{" -}} - self._api.account_id - {{- "}" -}} - {{- end -}} - {{- end }}' -{{- else -}} - '{{.Path}}' -{{- end -}} -{{- end -}} - -{{- define "path-parameter" -}} - {{template "safe-snake-name" .Field}}{{with .Field.Entity.Enum}}.value{{end}} -{{- end -}} - -{{define "method-return-type" -}} - {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }} -> Wait[{{.Wait.Poll.Response.PascalName}}] - {{- else if not .Response.IsEmpty }} -> {{if .Response.ArrayValue -}} - Iterator[{{template "type" .Response.ArrayValue}}] - {{- else if .Pagination -}} - Iterator[{{template "type" .Pagination.Entity}}] - {{- else -}} - {{- if .Response.IsExternal -}} - {{.Response.Package.Name}}.{{.Response.PascalName}} - {{- else -}} - {{.Response.PascalName}} - {{- end -}} - {{- end}}{{end}} -{{- end}} diff --git a/.gitattributes b/.gitattributes index c8e5b2f0b..a0bfc0940 100755 --- a/.gitattributes +++ b/.gitattributes @@ -4,6 +4,7 @@ databricks/sdk/errors/platform.py linguist-generated=true databricks/sdk/service/apps.py linguist-generated=true databricks/sdk/service/billing.py linguist-generated=true databricks/sdk/service/catalog.py linguist-generated=true +databricks/sdk/service/cleanrooms.py linguist-generated=true databricks/sdk/service/compute.py linguist-generated=true databricks/sdk/service/dashboards.py linguist-generated=true databricks/sdk/service/files.py linguist-generated=true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index e2d7ab0db..91e519ede 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,12 +1,28 @@ -## Changes - +## What changes are proposed in this pull request? -## Tests - +Provide the readers and reviewers with the information they need to understand +this PR in a comprehensive manner. 
-- [ ] `make test` run locally -- [ ] `make fmt` applied -- [ ] relevant integration tests applied +Specifically, try to answer the two following questions: +- **WHAT** changes are being made in the PR? This should be a summary of the + major changes to allow the reader to quickly understand the PR without having + to look at the code. +- **WHY** are these changes needed? This should provide the context that the + reader might be missing. For example, were there any decisions behind the + change that are not reflected in the code itself? + +The “why part” is the most important of the two as it usually cannot be +inferred from the code itself. A well-written PR description will help future +developers (including your future self) to know how to interact and update your +code. + +## How is this tested? + +Describe any tests you have done; especially if these tests are not part of +the unit tests (e.g. local tests). + +**ALWAYS ANSWER THIS QUESTION:** Answer with "N/A" if tests are not applicable +to your PR (e.g. if the PR only modifies comments). Do not be afraid of +answering "Not tested" if the PR has not been tested. Being clear about what +has been done and not done provides important context to the reviewers. \ No newline at end of file diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml new file mode 100644 index 000000000..6771057c7 --- /dev/null +++ b/.github/workflows/external-message.yml @@ -0,0 +1,59 @@ +name: PR Comment + +# WARNING: +# THIS WORKFLOW ALWAYS RUNS FOR EXTERNAL CONTRIBUTORS WITHOUT ANY APPROVAL. +# THIS WORKFLOW RUNS FROM MAIN BRANCH, NOT FROM THE PR BRANCH. +# DO NOT PULL THE PR OR EXECUTE ANY CODE FROM THE PR.
+ +on: + pull_request_target: + types: [opened, reopened, synchronize] + branches: + - main + +jobs: + comment-on-pr: + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Delete old comments + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Delete previous comment if it exists + previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \ + --jq '.[] | select(.body | startswith("")) | .id') + echo "Previous comment IDs: $previous_comment_ids" + # Iterate over each comment ID and delete the comment + if [ ! -z "$previous_comment_ids" ]; then + echo "$previous_comment_ids" | while read -r comment_id; do + echo "Deleting comment with ID: $comment_id" + gh api "repos/${{ github.repository }}/issues/comments/$comment_id" -X DELETE + done + fi + + - name: Comment on PR + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMMIT_SHA: ${{ github.event.pull_request.head.sha }} + run: | + gh pr comment ${{ github.event.pull_request.number }} --body \ + " + If integration tests don't run automatically, an authorized user can run them manually by following the instructions below: + + Trigger: + [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py) + + Inputs: + * PR number: ${{github.event.pull_request.number}} + * Commit SHA: \`${{ env.COMMIT_SHA }}\` + + Checks will be approved automatically on success. 
+ " diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 000000000..c308cc03c --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,90 @@ +name: Integration Tests + +on: + + pull_request: + types: [opened, synchronize] + + merge_group: + + +jobs: + check-token: + name: Check secrets access + + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + + environment: "test-trigger-is" + outputs: + has_token: ${{ steps.set-token-status.outputs.has_token }} + steps: + - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set + id: set-token-status + run: | + if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then + echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets." + echo "::set-output name=has_token::false" + else + echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets." + echo "::set-output name=has_token::true" + fi + + trigger-tests: + name: Trigger Tests + + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + + needs: check-token + if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true' + environment: "test-trigger-is" + + steps: + - uses: actions/checkout@v3 + + - name: Generate GitHub App Token + id: generate-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }} + private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }} + owner: ${{ secrets.ORG_NAME }} + repositories: ${{secrets.REPO_NAME}} + + - name: Trigger Workflow in Another Repo + env: + GH_TOKEN: ${{ steps.generate-token.outputs.token }} + run: | + gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \ + --ref main \ + -f pull_request_number=${{ github.event.pull_request.number }} \ + -f commit_sha=${{ github.event.pull_request.head.sha }} + + # Statuses and checks apply to specific 
commits (by hash). + # Enforcement of required checks is done both at the PR level and the merge queue level. + # In case of multiple commits in a single PR, the hash of the squashed commit + # will not match the one for the latest (approved) commit in the PR. + # We auto approve the check for the merge queue for two reasons: + # * Queue times out due to duration of tests. + # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing. + auto-approve: + if: github.event_name == 'merge_group' + + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + + steps: + - name: Mark Check + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + gh api -X POST -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + /repos/${{ github.repository }}/statuses/${{ github.sha }} \ + -f 'state=success' \ + -f 'context=Integration Tests Check' diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index c7600ebee..a839096c0 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -10,26 +10,26 @@ on: - main jobs: - tests: + tests-ubuntu: + uses: ./.github/workflows/test.yml strategy: fail-fast: false matrix: - pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Unshallow - run: git fetch --prune --unshallow - - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.pyVersion }} - - - name: Run tests - run: make dev install test - + pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ] + with: + os: ubuntu-latest + pyVersion: ${{ matrix.pyVersion }} + + tests-windows: + uses: ./.github/workflows/test.yml + strategy: + fail-fast: false + matrix: + pyVersion: [ '3.9', '3.10', '3.11', '3.12' ] + with: + os: windows-latest + pyVersion: ${{ matrix.pyVersion }} + fmt: runs-on: ubuntu-latest diff --git 
a/.github/workflows/release-test.yml b/.github/workflows/release-test.yml index c3349b75a..0e8c4d8e0 100644 --- a/.github/workflows/release-test.yml +++ b/.github/workflows/release-test.yml @@ -5,10 +5,15 @@ on: jobs: publish: - runs-on: ubuntu-latest + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + environment: release-test + permissions: id-token: write + steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 892bbc5c6..ae242c1d8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,11 +7,16 @@ on: jobs: publish: - runs-on: ubuntu-latest + runs-on: + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco + environment: release + permissions: contents: write id-token: write + steps: - uses: actions/checkout@v3 diff --git a/CHANGELOG.md b/CHANGELOG.md index 458921ee0..95a290655 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,296 @@ # Version changelog +## [Release] Release v0.44.1 + +### New Features and Improvements + + * Introduce new Credential Strategies for Agents ([#882](https://github.com/databricks/databricks-sdk-py/pull/882)). + + +### Internal Changes + + * GetRun logic paginates more arrays ([#867](https://github.com/databricks/databricks-sdk-py/pull/867)). + + + +## [Release] Release v0.44.0 + +### Internal Changes + + * Fix `tests/integration/test_dbutils.py::test_secrets` ([#884](https://github.com/databricks/databricks-sdk-py/pull/884)). + + +### API Changes: + + * Added `get_message_query_result_by_attachment()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. + * Added `id` field for `databricks.sdk.service.apps.App`. + * Added `limit_config` field for `databricks.sdk.service.billing.UpdateBudgetPolicyRequest`. + * Added `volumes` field for `databricks.sdk.service.compute.ClusterLogConf`. 
+ * Removed `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`. + +OpenAPI SHA: 99f644e72261ef5ecf8d74db20f4b7a1e09723cc, Date: 2025-02-11 + +## [Release] Release v0.43.0 + +### API Changes: + + * Added [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview_embedded.html) workspace-level service and [w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_execution.html) workspace-level service. + * Added [w.redash_config](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/redash_config.html) workspace-level service. + * Added `gcp_oauth_token` field for `databricks.sdk.service.catalog.TemporaryCredentials`. + * Added `options` field for `databricks.sdk.service.catalog.UpdateCatalog`. + * Added `disabled` field for `databricks.sdk.service.jobs.RunTask`. + +OpenAPI SHA: c72c58f97b950fcb924a90ef164bcb10cfcd5ece, Date: 2025-02-03 + +### Bug Fixes + + * Fix docs generation when two services have the same name ([#872](https://github.com/databricks/databricks-sdk-py/pull/872)). + +### Internal Changes + + * Add CICD environment to the User Agent ([#866](https://github.com/databricks/databricks-sdk-py/pull/866)). + * Add unit tests for retriable requests ([#879](https://github.com/databricks/databricks-sdk-py/pull/879)). + * Extract "before retry" handler, use it to rewind the stream ([#878](https://github.com/databricks/databricks-sdk-py/pull/878)). + * Update Model Serving `http_request` mixin to correctly use the underlying API. ([#876](https://github.com/databricks/databricks-sdk-py/pull/876)). + +### Backward Incompatible Changes + +* Changed `create()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service with new required argument order. 
+* Changed `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service to type `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service. +* Changed `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.HttpRequestResponse` dataclass. +* Changed `config` field for `databricks.sdk.service.serving.CreateServingEndpoint` to no longer be required. +* Removed `securable_kind` field for `databricks.sdk.service.catalog.CatalogInfo`. +* Removed `securable_kind` field for `databricks.sdk.service.catalog.ConnectionInfo`. +* Removed `status_code` and `text` fields for `databricks.sdk.service.serving.ExternalFunctionResponse`. + +### API Changes: + +* Added [a.budget_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/budget_policy.html) account-level service. +* Added [a.enable_ip_access_lists](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/settings/enable_ip_access_lists.html) account-level service. +* Added `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`. +* Added `statement_id` field for `databricks.sdk.service.dashboards.QueryAttachment`. +* Added `effective_performance_target` field for `databricks.sdk.service.jobs.BaseRun`. +* Added `performance_target` field for `databricks.sdk.service.jobs.CreateJob`. +* Added `performance_target` field for `databricks.sdk.service.jobs.JobSettings`. +* Added `effective_performance_target` field for `databricks.sdk.service.jobs.Run`. +* Added `performance_target` field for `databricks.sdk.service.jobs.RunNow`. 
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.RunTask`. +* Added `run_as_repl` field for `databricks.sdk.service.jobs.SparkJarTask`. +* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.CreateCustomAppIntegration`. +* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`. +* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`. +* Added `contents` field for `databricks.sdk.service.serving.HttpRequestResponse`. +* Added `clean_room` enum value for `databricks.sdk.service.catalog.SecurableType`. +* Added `budget_policy_limit_exceeded` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`. +* Added `arclight_azure_exchange_token_with_user_delegation_key` enum value for `databricks.sdk.service.settings.TokenType`. + +OpenAPI SHA: 840c660106f820a1a5dff931d51fa5f65cd9fdd9, Date: 2025-01-28 + +## [Release] Release v0.41.0 + +### New Features and Improvements + + * Add `serving.http_request` to call external functions. ([#857](https://github.com/databricks/databricks-sdk-py/pull/857)). + * Files API client: recover on download failures ([#844](https://github.com/databricks/databricks-sdk-py/pull/844)) ([#845](https://github.com/databricks/databricks-sdk-py/pull/845)). + + +### Bug Fixes + + * Properly pass query parameters in apps and oauth2 ([#862](https://github.com/databricks/databricks-sdk-py/pull/862)). + + +### Internal Changes + + * Add unit tests for external-browser authentication ([#863](https://github.com/databricks/databricks-sdk-py/pull/863)). + * Decouple oauth2 and serving ([#855](https://github.com/databricks/databricks-sdk-py/pull/855)). + * Migrate workflows that need write access to use hosted runners ([#850](https://github.com/databricks/databricks-sdk-py/pull/850)). + * Stop testing Python 3.7 on Ubuntu ([#858](https://github.com/databricks/databricks-sdk-py/pull/858)). 
+ + +### API Changes: + + * Added [w.access_control](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/access_control.html) workspace-level service. + * Added `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service. + * Added `no_compute` field for `databricks.sdk.service.apps.CreateAppRequest`. + * Added `has_more` field for `databricks.sdk.service.jobs.BaseJob`. + * Added `has_more` field for `databricks.sdk.service.jobs.BaseRun`. + * Added `page_token` field for `databricks.sdk.service.jobs.GetJobRequest`. + * Added `has_more` and `next_page_token` fields for `databricks.sdk.service.jobs.Job`. + * Added `has_more` field for `databricks.sdk.service.jobs.Run`. + * Added `clean_rooms_notebook_output` field for `databricks.sdk.service.jobs.RunOutput`. + * Added `scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`. + * Added `run_as` field for `databricks.sdk.service.pipelines.CreatePipeline`. + * Added `run_as` field for `databricks.sdk.service.pipelines.EditPipeline`. + * Added `authorization_details` and `endpoint_url` fields for `databricks.sdk.service.serving.DataPlaneInfo`. + * Added `contents` field for `databricks.sdk.service.serving.GetOpenApiResponse`. + * Added `activated`, `activation_url`, `authentication_type`, `cloud`, `comment`, `created_at`, `created_by`, `data_recipient_global_metastore_id`, `ip_access_list`, `metastore_id`, `name`, `owner`, `properties_kvpairs`, `region`, `sharing_code`, `tokens`, `updated_at` and `updated_by` fields for `databricks.sdk.service.sharing.RecipientInfo`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.RecipientInfo`. + * Changed `update()` method for [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service with new required argument order. 
+ * Changed `update()` method for [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service with new required argument order. + * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service to return `databricks.sdk.service.sharing.RecipientInfo` dataclass. + * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service return type to become non-empty. + * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service to type `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service. + * Changed `get_open_api()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service return type to become non-empty. + * Changed `patch()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service to type `patch()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service. + * Changed `patch()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.EndpointTags` dataclass. + * Changed `databricks.sdk.service.serving.EndpointTagList` dataclass to. + * Changed `collaborator_alias` field for `databricks.sdk.service.cleanrooms.CleanRoomCollaborator` to be required. 
+ * Changed `collaborator_alias` field for `databricks.sdk.service.cleanrooms.CleanRoomCollaborator` to be required. + * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateAccountFederationPolicyRequest` to no longer be required. + * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateServicePrincipalFederationPolicyRequest` to no longer be required. + * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.DayOfWeekList` dataclass. + * Changed `behavior` field for `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior` to no longer be required. + * Changed `behavior` field for `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior` to no longer be required. + * Changed `project_id` and `region` fields for `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` to be required. + * Changed `project_id` and `region` fields for `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` to be required. + * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityInput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass. + * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass. + * Changed `workload_type` field for `databricks.sdk.service.serving.ServedModelOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass. + +OpenAPI SHA: 58905570a9928fc9ed31fba14a2edaf9a7c55b08, Date: 2025-01-20 + +## [Release] Release v0.40.0 + +### API Changes: + + * Added [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service and [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service. 
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterAttributes`. + * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterDetails`. + * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterSpec`. + * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.CreateCluster`. + * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.EditCluster`. + * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.UpdateClusterResource`. + * Added `update_parameter_syntax` field for `databricks.sdk.service.dashboards.MigrateDashboardRequest`. + * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.RunTask`. + * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.SubmitTask`. + * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.Task`. + * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.RestartWindowDaysOfWeekList` dataclass. + +OpenAPI SHA: a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d, Date: 2024-12-16 + +## [Release] Release v0.39.0 + +### Bug Fixes + + * Update Changelog file ([#830](https://github.com/databricks/databricks-sdk-py/pull/830)). + + +### Internal Changes + + * Fix a couple of typos in open_ai_client.py ([#829](https://github.com/databricks/databricks-sdk-py/pull/829)). + * Update SDK to OpenAPI spec ([#834](https://github.com/databricks/databricks-sdk-py/pull/834)). + + +### API Changes: + + * Added `databricks.sdk.service.cleanrooms` package. + * Added `delete()` method for [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service. 
+ * Added `delete()` method for [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service. + * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CreateCredentialRequest`. + * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CredentialInfo`. + * Added `gcp_options` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`. + * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.UpdateCredentialRequest`. + * Added `cached_query_schema` field for `databricks.sdk.service.dashboards.QueryAttachment`. + * Added . + * Removed `gcp_service_account_key` field for `databricks.sdk.service.catalog.CreateCredentialRequest`. + +OpenAPI SHA: 7016dcbf2e011459416cf408ce21143bcc4b3a25, Date: 2024-12-05 + +## [Release] Release v0.38.0 + +### New Features and Improvements + + * Read streams by 1MB chunks by default. ([#817](https://github.com/databricks/databricks-sdk-py/pull/817)). + +### Bug Fixes + + * Rewind seekable streams before retrying ([#821](https://github.com/databricks/databricks-sdk-py/pull/821)). + * Properly serialize nested data classes. + +### Internal Changes + + * Reformat SDK with YAPF 0.43. ([#822](https://github.com/databricks/databricks-sdk-py/pull/822)). + * Update Jobs GetRun API to support paginated responses for jobs and ForEach tasks ([#819](https://github.com/databricks/databricks-sdk-py/pull/819)). + +### API Changes: + + * Added `service_principal_client_id` field for `databricks.sdk.service.apps.App`. + * Added `azure_service_principal`, `gcp_service_account_key` and `read_only` fields for `databricks.sdk.service.catalog.CreateCredentialRequest`. + * Added `azure_service_principal`, `read_only` and `used_for_managed_storage` fields for `databricks.sdk.service.catalog.CredentialInfo`. 
+ * Added `omit_username` field for `databricks.sdk.service.catalog.ListTablesRequest`.
+ * Added `azure_service_principal` and `read_only` fields for `databricks.sdk.service.catalog.UpdateCredentialRequest`.
+ * Added `external_location_name`, `read_only` and `url` fields for `databricks.sdk.service.catalog.ValidateCredentialRequest`.
+ * Added `is_dir` field for `databricks.sdk.service.catalog.ValidateCredentialResponse`.
+ * Added `only` field for `databricks.sdk.service.jobs.RunNow`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+ * Added `private_access_settings_id` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
+ * Changed `create_credential()` and `generate_temporary_service_credential()` methods for [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service with new required argument order.
+ * Changed `access_connector_id` field for `databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
+ * Changed `name` field for `databricks.sdk.service.catalog.CreateCredentialRequest` to be required.
+ * Changed `credential_name` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest` to be required.
+
+OpenAPI SHA: f2385add116e3716c8a90a0b68e204deb40f996c, Date: 2024-11-15
+
+## [Release] Release v0.37.0
+
+### Bug Fixes
+
+ * Correctly generate classes with nested body fields ([#808](https://github.com/databricks/databricks-sdk-py/pull/808)).
+
+
+### Internal Changes
+
+ * Add `cleanrooms` package ([#806](https://github.com/databricks/databricks-sdk-py/pull/806)).
+ * Add test instructions for external contributors ([#804](https://github.com/databricks/databricks-sdk-py/pull/804)). + * Always write message for manual test execution ([#811](https://github.com/databricks/databricks-sdk-py/pull/811)). + * Automatically trigger integration tests on PR ([#800](https://github.com/databricks/databricks-sdk-py/pull/800)). + * Better isolate ML serving auth unit tests ([#803](https://github.com/databricks/databricks-sdk-py/pull/803)). + * Move templates in the code generator ([#809](https://github.com/databricks/databricks-sdk-py/pull/809)). + + +### API Changes: + + * Added [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service and [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service. + * Added [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service. + * Added `app_deployment` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Added `app` field for `databricks.sdk.service.apps.CreateAppRequest`. + * Added `app` field for `databricks.sdk.service.apps.UpdateAppRequest`. + * Added `table` field for `databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Added `azure_aad` field for `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`. + * Added `full_name` field for `databricks.sdk.service.catalog.StorageCredentialInfo`. + * Added `dashboard` field for `databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Added `schedule` field for `databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Added `subscription` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`. 
+ * Added `warehouse_id` field for `databricks.sdk.service.dashboards.Schedule`.
+ * Added `dashboard` field for `databricks.sdk.service.dashboards.UpdateDashboardRequest`.
+ * Added `schedule` field for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.
+ * Added `page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`.
+ * Added `next_page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`.
+ * Added `connection_name` field for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`.
+ * Added `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
+ * Added `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
+ * Added `last_used_day` field for `databricks.sdk.service.settings.TokenInfo`.
+ * Changed `create()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service with new required argument order.
+ * Changed `execute_message_query()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. New request type is `databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` dataclass.
+ * Changed `create()`, `create_schedule()`, `create_subscription()` and `update_schedule()` methods for [w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview.html) workspace-level service with new required argument order.
+ * Removed [w.clean_rooms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clean_rooms.html) workspace-level service. + * Removed `deployment_id`, `mode` and `source_code_path` fields for `databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Removed `description`, `name` and `resources` fields for `databricks.sdk.service.apps.CreateAppRequest`. + * Removed `description` and `resources` fields for `databricks.sdk.service.apps.UpdateAppRequest`. + * Removed `name` and `spec` fields for `databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Removed `display_name`, `parent_path`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Removed `cron_schedule`, `display_name` and `pause_status` fields for `databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Removed `subscriber` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`. + * Removed `display_name`, `etag`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.UpdateDashboardRequest`. + * Removed `cron_schedule`, `display_name`, `etag` and `pause_status` fields for `databricks.sdk.service.dashboards.UpdateScheduleRequest`. + * Removed `prev_page_token` field for `databricks.sdk.service.jobs.Run`. + +OpenAPI SHA: 5285ce76f81314f342c1702d5c2ad4ef42488781, Date: 2024-11-04 + ## [Release] Release v0.36.0 ### Breaking Changes diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 159946461..79b1c3353 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -1,21 +1,27 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +import logging from typing import Optional import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils +import databricks.sdk.service as service from databricks.sdk import azure from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.compute import ClustersExt -from databricks.sdk.mixins.files import DbfsExt +from databricks.sdk.mixins.files import DbfsExt, FilesExt +from databricks.sdk.mixins.jobs import JobsExt from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt from databricks.sdk.mixins.workspace import WorkspaceExt from databricks.sdk.service.apps import AppsAPI -from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, - LogDeliveryAPI, UsageDashboardsAPI) +from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, + BudgetsAPI, LogDeliveryAPI, + UsageDashboardsAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, ArtifactAllowlistsAPI, CatalogsAPI, - ConnectionsAPI, + ConnectionsAPI, CredentialsAPI, ExternalLocationsAPI, FunctionsAPI, GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, @@ -27,6 +33,9 @@ TableConstraintsAPI, TablesAPI, TemporaryTableCredentialsAPI, VolumesAPI, WorkspaceBindingsAPI) +from databricks.sdk.service.cleanrooms import (CleanRoomAssetsAPI, + CleanRoomsAPI, + CleanRoomTaskRunsAPI) from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI, CommandExecutionAPI, GlobalInitScriptsAPI, @@ -34,9 +43,12 @@ InstanceProfilesAPI, LibrariesAPI, PolicyComplianceForClustersAPI, PolicyFamiliesAPI) -from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI +from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, + LakeviewEmbeddedAPI, + QueryExecutionAPI) from databricks.sdk.service.files import DbfsAPI, FilesAPI -from databricks.sdk.service.iam import (AccountAccessControlAPI, +from databricks.sdk.service.iam 
import (AccessControlAPI, + AccountAccessControlAPI, AccountAccessControlProxyAPI, AccountGroupsAPI, AccountServicePrincipalsAPI, @@ -52,9 +64,11 @@ ProviderListingsAPI, ProviderPersonalizationRequestsAPI, ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI) from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI -from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI, +from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI, + CustomAppIntegrationAPI, OAuthPublishedAppsAPI, PublishedAppIntegrationAPI, + ServicePrincipalFederationPolicyAPI, ServicePrincipalSecretsAPI) from databricks.sdk.service.pipelines import PipelinesAPI from databricks.sdk.service.provisioning import (CredentialsAPI, @@ -64,26 +78,18 @@ Workspace, WorkspacesAPI) from databricks.sdk.service.serving import (ServingEndpointsAPI, ServingEndpointsDataPlaneAPI) -from databricks.sdk.service.settings import (AccountIpAccessListsAPI, - AccountSettingsAPI, - AutomaticClusterUpdateAPI, - ComplianceSecurityProfileAPI, - CredentialsManagerAPI, - CspEnablementAccountAPI, - DefaultNamespaceAPI, - DisableLegacyAccessAPI, - DisableLegacyDbfsAPI, - DisableLegacyFeaturesAPI, - EnhancedSecurityMonitoringAPI, - EsmEnablementAccountAPI, - IpAccessListsAPI, - NetworkConnectivityAPI, - NotificationDestinationsAPI, - PersonalComputeAPI, - RestrictWorkspaceAdminsAPI, - SettingsAPI, TokenManagementAPI, - TokensAPI, WorkspaceConfAPI) -from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI, +from databricks.sdk.service.settings import ( + AccountIpAccessListsAPI, AccountSettingsAPI, + AibiDashboardEmbeddingAccessPolicyAPI, + AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI, + ComplianceSecurityProfileAPI, CredentialsManagerAPI, + CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI, + DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI, + EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, 
IpAccessListsAPI, + NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI, + RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI, + WorkspaceConfAPI) +from databricks.sdk.service.sharing import (ProvidersAPI, RecipientActivationAPI, RecipientsAPI, SharesAPI) from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI, @@ -93,12 +99,15 @@ QueryHistoryAPI, QueryVisualizationsAPI, QueryVisualizationsLegacyAPI, - StatementExecutionAPI, WarehousesAPI) + RedashConfigAPI, StatementExecutionAPI, + WarehousesAPI) from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI, SecretsAPI, WorkspaceAPI) +_LOG = logging.getLogger(__name__) + def _make_dbutils(config: client.Config): # We try to directly check if we are in runtime, instead of @@ -116,6 +125,14 @@ def _make_dbutils(config: client.Config): return runtime_dbutils +def _make_files_client(apiClient: client.ApiClient, config: client.Config): + if config.enable_experimental_files_api_client: + _LOG.info("Experimental Files API client is enabled") + return FilesExt(apiClient, config) + else: + return FilesAPI(apiClient) + + class WorkspaceClient: """ The WorkspaceClient is a client for the workspace-level Databricks REST API. 
@@ -177,98 +194,109 @@ def __init__(self, self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) serving_endpoints = ServingEndpointsExt(self._api_client) - self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) - self._alerts = AlertsAPI(self._api_client) - self._alerts_legacy = AlertsLegacyAPI(self._api_client) - self._apps = AppsAPI(self._api_client) - self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client) - self._catalogs = CatalogsAPI(self._api_client) - self._clean_rooms = CleanRoomsAPI(self._api_client) - self._cluster_policies = ClusterPoliciesAPI(self._api_client) + self._access_control = service.iam.AccessControlAPI(self._api_client) + self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client) + self._alerts = service.sql.AlertsAPI(self._api_client) + self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client) + self._apps = service.apps.AppsAPI(self._api_client) + self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client) + self._catalogs = service.catalog.CatalogsAPI(self._api_client) + self._clean_room_assets = service.cleanrooms.CleanRoomAssetsAPI(self._api_client) + self._clean_room_task_runs = service.cleanrooms.CleanRoomTaskRunsAPI(self._api_client) + self._clean_rooms = service.cleanrooms.CleanRoomsAPI(self._api_client) + self._cluster_policies = service.compute.ClusterPoliciesAPI(self._api_client) self._clusters = ClustersExt(self._api_client) - self._command_execution = CommandExecutionAPI(self._api_client) - self._connections = ConnectionsAPI(self._api_client) - self._consumer_fulfillments = ConsumerFulfillmentsAPI(self._api_client) - self._consumer_installations = ConsumerInstallationsAPI(self._api_client) - self._consumer_listings = ConsumerListingsAPI(self._api_client) - self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client) - self._consumer_providers = 
ConsumerProvidersAPI(self._api_client) - self._credentials_manager = CredentialsManagerAPI(self._api_client) - self._current_user = CurrentUserAPI(self._api_client) - self._dashboard_widgets = DashboardWidgetsAPI(self._api_client) - self._dashboards = DashboardsAPI(self._api_client) - self._data_sources = DataSourcesAPI(self._api_client) + self._command_execution = service.compute.CommandExecutionAPI(self._api_client) + self._connections = service.catalog.ConnectionsAPI(self._api_client) + self._consumer_fulfillments = service.marketplace.ConsumerFulfillmentsAPI(self._api_client) + self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client) + self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client) + self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI( + self._api_client) + self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client) + self._credentials = service.catalog.CredentialsAPI(self._api_client) + self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client) + self._current_user = service.iam.CurrentUserAPI(self._api_client) + self._dashboard_widgets = service.sql.DashboardWidgetsAPI(self._api_client) + self._dashboards = service.sql.DashboardsAPI(self._api_client) + self._data_sources = service.sql.DataSourcesAPI(self._api_client) self._dbfs = DbfsExt(self._api_client) - self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client) - self._experiments = ExperimentsAPI(self._api_client) - self._external_locations = ExternalLocationsAPI(self._api_client) - self._files = FilesAPI(self._api_client) - self._functions = FunctionsAPI(self._api_client) - self._genie = GenieAPI(self._api_client) - self._git_credentials = GitCredentialsAPI(self._api_client) - self._global_init_scripts = GlobalInitScriptsAPI(self._api_client) - self._grants = GrantsAPI(self._api_client) - self._groups = GroupsAPI(self._api_client) 
- self._instance_pools = InstancePoolsAPI(self._api_client) - self._instance_profiles = InstanceProfilesAPI(self._api_client) - self._ip_access_lists = IpAccessListsAPI(self._api_client) - self._jobs = JobsAPI(self._api_client) - self._lakeview = LakeviewAPI(self._api_client) - self._libraries = LibrariesAPI(self._api_client) - self._metastores = MetastoresAPI(self._api_client) - self._model_registry = ModelRegistryAPI(self._api_client) - self._model_versions = ModelVersionsAPI(self._api_client) - self._notification_destinations = NotificationDestinationsAPI(self._api_client) - self._online_tables = OnlineTablesAPI(self._api_client) - self._permission_migration = PermissionMigrationAPI(self._api_client) - self._permissions = PermissionsAPI(self._api_client) - self._pipelines = PipelinesAPI(self._api_client) - self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client) - self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client) - self._policy_families = PolicyFamiliesAPI(self._api_client) - self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client) - self._provider_exchanges = ProviderExchangesAPI(self._api_client) - self._provider_files = ProviderFilesAPI(self._api_client) - self._provider_listings = ProviderListingsAPI(self._api_client) - self._provider_personalization_requests = ProviderPersonalizationRequestsAPI(self._api_client) - self._provider_provider_analytics_dashboards = ProviderProviderAnalyticsDashboardsAPI( + self._dbsql_permissions = service.sql.DbsqlPermissionsAPI(self._api_client) + self._experiments = service.ml.ExperimentsAPI(self._api_client) + self._external_locations = service.catalog.ExternalLocationsAPI(self._api_client) + self._files = _make_files_client(self._api_client, self._config) + self._functions = service.catalog.FunctionsAPI(self._api_client) + self._genie = service.dashboards.GenieAPI(self._api_client) + self._git_credentials = 
service.workspace.GitCredentialsAPI(self._api_client) + self._global_init_scripts = service.compute.GlobalInitScriptsAPI(self._api_client) + self._grants = service.catalog.GrantsAPI(self._api_client) + self._groups = service.iam.GroupsAPI(self._api_client) + self._instance_pools = service.compute.InstancePoolsAPI(self._api_client) + self._instance_profiles = service.compute.InstanceProfilesAPI(self._api_client) + self._ip_access_lists = service.settings.IpAccessListsAPI(self._api_client) + self._jobs = JobsExt(self._api_client) + self._lakeview = service.dashboards.LakeviewAPI(self._api_client) + self._lakeview_embedded = service.dashboards.LakeviewEmbeddedAPI(self._api_client) + self._libraries = service.compute.LibrariesAPI(self._api_client) + self._metastores = service.catalog.MetastoresAPI(self._api_client) + self._model_registry = service.ml.ModelRegistryAPI(self._api_client) + self._model_versions = service.catalog.ModelVersionsAPI(self._api_client) + self._notification_destinations = service.settings.NotificationDestinationsAPI(self._api_client) + self._online_tables = service.catalog.OnlineTablesAPI(self._api_client) + self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client) + self._permissions = service.iam.PermissionsAPI(self._api_client) + self._pipelines = service.pipelines.PipelinesAPI(self._api_client) + self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI( + self._api_client) + self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client) + self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client) + self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client) + self._provider_exchanges = service.marketplace.ProviderExchangesAPI(self._api_client) + self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client) + self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client) 
+ self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI( self._api_client) - self._provider_providers = ProviderProvidersAPI(self._api_client) - self._providers = ProvidersAPI(self._api_client) - self._quality_monitors = QualityMonitorsAPI(self._api_client) - self._queries = QueriesAPI(self._api_client) - self._queries_legacy = QueriesLegacyAPI(self._api_client) - self._query_history = QueryHistoryAPI(self._api_client) - self._query_visualizations = QueryVisualizationsAPI(self._api_client) - self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client) - self._recipient_activation = RecipientActivationAPI(self._api_client) - self._recipients = RecipientsAPI(self._api_client) - self._registered_models = RegisteredModelsAPI(self._api_client) - self._repos = ReposAPI(self._api_client) - self._resource_quotas = ResourceQuotasAPI(self._api_client) - self._schemas = SchemasAPI(self._api_client) - self._secrets = SecretsAPI(self._api_client) - self._service_principals = ServicePrincipalsAPI(self._api_client) + self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI( + self._api_client) + self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client) + self._providers = service.sharing.ProvidersAPI(self._api_client) + self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client) + self._queries = service.sql.QueriesAPI(self._api_client) + self._queries_legacy = service.sql.QueriesLegacyAPI(self._api_client) + self._query_execution = service.dashboards.QueryExecutionAPI(self._api_client) + self._query_history = service.sql.QueryHistoryAPI(self._api_client) + self._query_visualizations = service.sql.QueryVisualizationsAPI(self._api_client) + self._query_visualizations_legacy = service.sql.QueryVisualizationsLegacyAPI(self._api_client) + self._recipient_activation = service.sharing.RecipientActivationAPI(self._api_client) + 
self._recipients = service.sharing.RecipientsAPI(self._api_client) + self._redash_config = service.sql.RedashConfigAPI(self._api_client) + self._registered_models = service.catalog.RegisteredModelsAPI(self._api_client) + self._repos = service.workspace.ReposAPI(self._api_client) + self._resource_quotas = service.catalog.ResourceQuotasAPI(self._api_client) + self._schemas = service.catalog.SchemasAPI(self._api_client) + self._secrets = service.workspace.SecretsAPI(self._api_client) + self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client) self._serving_endpoints = serving_endpoints - self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints) - self._settings = SettingsAPI(self._api_client) - self._shares = SharesAPI(self._api_client) - self._statement_execution = StatementExecutionAPI(self._api_client) - self._storage_credentials = StorageCredentialsAPI(self._api_client) - self._system_schemas = SystemSchemasAPI(self._api_client) - self._table_constraints = TableConstraintsAPI(self._api_client) - self._tables = TablesAPI(self._api_client) - self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client) - self._token_management = TokenManagementAPI(self._api_client) - self._tokens = TokensAPI(self._api_client) - self._users = UsersAPI(self._api_client) - self._vector_search_endpoints = VectorSearchEndpointsAPI(self._api_client) - self._vector_search_indexes = VectorSearchIndexesAPI(self._api_client) - self._volumes = VolumesAPI(self._api_client) - self._warehouses = WarehousesAPI(self._api_client) + self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI( + self._api_client, serving_endpoints) + self._settings = service.settings.SettingsAPI(self._api_client) + self._shares = service.sharing.SharesAPI(self._api_client) + self._statement_execution = service.sql.StatementExecutionAPI(self._api_client) + self._storage_credentials = 
service.catalog.StorageCredentialsAPI(self._api_client) + self._system_schemas = service.catalog.SystemSchemasAPI(self._api_client) + self._table_constraints = service.catalog.TableConstraintsAPI(self._api_client) + self._tables = service.catalog.TablesAPI(self._api_client) + self._temporary_table_credentials = service.catalog.TemporaryTableCredentialsAPI(self._api_client) + self._token_management = service.settings.TokenManagementAPI(self._api_client) + self._tokens = service.settings.TokensAPI(self._api_client) + self._users = service.iam.UsersAPI(self._api_client) + self._vector_search_endpoints = service.vectorsearch.VectorSearchEndpointsAPI(self._api_client) + self._vector_search_indexes = service.vectorsearch.VectorSearchIndexesAPI(self._api_client) + self._volumes = service.catalog.VolumesAPI(self._api_client) + self._warehouses = service.sql.WarehousesAPI(self._api_client) self._workspace = WorkspaceExt(self._api_client) - self._workspace_bindings = WorkspaceBindingsAPI(self._api_client) - self._workspace_conf = WorkspaceConfAPI(self._api_client) + self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client) + self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client) @property def config(self) -> client.Config: @@ -283,42 +311,57 @@ def dbutils(self) -> dbutils.RemoteDbUtils: return self._dbutils @property - def account_access_control_proxy(self) -> AccountAccessControlProxyAPI: + def access_control(self) -> service.iam.AccessControlAPI: + """Rule based Access Control for Databricks Resources.""" + return self._access_control + + @property + def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account.""" return self._account_access_control_proxy @property - def alerts(self) -> AlertsAPI: + def alerts(self) -> service.sql.AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts @property - def 
alerts_legacy(self) -> AlertsLegacyAPI: + def alerts_legacy(self) -> service.sql.AlertsLegacyAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts_legacy @property - def apps(self) -> AppsAPI: + def apps(self) -> service.apps.AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" return self._apps @property - def artifact_allowlists(self) -> ArtifactAllowlistsAPI: + def artifact_allowlists(self) -> service.catalog.ArtifactAllowlistsAPI: """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode.""" return self._artifact_allowlists @property - def catalogs(self) -> CatalogsAPI: + def catalogs(self) -> service.catalog.CatalogsAPI: """A catalog is the first layer of Unity Catalog’s three-level namespace.""" return self._catalogs @property - def clean_rooms(self) -> CleanRoomsAPI: - """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases.""" + def clean_room_assets(self) -> service.cleanrooms.CleanRoomAssetsAPI: + """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.""" + return self._clean_room_assets + + @property + def clean_room_task_runs(self) -> service.cleanrooms.CleanRoomTaskRunsAPI: + """Clean room task runs are the executions of notebooks in a clean room.""" + return self._clean_room_task_runs + + @property + def clean_rooms(self) -> service.cleanrooms.CleanRoomsAPI: + """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise 
data without direct access to each other’s data.""" return self._clean_rooms @property - def cluster_policies(self) -> ClusterPoliciesAPI: + def cluster_policies(self) -> service.compute.ClusterPoliciesAPI: """You can use cluster policies to control users' ability to configure clusters based on a set of rules.""" return self._cluster_policies @@ -328,62 +371,67 @@ def clusters(self) -> ClustersExt: return self._clusters @property - def command_execution(self) -> CommandExecutionAPI: + def command_execution(self) -> service.compute.CommandExecutionAPI: """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" return self._command_execution @property - def connections(self) -> ConnectionsAPI: + def connections(self) -> service.catalog.ConnectionsAPI: """Connections allow for creating a connection to an external data source.""" return self._connections @property - def consumer_fulfillments(self) -> ConsumerFulfillmentsAPI: + def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" return self._consumer_fulfillments @property - def consumer_installations(self) -> ConsumerInstallationsAPI: + def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI: """Installations are entities that allow consumers to interact with Databricks Marketplace listings.""" return self._consumer_installations @property - def consumer_listings(self) -> ConsumerListingsAPI: + def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI: """Listings are the core entities in the Marketplace.""" return self._consumer_listings @property - def consumer_personalization_requests(self) -> ConsumerPersonalizationRequestsAPI: + def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI: """Personalization Requests allow customers to interact with the individualized Marketplace listing 
flow.""" return self._consumer_personalization_requests @property - def consumer_providers(self) -> ConsumerProvidersAPI: + def consumer_providers(self) -> service.marketplace.ConsumerProvidersAPI: """Providers are the entities that publish listings to the Marketplace.""" return self._consumer_providers @property - def credentials_manager(self) -> CredentialsManagerAPI: + def credentials(self) -> service.catalog.CredentialsAPI: + """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant.""" + return self._credentials + + @property + def credentials_manager(self) -> service.settings.CredentialsManagerAPI: """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens.""" return self._credentials_manager @property - def current_user(self) -> CurrentUserAPI: + def current_user(self) -> service.iam.CurrentUserAPI: """This API allows retrieving information about currently authenticated user or service principal.""" return self._current_user @property - def dashboard_widgets(self) -> DashboardWidgetsAPI: + def dashboard_widgets(self) -> service.sql.DashboardWidgetsAPI: """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.""" return self._dashboard_widgets @property - def dashboards(self) -> DashboardsAPI: + def dashboards(self) -> service.sql.DashboardsAPI: """In general, there is little need to modify dashboards using the API.""" return self._dashboards @property - def data_sources(self) -> DataSourcesAPI: + def data_sources(self) -> service.sql.DataSourcesAPI: """This API is provided to assist you in making new query objects.""" return self._data_sources @@ -393,247 +441,263 @@ def dbfs(self) -> DbfsExt: return self._dbfs @property - def dbsql_permissions(self) -> DbsqlPermissionsAPI: + def dbsql_permissions(self) -> service.sql.DbsqlPermissionsAPI: """The 
SQL Permissions API is similar to the endpoints of the :method:permissions/set.""" return self._dbsql_permissions @property - def experiments(self) -> ExperimentsAPI: + def experiments(self) -> service.ml.ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.""" return self._experiments @property - def external_locations(self) -> ExternalLocationsAPI: + def external_locations(self) -> service.catalog.ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.""" return self._external_locations @property - def files(self) -> FilesAPI: + def files(self) -> service.files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" return self._files @property - def functions(self) -> FunctionsAPI: + def functions(self) -> service.catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions @property - def genie(self) -> GenieAPI: + def genie(self) -> service.dashboards.GenieAPI: """Genie provides a no-code experience for business users, powered by AI/BI.""" return self._genie @property - def git_credentials(self) -> GitCredentialsAPI: + def git_credentials(self) -> service.workspace.GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" return self._git_credentials @property - def global_init_scripts(self) -> GlobalInitScriptsAPI: + def global_init_scripts(self) -> service.compute.GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.""" return self._global_init_scripts @property - def grants(self) -> GrantsAPI: + def grants(self) -> service.catalog.GrantsAPI: """In Unity Catalog, data is secure by 
default.""" return self._grants @property - def groups(self) -> GroupsAPI: + def groups(self) -> service.iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" return self._groups @property - def instance_pools(self) -> InstancePoolsAPI: + def instance_pools(self) -> service.compute.InstancePoolsAPI: """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.""" return self._instance_pools @property - def instance_profiles(self) -> InstanceProfilesAPI: + def instance_profiles(self) -> service.compute.InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.""" return self._instance_profiles @property - def ip_access_lists(self) -> IpAccessListsAPI: + def ip_access_lists(self) -> service.settings.IpAccessListsAPI: """IP Access List enables admins to configure IP access lists.""" return self._ip_access_lists @property - def jobs(self) -> JobsAPI: + def jobs(self) -> JobsExt: """The Jobs API allows you to create, edit, and delete jobs.""" return self._jobs @property - def lakeview(self) -> LakeviewAPI: + def lakeview(self) -> service.dashboards.LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards.""" return self._lakeview @property - def libraries(self) -> LibrariesAPI: + def lakeview_embedded(self) -> service.dashboards.LakeviewEmbeddedAPI: + """Token-based Lakeview APIs for embedding dashboards in external applications.""" + return self._lakeview_embedded + + @property + def libraries(self) -> service.compute.LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.""" return self._libraries @property - def metastores(self) -> MetastoresAPI: + def metastores(self) -> 
service.catalog.MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog.""" return self._metastores @property - def model_registry(self) -> ModelRegistryAPI: + def model_registry(self) -> service.ml.ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry.""" return self._model_registry @property - def model_versions(self) -> ModelVersionsAPI: + def model_versions(self) -> service.catalog.ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions @property - def notification_destinations(self) -> NotificationDestinationsAPI: + def notification_destinations(self) -> service.settings.NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" return self._notification_destinations @property - def online_tables(self) -> OnlineTablesAPI: + def online_tables(self) -> service.catalog.OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" return self._online_tables @property - def permission_migration(self) -> PermissionMigrationAPI: + def permission_migration(self) -> service.iam.PermissionMigrationAPI: """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration @property - def permissions(self) -> PermissionsAPI: + def permissions(self) -> service.iam.PermissionsAPI: """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.""" return self._permissions @property - def pipelines(self) -> PipelinesAPI: + def pipelines(self) -> service.pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines @property - def policy_compliance_for_clusters(self) -> 
PolicyComplianceForClustersAPI: + def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" return self._policy_compliance_for_clusters @property - def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI: + def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" return self._policy_compliance_for_jobs @property - def policy_families(self) -> PolicyFamiliesAPI: + def policy_families(self) -> service.compute.PolicyFamiliesAPI: """View available policy families.""" return self._policy_families @property - def provider_exchange_filters(self) -> ProviderExchangeFiltersAPI: + def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI: """Marketplace exchanges filters curate which groups can access an exchange.""" return self._provider_exchange_filters @property - def provider_exchanges(self) -> ProviderExchangesAPI: + def provider_exchanges(self) -> service.marketplace.ProviderExchangesAPI: """Marketplace exchanges allow providers to share their listings with a curated set of customers.""" return self._provider_exchanges @property - def provider_files(self) -> ProviderFilesAPI: + def provider_files(self) -> service.marketplace.ProviderFilesAPI: """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.""" return self._provider_files @property - def provider_listings(self) -> ProviderListingsAPI: + def provider_listings(self) -> service.marketplace.ProviderListingsAPI: """Listings are the core entities in the Marketplace.""" return self._provider_listings @property - def provider_personalization_requests(self) -> ProviderPersonalizationRequestsAPI: + def provider_personalization_requests(self) -> 
service.marketplace.ProviderPersonalizationRequestsAPI: """Personalization requests are an alternate to instantly available listings.""" return self._provider_personalization_requests @property - def provider_provider_analytics_dashboards(self) -> ProviderProviderAnalyticsDashboardsAPI: + def provider_provider_analytics_dashboards( + self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" return self._provider_provider_analytics_dashboards @property - def provider_providers(self) -> ProviderProvidersAPI: + def provider_providers(self) -> service.marketplace.ProviderProvidersAPI: """Providers are entities that manage assets in Marketplace.""" return self._provider_providers @property - def providers(self) -> ProvidersAPI: + def providers(self) -> service.sharing.ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data.""" return self._providers @property - def quality_monitors(self) -> QualityMonitorsAPI: + def quality_monitors(self) -> service.catalog.QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time.""" return self._quality_monitors @property - def queries(self) -> QueriesAPI: + def queries(self) -> service.sql.QueriesAPI: """The queries API can be used to perform CRUD operations on queries.""" return self._queries @property - def queries_legacy(self) -> QueriesLegacyAPI: + def queries_legacy(self) -> service.sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy @property - def query_history(self) -> QueryHistoryAPI: + def query_execution(self) -> service.dashboards.QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards.""" + return self._query_execution + + @property + def query_history(self) -> service.sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run 
against SQL endpoints and serverless compute.""" return self._query_history @property - def query_visualizations(self) -> QueryVisualizationsAPI: + def query_visualizations(self) -> service.sql.QueryVisualizationsAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations @property - def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI: + def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" return self._query_visualizations_legacy @property - def recipient_activation(self) -> RecipientActivationAPI: + def recipient_activation(self) -> service.sharing.RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" return self._recipient_activation @property - def recipients(self) -> RecipientsAPI: + def recipients(self) -> service.sharing.RecipientsAPI: """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares.""" return self._recipients @property - def registered_models(self) -> RegisteredModelsAPI: + def redash_config(self) -> service.sql.RedashConfigAPI: + """Redash V2 service for workspace configurations (internal).""" + return self._redash_config + + @property + def registered_models(self) -> service.catalog.RegisteredModelsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._registered_models @property - def repos(self) -> ReposAPI: + def repos(self) -> service.workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos @property - def resource_quotas(self) -> ResourceQuotasAPI: + 
def resource_quotas(self) -> service.catalog.ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" return self._resource_quotas @property - def schemas(self) -> SchemasAPI: + def schemas(self) -> service.catalog.SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" return self._schemas @property - def secrets(self) -> SecretsAPI: + def secrets(self) -> service.workspace.SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions.""" return self._secrets @property - def service_principals(self) -> ServicePrincipalsAPI: + def service_principals(self) -> service.iam.ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" return self._service_principals @@ -643,82 +707,82 @@ def serving_endpoints(self) -> ServingEndpointsExt: return self._serving_endpoints @property - def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI: + def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" return self._serving_endpoints_data_plane @property - def settings(self) -> SettingsAPI: + def settings(self) -> service.settings.SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" return self._settings @property - def shares(self) -> SharesAPI: + def shares(self) -> service.sharing.SharesAPI: """A share is a container instantiated with :method:shares/create.""" return self._shares @property - def statement_execution(self) -> StatementExecutionAPI: + def statement_execution(self) -> service.sql.StatementExecutionAPI: """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL 
warehouse and fetch the result.""" return self._statement_execution @property - def storage_credentials(self) -> StorageCredentialsAPI: + def storage_credentials(self) -> service.catalog.StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.""" return self._storage_credentials @property - def system_schemas(self) -> SystemSchemasAPI: + def system_schemas(self) -> service.catalog.SystemSchemasAPI: """A system schema is a schema that lives within the system catalog.""" return self._system_schemas @property - def table_constraints(self) -> TableConstraintsAPI: + def table_constraints(self) -> service.catalog.TableConstraintsAPI: """Primary key and foreign key constraints encode relationships between fields in tables.""" return self._table_constraints @property - def tables(self) -> TablesAPI: + def tables(self) -> service.catalog.TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables @property - def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI: + def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.""" return self._temporary_table_credentials @property - def token_management(self) -> TokenManagementAPI: + def token_management(self) -> service.settings.TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users.""" return self._token_management @property - def tokens(self) -> TokensAPI: + def tokens(self) -> service.settings.TokensAPI: """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.""" return self._tokens @property - def users(self) -> UsersAPI: + def users(self) -> service.iam.UsersAPI: 
"""User identities recognized by Databricks and represented by email addresses.""" return self._users @property - def vector_search_endpoints(self) -> VectorSearchEndpointsAPI: + def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" return self._vector_search_endpoints @property - def vector_search_indexes(self) -> VectorSearchIndexesAPI: + def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries.""" return self._vector_search_indexes @property - def volumes(self) -> VolumesAPI: + def volumes(self) -> service.catalog.VolumesAPI: """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.""" return self._volumes @property - def warehouses(self) -> WarehousesAPI: + def warehouses(self) -> service.sql.WarehousesAPI: """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.""" return self._warehouses @@ -728,12 +792,12 @@ def workspace(self) -> WorkspaceExt: return self._workspace @property - def workspace_bindings(self) -> WorkspaceBindingsAPI: + def workspace_bindings(self) -> service.catalog.WorkspaceBindingsAPI: """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__.""" return self._workspace_bindings @property - def workspace_conf(self) -> WorkspaceConfAPI: + def workspace_conf(self) -> service.settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf @@ -807,32 +871,36 @@ def __init__(self, product_version=product_version) self._config = config.copy() self._api_client = client.ApiClient(self._config) - self._access_control = AccountAccessControlAPI(self._api_client) - 
self._billable_usage = BillableUsageAPI(self._api_client) - self._credentials = CredentialsAPI(self._api_client) - self._custom_app_integration = CustomAppIntegrationAPI(self._api_client) - self._encryption_keys = EncryptionKeysAPI(self._api_client) - self._groups = AccountGroupsAPI(self._api_client) - self._ip_access_lists = AccountIpAccessListsAPI(self._api_client) - self._log_delivery = LogDeliveryAPI(self._api_client) - self._metastore_assignments = AccountMetastoreAssignmentsAPI(self._api_client) - self._metastores = AccountMetastoresAPI(self._api_client) - self._network_connectivity = NetworkConnectivityAPI(self._api_client) - self._networks = NetworksAPI(self._api_client) - self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client) - self._private_access = PrivateAccessAPI(self._api_client) - self._published_app_integration = PublishedAppIntegrationAPI(self._api_client) - self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client) - self._service_principals = AccountServicePrincipalsAPI(self._api_client) - self._settings = AccountSettingsAPI(self._api_client) - self._storage = StorageAPI(self._api_client) - self._storage_credentials = AccountStorageCredentialsAPI(self._api_client) - self._usage_dashboards = UsageDashboardsAPI(self._api_client) - self._users = AccountUsersAPI(self._api_client) - self._vpc_endpoints = VpcEndpointsAPI(self._api_client) - self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) - self._workspaces = WorkspacesAPI(self._api_client) - self._budgets = BudgetsAPI(self._api_client) + self._access_control = service.iam.AccountAccessControlAPI(self._api_client) + self._billable_usage = service.billing.BillableUsageAPI(self._api_client) + self._budget_policy = service.billing.BudgetPolicyAPI(self._api_client) + self._credentials = service.provisioning.CredentialsAPI(self._api_client) + self._custom_app_integration = service.oauth2.CustomAppIntegrationAPI(self._api_client) + self._encryption_keys 
= service.provisioning.EncryptionKeysAPI(self._api_client) + self._federation_policy = service.oauth2.AccountFederationPolicyAPI(self._api_client) + self._groups = service.iam.AccountGroupsAPI(self._api_client) + self._ip_access_lists = service.settings.AccountIpAccessListsAPI(self._api_client) + self._log_delivery = service.billing.LogDeliveryAPI(self._api_client) + self._metastore_assignments = service.catalog.AccountMetastoreAssignmentsAPI(self._api_client) + self._metastores = service.catalog.AccountMetastoresAPI(self._api_client) + self._network_connectivity = service.settings.NetworkConnectivityAPI(self._api_client) + self._networks = service.provisioning.NetworksAPI(self._api_client) + self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client) + self._private_access = service.provisioning.PrivateAccessAPI(self._api_client) + self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client) + self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI( + self._api_client) + self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client) + self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client) + self._settings = service.settings.AccountSettingsAPI(self._api_client) + self._storage = service.provisioning.StorageAPI(self._api_client) + self._storage_credentials = service.catalog.AccountStorageCredentialsAPI(self._api_client) + self._usage_dashboards = service.billing.UsageDashboardsAPI(self._api_client) + self._users = service.iam.AccountUsersAPI(self._api_client) + self._vpc_endpoints = service.provisioning.VpcEndpointsAPI(self._api_client) + self._workspace_assignment = service.iam.WorkspaceAssignmentAPI(self._api_client) + self._workspaces = service.provisioning.WorkspacesAPI(self._api_client) + self._budgets = service.billing.BudgetsAPI(self._api_client) @property def config(self) -> client.Config: @@ -843,132 
+911,147 @@ def api_client(self) -> client.ApiClient: return self._api_client @property - def access_control(self) -> AccountAccessControlAPI: + def access_control(self) -> service.iam.AccountAccessControlAPI: """These APIs manage access rules on resources in an account.""" return self._access_control @property - def billable_usage(self) -> BillableUsageAPI: + def billable_usage(self) -> service.billing.BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage @property - def credentials(self) -> CredentialsAPI: + def budget_policy(self) -> service.billing.BudgetPolicyAPI: + """A service serves REST API about Budget policies.""" + return self._budget_policy + + @property + def credentials(self) -> service.provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" return self._credentials @property - def custom_app_integration(self) -> CustomAppIntegrationAPI: + def custom_app_integration(self) -> service.oauth2.CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration @property - def encryption_keys(self) -> EncryptionKeysAPI: + def encryption_keys(self) -> service.provisioning.EncryptionKeysAPI: """These APIs manage encryption key configurations for this workspace (optional).""" return self._encryption_keys @property - def groups(self) -> AccountGroupsAPI: + def federation_policy(self) -> service.oauth2.AccountFederationPolicyAPI: + """These APIs manage account federation policies.""" + return self._federation_policy + + @property + def groups(self) -> service.iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups @property - 
def ip_access_lists(self) -> AccountIpAccessListsAPI: + def ip_access_lists(self) -> service.settings.AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.""" return self._ip_access_lists @property - def log_delivery(self) -> LogDeliveryAPI: + def log_delivery(self) -> service.billing.LogDeliveryAPI: """These APIs manage log delivery configurations for this account.""" return self._log_delivery @property - def metastore_assignments(self) -> AccountMetastoreAssignmentsAPI: + def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" return self._metastore_assignments @property - def metastores(self) -> AccountMetastoresAPI: + def metastores(self) -> service.catalog.AccountMetastoresAPI: """These APIs manage Unity Catalog metastores for an account.""" return self._metastores @property - def network_connectivity(self) -> NetworkConnectivityAPI: + def network_connectivity(self) -> service.settings.NetworkConnectivityAPI: """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.""" return self._network_connectivity @property - def networks(self) -> NetworksAPI: + def networks(self) -> service.provisioning.NetworksAPI: """These APIs manage network configurations for customer-managed VPCs (optional).""" return self._networks @property - def o_auth_published_apps(self) -> OAuthPublishedAppsAPI: + def o_auth_published_apps(self) -> service.oauth2.OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available published OAuth applications in Databricks.""" return self._o_auth_published_apps @property - def private_access(self) -> PrivateAccessAPI: + def private_access(self) -> service.provisioning.PrivateAccessAPI: """These APIs manage private access settings for this account.""" return self._private_access @property - 
def published_app_integration(self) -> PublishedAppIntegrationAPI: + def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration @property - def service_principal_secrets(self) -> ServicePrincipalSecretsAPI: + def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI: + """These APIs manage service principal federation policies.""" + return self._service_principal_federation_policy + + @property + def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets.""" return self._service_principal_secrets @property - def service_principals(self) -> AccountServicePrincipalsAPI: + def service_principals(self) -> service.iam.AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" return self._service_principals @property - def settings(self) -> AccountSettingsAPI: + def settings(self) -> service.settings.AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" return self._settings @property - def storage(self) -> StorageAPI: + def storage(self) -> service.provisioning.StorageAPI: """These APIs manage storage configurations for this workspace.""" return self._storage @property - def storage_credentials(self) -> AccountStorageCredentialsAPI: + def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials @property - def usage_dashboards(self) -> UsageDashboardsAPI: + def usage_dashboards(self) -> service.billing.UsageDashboardsAPI: """These 
APIs manage usage dashboards for this account.""" return self._usage_dashboards @property - def users(self) -> AccountUsersAPI: + def users(self) -> service.iam.AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses.""" return self._users @property - def vpc_endpoints(self) -> VpcEndpointsAPI: + def vpc_endpoints(self) -> service.provisioning.VpcEndpointsAPI: """These APIs manage VPC endpoint configurations for this account.""" return self._vpc_endpoints @property - def workspace_assignment(self) -> WorkspaceAssignmentAPI: + def workspace_assignment(self) -> service.iam.WorkspaceAssignmentAPI: """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.""" return self._workspace_assignment @property - def workspaces(self) -> WorkspacesAPI: + def workspaces(self) -> service.provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces @property - def budgets(self) -> BudgetsAPI: + def budgets(self) -> service.billing.BudgetsAPI: """These APIs manage budget configurations for this account.""" return self._budgets diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py index 95ce39cbe..58fcb10a5 100644 --- a/databricks/sdk/_base_client.py +++ b/databricks/sdk/_base_client.py @@ -1,5 +1,7 @@ +import io import logging import urllib.parse +from abc import ABC, abstractmethod from datetime import timedelta from types import TracebackType from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List, @@ -50,7 +52,8 @@ def __init__(self, http_timeout_seconds: float = None, extra_error_customizers: List[_ErrorCustomizer] = None, debug_headers: bool = False, - clock: Clock = None): + clock: Clock = None, + streaming_buffer_size: int = 1024 * 1024): # 1MB """ :param debug_truncate_bytes: :param retry_timeout_seconds: @@ -68,6 +71,7 @@ def __init__(self, :param extra_error_customizers: :param 
debug_headers: Whether to include debug headers in the request log. :param clock: Clock object to use for time-related operations. + :param streaming_buffer_size: The size of the buffer to use for streaming responses. """ self._debug_truncate_bytes = debug_truncate_bytes or 96 @@ -78,6 +82,7 @@ def __init__(self, self._clock = clock or RealClock() self._session = requests.Session() self._session.auth = self._authenticate + self._streaming_buffer_size = streaming_buffer_size # We don't use `max_retries` from HTTPAdapter to align with a more production-ready # retry strategy established in the Databricks SDK for Go. See _is_retryable and @@ -127,6 +132,14 @@ def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]: flattened = dict(flatten_dict(with_fixed_bools)) return flattened + @staticmethod + def _is_seekable_stream(data) -> bool: + if data is None: + return False + if not isinstance(data, io.IOBase): + return False + return data.seekable() + def do(self, method: str, url: str, @@ -141,24 +154,52 @@ def do(self, if headers is None: headers = {} headers['User-Agent'] = self._user_agent_base - retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), - is_retryable=self._is_retryable, - clock=self._clock) - response = retryable(self._perform)(method, - url, - query=query, - headers=headers, - body=body, - raw=raw, - files=files, - data=data, - auth=auth) + + # Wrap strings and bytes in a seekable stream so that we can rewind them. + if isinstance(data, (str, bytes)): + data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data) + + if not data: + # The request is not a stream. + call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), + is_retryable=self._is_retryable, + clock=self._clock)(self._perform) + elif self._is_seekable_stream(data): + # Keep track of the initial position of the stream so that we can rewind to it + # if we need to retry the request. 
+ initial_data_position = data.tell() + + def rewind(): + logger.debug(f"Rewinding input data to offset {initial_data_position} before retry") + data.seek(initial_data_position) + + call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), + is_retryable=self._is_retryable, + clock=self._clock, + before_retry=rewind)(self._perform) + else: + # Do not retry if the stream is not seekable. This is necessary to avoid bugs + # where the retry doesn't re-read already read data from the stream. + logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}") + call = self._perform + + response = call(method, + url, + query=query, + headers=headers, + body=body, + raw=raw, + files=files, + data=data, + auth=auth) resp = dict() for header in response_headers if response_headers else []: resp[header] = response.headers.get(Casing.to_header_case(header)) if raw: - resp["contents"] = _StreamingResponse(response) + streaming_response = _StreamingResponse(response) + streaming_response.set_chunk_size(self._streaming_buffer_size) + resp["contents"] = streaming_response return resp if not len(response.content): return resp @@ -243,8 +284,20 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) -> logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate()) +class _RawResponse(ABC): + + @abstractmethod + # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799 + def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False): + pass + + @abstractmethod + def close(self): + pass + + class _StreamingResponse(BinaryIO): - _response: requests.Response + _response: _RawResponse _buffer: bytes _content: Union[Iterator[bytes], None] _chunk_size: Union[int, None] @@ -256,7 +309,7 @@ def fileno(self) -> int: def flush(self) -> int: pass - def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None): + def __init__(self, 
response: _RawResponse, chunk_size: Union[int, None] = None): self._response = response self._buffer = b'' self._content = None @@ -266,7 +319,7 @@ def _open(self) -> None: if self._closed: raise ValueError("I/O operation on closed file") if not self._content: - self._content = self._response.iter_content(chunk_size=self._chunk_size) + self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False) def __enter__(self) -> BinaryIO: self._open() @@ -283,6 +336,11 @@ def isatty(self) -> bool: return False def read(self, n: int = -1) -> bytes: + """ + Read up to n bytes from the response stream. If n is negative, read + until the end of the stream. + """ + self._open() read_everything = n < 0 remaining_bytes = n diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index b4efdf603..a556b5988 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -92,6 +92,7 @@ class Config: max_connections_per_pool: int = ConfigAttribute() databricks_environment: Optional[DatabricksEnvironment] = None +<<<<<<< HEAD def __init__(self, *, # Deprecated. Use credentials_strategy instead. @@ -101,6 +102,23 @@ def __init__(self, product_version=None, clock: Optional[Clock] = None, **kwargs): +======= + enable_experimental_files_api_client: bool = ConfigAttribute( + env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT') + files_api_client_download_max_total_recovers = None + files_api_client_download_max_total_recovers_without_progressing = 1 + + def __init__( + self, + *, + # Deprecated. Use credentials_strategy instead. 
+ credentials_provider: Optional[CredentialsStrategy] = None, + credentials_strategy: Optional[CredentialsStrategy] = None, + product=None, + product_version=None, + clock: Optional[Clock] = None, + **kwargs): +>>>>>>> upstream/main self._header_factory = None self._inner = {} self._user_agent_other_info = [] diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index a79151b5a..9a5b0748f 100644 --- a/databricks/sdk/credentials_provider.py +++ b/databricks/sdk/credentials_provider.py @@ -9,6 +9,10 @@ import platform import subprocess import sys +<<<<<<< HEAD +======= +import threading +>>>>>>> upstream/main import time from datetime import datetime from typing import Callable, Dict, List, Optional, Tuple, Union @@ -167,6 +171,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]: oidc = cfg.oidc_endpoints if oidc is None: return None + token_source = ClientCredentials(client_id=cfg.client_id, client_secret=cfg.client_secret, token_url=oidc.token_endpoint, @@ -187,6 +192,10 @@ def token() -> Token: def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: if cfg.auth_type != 'external-browser': return None +<<<<<<< HEAD +======= + +>>>>>>> upstream/main client_id, client_secret = None, None if cfg.client_id: client_id = cfg.client_id @@ -194,12 +203,20 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: elif cfg.azure_client_id: client_id = cfg.azure_client client_secret = cfg.azure_client_secret +<<<<<<< HEAD if not client_id: client_id = 'databricks-cli' # Load cached credentials from disk if they exist. # Note that these are local to the Python SDK and not reused by other SDKs. +======= + if not client_id: + client_id = 'databricks-cli' + + # Load cached credentials from disk if they exist. Note that these are + # local to the Python SDK and not reused by other SDKs. 
+>>>>>>> upstream/main oidc_endpoints = cfg.oidc_endpoints redirect_url = 'http://localhost:8020' token_cache = TokenCache(host=cfg.host, @@ -209,6 +226,7 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: redirect_url=redirect_url) credentials = token_cache.load() if credentials: +<<<<<<< HEAD # Force a refresh in case the loaded credentials are expired. credentials.token() else: @@ -220,6 +238,27 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: if not consent: return None credentials = consent.launch_external_browser() +======= + try: + # Pro-actively refresh the loaded credentials. This is done + # to detect if the token is expired and needs to be refreshed + # by going through the OAuth login flow. + credentials.token() + return credentials(cfg) + # TODO: We should ideally use more specific exceptions. + except Exception as e: + logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow') + + oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints, + client_id=client_id, + redirect_url=redirect_url, + client_secret=client_secret) + consent = oauth_client.initiate_consent() + if not consent: + return None + + credentials = consent.launch_external_browser() +>>>>>>> upstream/main token_cache.save(credentials) return credentials(cfg) @@ -304,11 +343,12 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]: # detect Azure AD Tenant ID if it's not specified directly token_endpoint = cfg.oidc_endpoints.token_endpoint cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, '').split('/')[0] - inner = ClientCredentials(client_id=cfg.azure_client_id, - client_secret="", # we have no (rotatable) secrets in OIDC flow - token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token", - endpoint_params=params, - use_params=True) + inner = ClientCredentials( + client_id=cfg.azure_client_id, + client_secret="", # we have no (rotatable) secrets in OIDC flow + 
token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token", + endpoint_params=params, + use_params=True) def refreshed_headers() -> Dict[str, str]: token = inner.token() @@ -666,12 +706,18 @@ def __init__(self, cfg: 'Config'): self.host = cfg.host def refresh(self) -> Token: - resp = requests.get(self.url, - timeout=self._metadata_service_timeout, - headers={ - self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION, - self.METADATA_SERVICE_HOST_HEADER: self.host - }) + resp = requests.get( + self.url, + timeout=self._metadata_service_timeout, + headers={ + self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION, + self.METADATA_SERVICE_HOST_HEADER: self.host + }, + proxies={ + # Explicitly exclude localhost from being proxied. This is necessary + # for Metadata URLs which typically point to localhost. + "no_proxy": "localhost,127.0.0.1" + }) json_resp: dict[str, Union[str, float]] = resp.json() access_token = json_resp.get("access_token", None) if access_token is None: @@ -707,6 +753,7 @@ def inner() -> Dict[str, str]: # This Code is derived from Mlflow DatabricksModelServingConfigProvider # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332 class ModelServingAuthProvider(): +<<<<<<< HEAD _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token" def __init__(self): @@ -715,6 +762,19 @@ def __init__(self): self.refresh_duration = 300 # 300 Seconds def should_fetch_model_serving_environment_oauth(self) -> bool: +======= + USER_CREDENTIALS = "user_credentials" + + _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token" + + def __init__(self, credential_type: Optional[str]): + self.expiry_time = -1 + self.current_token = None + self.refresh_duration = 300 # 300 Seconds + self.credential_type = credential_type + + def should_fetch_model_serving_environment_oauth() -> bool: 
+>>>>>>> upstream/main """ Check whether this is the model serving environment Additionally check if the oauth token file path exists @@ -723,15 +783,25 @@ def should_fetch_model_serving_environment_oauth(self) -> bool: is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV") or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false") return (is_in_model_serving_env == "true" +<<<<<<< HEAD and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH)) def get_model_dependency_oauth_token(self, should_retry=True) -> str: +======= + and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH)) + + def _get_model_dependency_oauth_token(self, should_retry=True) -> str: +>>>>>>> upstream/main # Use Cached value if it is valid if self.current_token is not None and self.expiry_time > time.time(): return self.current_token try: +<<<<<<< HEAD with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f: +======= + with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f: +>>>>>>> upstream/main oauth_dict = json.load(f) self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"] self.expiry_time = time.time() + self.refresh_duration @@ -741,20 +811,42 @@ def get_model_dependency_oauth_token(self, should_retry=True) -> str: logger.warning("Unable to read oauth token on first attmept in Model Serving Environment", exc_info=e) time.sleep(0.5) +<<<<<<< HEAD return self.get_model_dependency_oauth_token(should_retry=False) +======= + return self._get_model_dependency_oauth_token(should_retry=False) +>>>>>>> upstream/main else: raise RuntimeError( "Unable to read OAuth credentials from the file mounted in Databricks Model Serving" ) from e return self.current_token +<<<<<<< HEAD def get_databricks_host_token(self) -> Optional[Tuple[str, str]]: if not self.should_fetch_model_serving_environment_oauth(): +======= + def _get_invokers_token(self): + current_thread = threading.current_thread() + 
thread_data = current_thread.__dict__ + invokers_token = None + if "invokers_token" in thread_data: + invokers_token = thread_data["invokers_token"] + + if invokers_token is None: + raise RuntimeError("Unable to read Invokers Token in Databricks Model Serving") + + return invokers_token + + def get_databricks_host_token(self) -> Optional[Tuple[str, str]]: + if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): +>>>>>>> upstream/main return None # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get( "DB_MODEL_SERVING_HOST_URL") +<<<<<<< HEAD token = self.get_model_dependency_oauth_token() return (host, token) @@ -767,6 +859,19 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth(): logger.debug("model-serving: Not in Databricks Model Serving, skipping") return None +======= + + if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS: + return (host, self._get_invokers_token()) + else: + return (host, self._get_model_dependency_oauth_token()) + + +def model_serving_auth_visitor(cfg: 'Config', + credential_type: Optional[str] = None) -> Optional[CredentialsProvider]: + try: + model_serving_auth_provider = ModelServingAuthProvider(credential_type) +>>>>>>> upstream/main host, token = model_serving_auth_provider.get_databricks_host_token() if token is None: raise ValueError( @@ -777,7 +882,10 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: except Exception as e: logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e) return None +<<<<<<< HEAD +======= +>>>>>>> upstream/main logger.info("Using Databricks Model Serving Authentication") def inner() -> Dict[str, str]: @@ -788,6 +896,18 @@ def inner() -> Dict[str, str]: return inner +<<<<<<< HEAD +======= 
+@credentials_strategy('model-serving', []) +def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: + if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): + logger.debug("model-serving: Not in Databricks Model Serving, skipping") + return None + + return model_serving_auth_visitor(cfg) + + +>>>>>>> upstream/main class DefaultCredentials: """ Select the first applicable credential provider from the chain """ @@ -830,3 +950,35 @@ def __call__(self, cfg: 'Config') -> CredentialsProvider: raise ValueError( f'cannot configure default credentials, please check {auth_flow_url} to configure credentials for your preferred authentication method.' ) + + +class ModelServingUserCredentials(CredentialsStrategy): + """ + This credential strategy is designed for authenticating the Databricks SDK in the model serving environment using user-specific rights. + In the model serving environment, the strategy retrieves a downscoped user token from the thread-local variable. + In any other environments, the class defaults to the DefaultCredentialStrategy. 
+ To use this credential strategy, instantiate the WorkspaceClient with the ModelServingUserCredentials strategy as follows: + + invokers_client = WorkspaceClient(credential_strategy = ModelServingUserCredentials()) + """ + + def __init__(self): + self.credential_type = ModelServingAuthProvider.USER_CREDENTIALS + self.default_credentials = DefaultCredentials() + + def auth_type(self): + if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): + return "model_serving_" + self.credential_type + else: + return self.default_credentials.auth_type() + + def __call__(self, cfg: 'Config') -> CredentialsProvider: + if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): + header_factory = model_serving_auth_visitor(cfg, self.credential_type) + if not header_factory: + raise ValueError( + f"Unable to authenticate using {self.credential_type} in Databricks Model Serving Environment" + ) + return header_factory + else: + return self.default_credentials(cfg) diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py index 6f6ddf80c..5ad9b79ad 100644 --- a/databricks/sdk/data_plane.py +++ b/databricks/sdk/data_plane.py @@ -3,7 +3,6 @@ from typing import Callable, List from databricks.sdk.oauth import Token -from databricks.sdk.service.oauth2 import DataPlaneInfo @dataclass @@ -19,6 +18,7 @@ class DataPlaneDetails: class DataPlaneService: """Helper class to fetch and manage DataPlane details.""" + from .service.serving import DataPlaneInfo def __init__(self): self._data_plane_info = {} diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py index 1e109a1a7..678b4b630 100644 --- a/databricks/sdk/mixins/files.py +++ b/databricks/sdk/mixins/files.py @@ -1,6 +1,7 @@ from __future__ import annotations import base64 +import logging import os import pathlib import platform @@ -8,19 +9,27 @@ import sys from abc import ABC, abstractmethod from collections import deque +from collections.abc import Iterator from io 
import BytesIO from types import TracebackType from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable, - Iterator, Type, Union) + Optional, Type, Union) from urllib import parse +from requests import RequestException + +from .._base_client import _RawResponse, _StreamingResponse from .._property import _cached_property from ..errors import NotFound from ..service import files +from ..service._internal import _escape_multi_segment_path_parameter +from ..service.files import DownloadResponse if TYPE_CHECKING: from _typeshed import Self +_LOG = logging.getLogger(__name__) + class _DbfsIO(BinaryIO): MAX_CHUNK_SIZE = 1024 * 1024 @@ -636,3 +645,177 @@ def delete(self, path: str, *, recursive=False): if p.is_dir and not recursive: raise IOError('deleting directories requires recursive flag') p.delete(recursive=recursive) + + +class FilesExt(files.FilesAPI): + __doc__ = files.FilesAPI.__doc__ + + def __init__(self, api_client, config: Config): + super().__init__(api_client) + self._config = config.copy() + + def download(self, file_path: str) -> DownloadResponse: + """Download a file. + + Downloads a file of any size. The file contents are the response body. + This is a standard HTTP file download, not a JSON RPC. + + It is strongly recommended, for fault tolerance reasons, + to iteratively consume from the stream with a maximum read(size) + defined instead of using indefinite-size reads. + + :param file_path: str + The remote path of the file, e.g. 
/Volumes/path/to/your/file + + :returns: :class:`DownloadResponse` + """ + + initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path, + start_byte_offset=0, + if_unmodified_since_timestamp=None) + + wrapped_response = self._wrap_stream(file_path, initial_response) + initial_response.contents._response = wrapped_response + return initial_response + + def _download_raw_stream(self, + file_path: str, + start_byte_offset: int, + if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse: + headers = {'Accept': 'application/octet-stream', } + + if start_byte_offset and not if_unmodified_since_timestamp: + raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified") + + if start_byte_offset: + headers['Range'] = f'bytes={start_byte_offset}-' + + if if_unmodified_since_timestamp: + headers['If-Unmodified-Since'] = if_unmodified_since_timestamp + + response_headers = ['content-length', 'content-type', 'last-modified', ] + res = self._api.do('GET', + f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}', + headers=headers, + response_headers=response_headers, + raw=True) + + result = DownloadResponse.from_dict(res) + if not isinstance(result.contents, _StreamingResponse): + raise Exception("Internal error: response contents is of unexpected type: " + + type(result.contents).__name__) + + return result + + def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse): + underlying_response = _ResilientIterator._extract_raw_response(downloadResponse) + return _ResilientResponse(self, + file_path, + downloadResponse.last_modified, + offset=0, + underlying_response=underlying_response) + + +class _ResilientResponse(_RawResponse): + + def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int, + underlying_response: _RawResponse): + self.api = api + self.file_path = file_path + self.underlying_response = underlying_response + self.offset = offset + 
self.file_last_modified = file_last_modified + + def iter_content(self, chunk_size=1, decode_unicode=False): + if decode_unicode: + raise ValueError('Decode unicode is not supported') + + iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False) + self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset, + self.api, chunk_size) + return self.iterator + + def close(self): + self.iterator.close() + + +class _ResilientIterator(Iterator): + # This class tracks current offset (returned to the client code) + # and recovers from failures by requesting download from the current offset. + + @staticmethod + def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse: + streaming_response: _StreamingResponse = download_response.contents # this is an instance of _StreamingResponse + return streaming_response._response + + def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int, + api: FilesExt, chunk_size: int): + self._underlying_iterator = underlying_iterator + self._api = api + self._file_path = file_path + + # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file + # that were so far returned to the caller code. 
+ self._offset = offset + self._file_last_modified = file_last_modified + self._chunk_size = chunk_size + + self._total_recovers_count: int = 0 + self._recovers_without_progressing_count: int = 0 + self._closed: bool = False + + def _should_recover(self) -> bool: + if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers: + _LOG.debug("Total recovers limit exceeded") + return False + if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing: + _LOG.debug("No progression recovers limit exceeded") + return False + return True + + def _recover(self) -> bool: + if not self._should_recover(): + return False # recover suppressed, rethrow original exception + + self._total_recovers_count += 1 + self._recovers_without_progressing_count += 1 + + try: + self._underlying_iterator.close() + + _LOG.debug("Trying to recover from offset " + str(self._offset)) + + # following call includes all the required network retries + downloadResponse = self._api._download_raw_stream(self._file_path, self._offset, + self._file_last_modified) + underlying_response = _ResilientIterator._extract_raw_response(downloadResponse) + self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size, + decode_unicode=False) + _LOG.debug("Recover succeeded") + return True + except: + return False # recover failed, rethrow original exception + + def __next__(self): + if self._closed: + # following _BaseClient + raise ValueError("I/O operation on closed file") + + while True: + try: + returned_bytes = next(self._underlying_iterator) + self._offset += len(returned_bytes) + self._recovers_without_progressing_count = 0 + return returned_bytes + + except StopIteration: + raise + + # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions + except 
RequestException: + if not self._recover(): + raise + + def close(self): + self._underlying_iterator.close() + self._closed = True diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py new file mode 100644 index 000000000..d5e2a1728 --- /dev/null +++ b/databricks/sdk/mixins/jobs.py @@ -0,0 +1,84 @@ +from typing import Optional + +from databricks.sdk.service import jobs +from databricks.sdk.service.jobs import Job + + +class JobsExt(jobs.JobsAPI): + + def get_run(self, + run_id: int, + *, + include_history: Optional[bool] = None, + include_resolved_values: Optional[bool] = None, + page_token: Optional[str] = None) -> jobs.Run: + """Get a single job run. + + Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all pages of tasks, iterations, job_clusters, job_parameters, and repair history. + + :param run_id: int + The canonical identifier of the run for which to retrieve the metadata. This field is required. + :param include_history: bool (optional) + Whether to include the repair history in the response. + :param include_resolved_values: bool (optional) + Whether to include resolved parameter values in the response. + :param page_token: str (optional) + To list the next page of job tasks, set this field to the value of the `next_page_token` returned in + the GetJob response. + + :returns: :class:`Run` + """ + run = super().get_run(run_id, + include_history=include_history, + include_resolved_values=include_resolved_values, + page_token=page_token) + + # When querying a Job run, a page token is returned when there are more than 100 tasks. No iterations are defined for a Job run. Therefore, the next page in the response only includes the next page of tasks. + # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. 
Therefore, the client only reads the iterations from the next page and not the tasks. + is_paginating_iterations = run.iterations is not None and len(run.iterations) > 0 + + # runs/get response includes next_page_token as long as there are more pages to fetch. + while run.next_page_token is not None: + next_run = super().get_run(run_id, + include_history=include_history, + include_resolved_values=include_resolved_values, + page_token=run.next_page_token) + if is_paginating_iterations: + run.iterations.extend(next_run.iterations) + else: + run.tasks.extend(next_run.tasks) + # Each new page of runs/get response includes the next page of the job_clusters, job_parameters, and repair history. + run.job_clusters.extend(next_run.job_clusters) + run.job_parameters.extend(next_run.job_parameters) + run.repair_history.extend(next_run.repair_history) + run.next_page_token = next_run.next_page_token + + return run + + def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: + """Get a single job. + + Retrieves the details for a single job. If the job has multiple pages of tasks, job_clusters, parameters or environments, + it will paginate through all pages and aggregate the results. + + :param job_id: int + The canonical identifier of the job to retrieve information about. This field is required. + :param page_token: str (optional) + Use `next_page_token` returned from the previous GetJob to request the next page of the job's + sub-resources. + + :returns: :class:`Job` + """ + job = super().get(job_id, page_token=page_token) + + # jobs/get response includes next_page_token as long as there are more pages to fetch. + while job.next_page_token is not None: + next_job = super().get(job_id, page_token=job.next_page_token) + # Each new page of jobs/get response includes the next page of the tasks, job_clusters, job_parameters, and environments. 
+ job.settings.tasks.extend(next_job.settings.tasks) + job.settings.job_clusters.extend(next_job.settings.job_clusters) + job.settings.parameters.extend(next_job.settings.parameters) + job.settings.environments.extend(next_job.settings.environments) + job.next_page_token = next_job.next_page_token + + return job \ No newline at end of file diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index f7a8af02d..e5bea9607 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -1,4 +1,10 @@ -from databricks.sdk.service.serving import ServingEndpointsAPI +import json as js +from typing import Dict, Optional + +from requests import Response + +from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod, + ServingEndpointsAPI) class ServingEndpointsExt(ServingEndpointsAPI): @@ -29,7 +35,7 @@ def get_open_ai_client(self): from openai import OpenAI except Exception: raise ImportError( - "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`" + "Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]`" ) return OpenAI( @@ -42,7 +48,7 @@ def get_langchain_chat_open_ai_client(self, model): from langchain_openai import ChatOpenAI except Exception: raise ImportError( - "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7" + "Langchain Open AI is not installed. 
Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]` and ensure you are using python>3.7" ) return ChatOpenAI( @@ -50,3 +56,51 @@ def get_langchain_chat_open_ai_client(self, model): openai_api_base=self._api._cfg.host + "/serving-endpoints", api_key="no-token", # Passing in a placeholder to pass validations, this will not be used http_client=self._get_authorized_http_client()) + + def http_request(self, + conn: str, + method: ExternalFunctionRequestHttpMethod, + path: str, + *, + headers: Optional[Dict[str, str]] = None, + json: Optional[Dict[str, str]] = None, + params: Optional[Dict[str, str]] = None) -> Response: + """Make external services call using the credentials stored in UC Connection. + **NOTE:** Experimental: This API may change or be removed in a future release without warning. + :param conn: str + The connection name to use. This is required to identify the external connection. + :param method: :class:`ExternalFunctionRequestHttpMethod` + The HTTP method to use (e.g., 'GET', 'POST'). This is required. + :param path: str + The relative path for the API endpoint. This is required. + :param headers: Dict[str,str] (optional) + Additional headers for the request. If not provided, only auth headers from connections would be + passed. + :param json: Dict[str,str] (optional) + JSON payload for the request. + :param params: Dict[str,str] (optional) + Query parameters for the request. 
+ :returns: :class:`Response` + """ + response = Response() + response.status_code = 200 + server_response = super().http_request(connection_name=conn, + method=method, + path=path, + headers=js.dumps(headers) if headers is not None else None, + json=js.dumps(json) if json is not None else None, + params=js.dumps(params) if params is not None else None) + + # Read the content from the HttpRequestResponse object + if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"): + raw_content = server_response.contents.read() # Read the bytes + else: + raise ValueError("Invalid response from the server.") + + # Set the raw content + if isinstance(raw_content, bytes): + response._content = raw_content + else: + raise ValueError("Contents must be bytes.") + + return response diff --git a/databricks/sdk/retries.py b/databricks/sdk/retries.py index b98c54281..4f55087ea 100644 --- a/databricks/sdk/retries.py +++ b/databricks/sdk/retries.py @@ -13,7 +13,8 @@ def retried(*, on: Sequence[Type[BaseException]] = None, is_retryable: Callable[[BaseException], Optional[str]] = None, timeout=timedelta(minutes=20), - clock: Clock = None): + clock: Clock = None, + before_retry: Callable = None): has_allowlist = on is not None has_callback = is_retryable is not None if not (has_allowlist or has_callback) or (has_allowlist and has_callback): @@ -54,6 +55,9 @@ def wrapper(*args, **kwargs): raise err logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)') + if before_retry: + before_retry() + clock.sleep(sleep + random()) attempt += 1 raise TimeoutError(f'Timed out after {timeout}') from last_err diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 52796d0e8..d15a6bef2 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -45,6 +45,9 @@ class App: description: Optional[str] = None """The description of the app.""" + id: Optional[str] = None + """The unique identifier of the app.""" + 
pending_deployment: Optional[AppDeployment] = None """The pending deployment of the app. A deployment is considered pending when it is being prepared for deployment to the app compute.""" @@ -52,6 +55,8 @@ class App: resources: Optional[List[AppResource]] = None """Resources for the app.""" + service_principal_client_id: Optional[str] = None + service_principal_id: Optional[int] = None service_principal_name: Optional[str] = None @@ -76,9 +81,37 @@ def as_dict(self) -> dict: if self.default_source_code_path is not None: body['default_source_code_path'] = self.default_source_code_path if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id if self.name is not None: body['name'] = self.name if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() if self.resources: body['resources'] = [v.as_dict() for v in self.resources] + if self.service_principal_client_id is not None: + body['service_principal_client_id'] = self.service_principal_client_id + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.update_time is not None: body['update_time'] = self.update_time + if self.updater is not None: body['updater'] = self.updater + if self.url is not None: body['url'] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the App into a shallow dictionary of its immediate attributes.""" + body = {} + if self.active_deployment: body['active_deployment'] = self.active_deployment + if self.app_status: body['app_status'] = self.app_status + if self.compute_status: body['compute_status'] = self.compute_status + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.default_source_code_path is not None: + 
body['default_source_code_path'] = self.default_source_code_path + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.pending_deployment: body['pending_deployment'] = self.pending_deployment + if self.resources: body['resources'] = self.resources + if self.service_principal_client_id is not None: + body['service_principal_client_id'] = self.service_principal_client_id if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name @@ -97,9 +130,11 @@ def from_dict(cls, d: Dict[str, any]) -> App: creator=d.get('creator', None), default_source_code_path=d.get('default_source_code_path', None), description=d.get('description', None), + id=d.get('id', None), name=d.get('name', None), pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), resources=_repeated_dict(d, 'resources', AppResource), + service_principal_client_id=d.get('service_principal_client_id', None), service_principal_id=d.get('service_principal_id', None), service_principal_name=d.get('service_principal_name', None), update_time=d.get('update_time', None), @@ -131,6 +166,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, 
any]) -> AppAccessControlRequest: """Deserializes the AppAccessControlRequest from a dictionary.""" @@ -168,6 +213,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse: """Deserializes the AppAccessControlResponse from a dictionary.""" @@ -221,6 +277,19 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the AppDeployment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts + if self.deployment_id is not None: body['deployment_id'] = self.deployment_id + if self.mode is not None: body['mode'] = self.mode + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + if self.status: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppDeployment: """Deserializes the AppDeployment from a dictionary.""" @@ -245,6 +314,12 @@ def as_dict(self) -> dict: if self.source_code_path is 
not None: body['source_code_path'] = self.source_code_path return body + def as_shallow_dict(self) -> dict: + """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts: """Deserializes the AppDeploymentArtifacts from a dictionary.""" @@ -280,6 +355,13 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus: """Deserializes the AppDeploymentStatus from a dictionary.""" @@ -303,6 +385,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppPermission: """Deserializes the AppPermission from a dictionary.""" @@ -335,6 +425,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the AppPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: 
body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppPermissions: """Deserializes the AppPermissions from a dictionary.""" @@ -357,6 +455,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription: """Deserializes the AppPermissionsDescription from a dictionary.""" @@ -379,6 +484,13 @@ def as_dict(self) -> dict: if self.app_name is not None: body['app_name'] = self.app_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.app_name is not None: body['app_name'] = self.app_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest: """Deserializes the AppPermissionsRequest from a dictionary.""" @@ -413,6 +525,17 @@ def as_dict(self) -> dict: if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AppResource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.job: body['job'] = self.job + if self.name is not None: body['name'] = self.name + if 
self.secret: body['secret'] = self.secret + if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint + if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppResource: """Deserializes the AppResource from a dictionary.""" @@ -440,6 +563,13 @@ def as_dict(self) -> dict: if self.permission is not None: body['permission'] = self.permission.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppResourceJob: """Deserializes the AppResourceJob from a dictionary.""" @@ -474,6 +604,14 @@ def as_dict(self) -> dict: if self.scope is not None: body['scope'] = self.scope return body + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.permission is not None: body['permission'] = self.permission + if self.scope is not None: body['scope'] = self.scope + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret: """Deserializes the AppResourceSecret from a dictionary.""" @@ -506,6 +644,13 @@ def as_dict(self) -> dict: if self.permission is not None: body['permission'] = self.permission.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.permission is not None: body['permission'] = self.permission + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint: """Deserializes the 
AppResourceServingEndpoint from a dictionary.""" @@ -536,6 +681,13 @@ def as_dict(self) -> dict: if self.permission is not None: body['permission'] = self.permission.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse: """Deserializes the AppResourceSqlWarehouse from a dictionary.""" @@ -573,6 +725,13 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus: """Deserializes the ApplicationStatus from a dictionary.""" @@ -605,74 +764,17 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ComputeStatus: - """Deserializes the ComputeStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState)) - - -@dataclass -class CreateAppDeploymentRequest: - app_name: Optional[str] = None - """The name of the app.""" - - deployment_id: Optional[str] = None - """The unique id of the deployment.""" - - mode: Optional[AppDeploymentMode] = None - """The mode of which the deployment will manage the source code.""" - - source_code_path: Optional[str] = None - """The workspace file system path of the source code used to create the app deployment. 
This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - app. The former refers to the original source code location of the app in the workspace during - deployment creation, whereas the latter provides a system generated stable snapshotted source - code path used by the deployment.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body.""" + def as_shallow_dict(self) -> dict: + """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_name is not None: body['app_name'] = self.app_name - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest: - """Deserializes the CreateAppDeploymentRequest from a dictionary.""" - return cls(app_name=d.get('app_name', None), - deployment_id=d.get('deployment_id', None), - mode=_enum(d, 'mode', AppDeploymentMode), - source_code_path=d.get('source_code_path', None)) - - -@dataclass -class CreateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. 
- It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - resources: Optional[List[AppResource]] = None - """Resources for the app.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.resources: body['resources'] = [v.as_dict() for v in self.resources] + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest: - """Deserializes the CreateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), - name=d.get('name', None), - resources=_repeated_dict(d, 'resources', AppResource)) + def from_dict(cls, d: Dict[str, any]) -> ComputeStatus: + """Deserializes the ComputeStatus from a dictionary.""" + return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState)) @dataclass @@ -686,6 +788,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse: """Deserializes the GetAppPermissionLevelsResponse from a dictionary.""" @@ -707,6 +815,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.app_deployments: body['app_deployments'] = self.app_deployments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse: """Deserializes the ListAppDeploymentsResponse from a dictionary.""" @@ -728,6 +843,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse: """Deserializes the ListAppsResponse from a dictionary.""" @@ -746,34 +868,6 @@ class StopAppRequest: """The name of the app.""" -@dataclass -class UpdateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. 
- It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - resources: Optional[List[AppResource]] = None - """Resources for the app.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.resources: body['resources'] = [v.as_dict() for v in self.resources] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest: - """Deserializes the UpdateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), - name=d.get('name', None), - resources=_repeated_dict(d, 'resources', AppResource)) - - class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -813,29 +907,31 @@ def wait_get_app_active(self, attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def wait_get_app_stopped(self, - name: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[App], None]] = None) -> App: + def wait_get_deployment_app_succeeded( + self, + app_name: str, + deployment_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: deadline = time.time() + timeout.total_seconds() - target_states = (ComputeState.STOPPED, ) - failure_states = (ComputeState.ERROR, ) + target_states = (AppDeploymentState.SUCCEEDED, ) + failure_states = (AppDeploymentState.FAILED, ) status_message = 'polling...' 
attempt = 1 while time.time() < deadline: - poll = self.get(name=name) - status = poll.compute_status.state + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state status_message = f'current status: {status}' - if poll.compute_status: - status_message = poll.compute_status.message + if poll.status: + status_message = poll.status.message if status in target_states: return poll if callback: callback(poll) if status in failure_states: - msg = f'failed to reach STOPPED, got {status}: {status_message}' + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' raise OperationFailed(msg) - prefix = f"name={name}" + prefix = f"app_name={app_name}, deployment_id={deployment_id}" sleep = attempt if sleep > 10: # sleep 10s max per attempt @@ -845,31 +941,29 @@ def wait_get_app_stopped(self, attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def wait_get_deployment_app_succeeded( - self, - app_name: str, - deployment_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: + def wait_get_app_stopped(self, + name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[App], None]] = None) -> App: deadline = time.time() + timeout.total_seconds() - target_states = (AppDeploymentState.SUCCEEDED, ) - failure_states = (AppDeploymentState.FAILED, ) + target_states = (ComputeState.STOPPED, ) + failure_states = (ComputeState.ERROR, ) status_message = 'polling...' 
attempt = 1 while time.time() < deadline: - poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) - status = poll.status.state + poll = self.get(name=name) + status = poll.compute_status.state status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message + if poll.compute_status: + status_message = poll.compute_status.message if status in target_states: return poll if callback: callback(poll) if status in failure_states: - msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + msg = f'failed to reach STOPPED, got {status}: {status_message}' raise OperationFailed(msg) - prefix = f"app_name={app_name}, deployment_id={deployment_id}" + prefix = f"name={name}" sleep = attempt if sleep > 10: # sleep 10s max per attempt @@ -879,43 +973,33 @@ def wait_get_deployment_app_succeeded( attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def create(self, - name: str, - *, - description: Optional[str] = None, - resources: Optional[List[AppResource]] = None) -> Wait[App]: + def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]: """Create an app. Creates a new app. - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - :param resources: List[:class:`AppResource`] (optional) - Resources for the app. + :param app: :class:`App` (optional) + :param no_compute: bool (optional) + If true, the app will not be started after creation. :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. 
""" - body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if resources is not None: body['resources'] = [v.as_dict() for v in resources] + body = app.as_dict() + query = {} + if no_compute is not None: query['no_compute'] = no_compute headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers) return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name']) def create_and_wait(self, - name: str, *, - description: Optional[str] = None, - resources: Optional[List[AppResource]] = None, + app: Optional[App] = None, + no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App: - return self.create(description=description, name=name, resources=resources).result(timeout=timeout) + return self.create(app=app, no_compute=no_compute).result(timeout=timeout) def delete(self, name: str) -> App: """Delete an app. @@ -933,37 +1017,20 @@ def delete(self, name: str) -> App: res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers) return App.from_dict(res) - def deploy(self, - app_name: str, - *, - deployment_id: Optional[str] = None, - mode: Optional[AppDeploymentMode] = None, - source_code_path: Optional[str] = None) -> Wait[AppDeployment]: + def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]: """Create an app deployment. Creates an app deployment for the app with the supplied name. :param app_name: str The name of the app. - :param deployment_id: str (optional) - The unique id of the deployment. - :param mode: :class:`AppDeploymentMode` (optional) - The mode of which the deployment will manage the source code. 
- :param source_code_path: str (optional) - The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. - The former refers to the original source code location of the app in the workspace during deployment - creation, whereas the latter provides a system generated stable snapshotted source code path used by - the deployment. + :param app_deployment: :class:`AppDeployment` (optional) :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. """ - body = {} - if deployment_id is not None: body['deployment_id'] = deployment_id - if mode is not None: body['mode'] = mode.value - if source_code_path is not None: body['source_code_path'] = source_code_path + body = app_deployment.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } op_response = self._api.do('POST', @@ -975,18 +1042,12 @@ def deploy(self, app_name=app_name, deployment_id=op_response['deployment_id']) - def deploy_and_wait( - self, - app_name: str, - *, - deployment_id: Optional[str] = None, - mode: Optional[AppDeploymentMode] = None, - source_code_path: Optional[str] = None, - timeout=timedelta(minutes=20)) -> AppDeployment: - return self.deploy(app_name=app_name, - deployment_id=deployment_id, - mode=mode, - source_code_path=source_code_path).result(timeout=timeout) + def deploy_and_wait(self, + app_name: str, + *, + app_deployment: Optional[AppDeployment] = None, + timeout=timedelta(minutes=20)) -> AppDeployment: + return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout) def get(self, name: str) -> App: """Get an app. @@ -1121,7 +1182,8 @@ def set_permissions( access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: """Set app permissions. - Sets permissions on an app. 
Apps can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param app_name: str The app for which to get or manage permissions. @@ -1179,11 +1241,7 @@ def stop(self, name: str) -> Wait[App]: def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: return self.stop(name=name).result(timeout=timeout) - def update(self, - name: str, - *, - description: Optional[str] = None, - resources: Optional[List[AppResource]] = None) -> App: + def update(self, name: str, *, app: Optional[App] = None) -> App: """Update an app. Updates the app with the supplied name. @@ -1191,16 +1249,11 @@ def update(self, :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. - :param description: str (optional) - The description of the app. - :param resources: List[:class:`AppResource`] (optional) - Resources for the app. 
+ :param app: :class:`App` (optional) :returns: :class:`App` """ - body = {} - if description is not None: body['description'] = description - if resources is not None: body['resources'] = [v.as_dict() for v in resources] + body = app.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers) diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index cfb7ba0b4..e23e676fe 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -11,6 +11,8 @@ _LOG = logging.getLogger('databricks.sdk') +from databricks.sdk.service import compute + # all definitions in this file are in alphabetical order @@ -34,6 +36,15 @@ def as_dict(self) -> dict: if self.target is not None: body['target'] = self.target return body + def as_shallow_dict(self) -> dict: + """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action_configuration_id is not None: + body['action_configuration_id'] = self.action_configuration_id + if self.action_type is not None: body['action_type'] = self.action_type + if self.target is not None: body['target'] = self.target + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration: """Deserializes the ActionConfiguration from a dictionary.""" @@ -83,6 +94,18 @@ def as_dict(self) -> dict: if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action_configurations: body['action_configurations'] = self.action_configurations + if self.alert_configuration_id is not None: + body['alert_configuration_id'] = self.alert_configuration_id + if self.quantity_threshold is not None: body['quantity_threshold'] = 
self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type + if self.time_period is not None: body['time_period'] = self.time_period + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration: """Deserializes the AlertConfiguration from a dictionary.""" @@ -149,6 +172,19 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration: """Deserializes the BudgetConfiguration from a dictionary.""" @@ -178,6 +214,13 @@ def as_dict(self) -> dict: if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.tags: body['tags'] = self.tags + if self.workspace_id: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter: """Deserializes the BudgetConfigurationFilter from a dictionary.""" @@ -198,6 +241,13 @@ def as_dict(self) -> dict: if 
self.values: body['values'] = [v for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes.""" + body = {} + if self.operator is not None: body['operator'] = self.operator + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause: """Deserializes the BudgetConfigurationFilterClause from a dictionary.""" @@ -223,6 +273,13 @@ def as_dict(self) -> dict: if self.value: body['value'] = self.value.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause: """Deserializes the BudgetConfigurationFilterTagClause from a dictionary.""" @@ -242,6 +299,13 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes.""" + body = {} + if self.operator is not None: body['operator'] = self.operator + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause: """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary.""" @@ -249,6 +313,44 @@ def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdCla values=d.get('values', None)) +@dataclass +class BudgetPolicy: + """Contains the BudgetPolicy details.""" + + policy_id: str + """The Id of the policy. 
This field is generated by Databricks and globally unique.""" + + custom_tags: Optional[List[compute.CustomPolicyTag]] = None + """A list of tags defined by the customer. At most 20 entries are allowed per policy.""" + + policy_name: Optional[str] = None + """The name of the policy. - Must be unique among active policies. - Can contain only characters + from the ISO 8859-1 (latin1) set.""" + + def as_dict(self) -> dict: + """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_name is not None: body['policy_name'] = self.policy_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_name is not None: body['policy_name'] = self.policy_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> BudgetPolicy: + """Deserializes the BudgetPolicy from a dictionary.""" + return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag), + policy_id=d.get('policy_id', None), + policy_name=d.get('policy_name', None)) + + @dataclass class CreateBillingUsageDashboardRequest: dashboard_type: Optional[UsageDashboardType] = None @@ -265,6 +367,13 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest: """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" @@ -283,6 +392,12 @@ def as_dict(self) -> dict: if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse: """Deserializes the CreateBillingUsageDashboardResponse from a dictionary.""" @@ -316,6 +431,15 @@ def as_dict(self) -> dict: if self.filter: body['filter'] = self.filter.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget: """Deserializes the CreateBudgetConfigurationBudget from a dictionary.""" @@ -341,6 +465,13 @@ def as_dict(self) -> dict: if self.target is not None: body['target'] = self.target return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action_type is not None: body['action_type'] = self.action_type + if self.target is not None: body['target'] = self.target + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
CreateBudgetConfigurationBudgetActionConfigurations: """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary.""" @@ -378,6 +509,16 @@ def as_dict(self) -> dict: if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action_configurations: body['action_configurations'] = self.action_configurations + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type + if self.time_period is not None: body['time_period'] = self.time_period + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations: """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary.""" @@ -400,6 +541,12 @@ def as_dict(self) -> dict: if self.budget: body['budget'] = self.budget.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget: body['budget'] = self.budget + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest: """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" @@ -417,12 +564,57 @@ def as_dict(self) -> dict: if self.budget: body['budget'] = self.budget.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget: body['budget'] = self.budget + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
CreateBudgetConfigurationResponse: """Deserializes the CreateBudgetConfigurationResponse from a dictionary.""" return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) +@dataclass +class CreateBudgetPolicyRequest: + """A request to create a BudgetPolicy.""" + + custom_tags: Optional[List[compute.CustomPolicyTag]] = None + """A list of tags defined by the customer. At most 40 entries are allowed per policy.""" + + policy_name: Optional[str] = None + """The name of the policy. - Must be unique among active policies. - Can contain only characters of + 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.""" + + request_id: Optional[str] = None + """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is + recommended. This request is only idempotent if a `request_id` is provided.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.request_id is not None: body['request_id'] = self.request_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.request_id is not None: body['request_id'] = self.request_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetPolicyRequest: + """Deserializes the CreateBudgetPolicyRequest from a dictionary.""" + return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag), + policy_name=d.get('policy_name', None), + request_id=d.get('request_id', None)) + + @dataclass class CreateLogDeliveryConfigurationParams: log_type: LogType 
@@ -509,6 +701,21 @@ def as_dict(self) -> dict: if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config_name is not None: body['config_name'] = self.config_name + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_type is not None: body['log_type'] = self.log_type + if self.output_format is not None: body['output_format'] = self.output_format + if self.status is not None: body['status'] = self.status + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams: """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary.""" @@ -531,6 +738,30 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: + """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" + return cls() + + +@dataclass +class DeleteResponse: + + def as_dict(self) -> dict: + """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into 
a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" @@ -563,12 +794,56 @@ def as_dict(self) -> dict: if self.contents: body['contents'] = self.contents return body + def as_shallow_dict(self) -> dict: + """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents: body['contents'] = self.contents + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" return cls(contents=d.get('contents', None)) +@dataclass +class Filter: + """Structured representation of a filter to be applied to a list of policies. All specified filters + will be applied in conjunction.""" + + creator_user_id: Optional[int] = None + """The policy creator user id to be filtered on. If unspecified, all policies will be returned.""" + + creator_user_name: Optional[str] = None + """The policy creator user name to be filtered on. If unspecified, all policies will be returned.""" + + policy_name: Optional[str] = None + """The partial name of policies to be filtered on. 
If unspecified, all policies will be returned.""" + + def as_dict(self) -> dict: + """Serializes the Filter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.policy_name is not None: body['policy_name'] = self.policy_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Filter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.policy_name is not None: body['policy_name'] = self.policy_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Filter: + """Deserializes the Filter from a dictionary.""" + return cls(creator_user_id=d.get('creator_user_id', None), + creator_user_name=d.get('creator_user_name', None), + policy_name=d.get('policy_name', None)) + + @dataclass class GetBillingUsageDashboardResponse: dashboard_id: Optional[str] = None @@ -584,6 +859,13 @@ def as_dict(self) -> dict: if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url return body + def as_shallow_dict(self) -> dict: + """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse: """Deserializes the GetBillingUsageDashboardResponse from a dictionary.""" @@ -600,12 +882,39 @@ def as_dict(self) -> dict: if self.budget: body['budget'] = self.budget.as_dict() return body + def as_shallow_dict(self) -> 
dict: + """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget: body['budget'] = self.budget + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse: """Deserializes the GetBudgetConfigurationResponse from a dictionary.""" return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) +@dataclass +class LimitConfig: + """The limit configuration of the policy. Limit configuration provide a budget policy level cost + control by enforcing the limit.""" + + def as_dict(self) -> dict: + """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the LimitConfig into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LimitConfig: + """Deserializes the LimitConfig from a dictionary.""" + return cls() + + @dataclass class ListBudgetConfigurationsResponse: budgets: Optional[List[BudgetConfiguration]] = None @@ -621,6 +930,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budgets: body['budgets'] = self.budgets + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse: """Deserializes the ListBudgetConfigurationsResponse from a dictionary.""" @@ -628,6 +944,44 @@ def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse: next_page_token=d.get('next_page_token', None)) +@dataclass +class ListBudgetPoliciesResponse: + """A list of policies.""" + + next_page_token: 
Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, + there are no subsequent pages.""" + + policies: Optional[List[BudgetPolicy]] = None + + previous_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the previous page. If this field is + omitted, there are no previous pages.""" + + def as_dict(self) -> dict: + """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = self.policies + if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListBudgetPoliciesResponse: + """Deserializes the ListBudgetPoliciesResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + policies=_repeated_dict(d, 'policies', BudgetPolicy), + previous_page_token=d.get('previous_page_token', None)) + + class LogDeliveryConfigStatus(Enum): """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`.
You can [enable or disable the @@ -744,6 +1098,26 @@ def as_dict(self) -> dict: if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] return body + def as_shallow_dict(self) -> dict: + """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.config_id is not None: body['config_id'] = self.config_id + if self.config_name is not None: body['config_name'] = self.config_name + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status + if self.log_type is not None: body['log_type'] = self.log_type + if self.output_format is not None: body['output_format'] = self.output_format + if self.status is not None: body['status'] = self.status + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.update_time is not None: body['update_time'] = self.update_time + if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogDeliveryConfiguration: """Deserializes the LogDeliveryConfiguration from a dictionary.""" @@ -796,6 +1170,16 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_attempt_time is not None: body['last_attempt_time'] = 
self.last_attempt_time + if self.last_successful_attempt_time is not None: + body['last_successful_attempt_time'] = self.last_successful_attempt_time + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogDeliveryStatus: """Deserializes the LogDeliveryStatus from a dictionary.""" @@ -846,12 +1230,50 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PatchStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse: """Deserializes the PatchStatusResponse from a dictionary.""" return cls() +@dataclass +class SortSpec: + descending: Optional[bool] = None + """Whether to sort in descending order.""" + + field: Optional[SortSpecField] = None + """The field to sort by""" + + def as_dict(self) -> dict: + """Serializes the SortSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.descending is not None: body['descending'] = self.descending + if self.field is not None: body['field'] = self.field.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SortSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.descending is not None: body['descending'] = self.descending + if self.field is not None: body['field'] = self.field + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> SortSpec: + """Deserializes the SortSpec from a dictionary.""" + return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField)) + + +class SortSpecField(Enum): + + POLICY_NAME = 'POLICY_NAME' + + @dataclass class UpdateBudgetConfigurationBudget: account_id: Optional[str] = None @@ -884,6 +1306,17 @@ def as_dict(self) -> dict: if self.filter: body['filter'] =
self.filter.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget: """Deserializes the UpdateBudgetConfigurationBudget from a dictionary.""" @@ -909,6 +1342,13 @@ def as_dict(self) -> dict: if self.budget_id is not None: body['budget_id'] = self.budget_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget: body['budget'] = self.budget + if self.budget_id is not None: body['budget_id'] = self.budget_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest: """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" @@ -927,6 +1367,12 @@ def as_dict(self) -> dict: if self.budget: body['budget'] = self.budget.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget: body['budget'] = self.budget + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse: """Deserializes the UpdateBudgetConfigurationResponse from a dictionary.""" @@ -952,6 +1398,14 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def 
as_shallow_dict(self) -> dict: + """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_delivery_configuration_id is not None: + body['log_delivery_configuration_id'] = self.log_delivery_configuration_id + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest: """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" @@ -976,6 +1430,13 @@ def as_dict(self) -> dict: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_delivery_configuration: + body['log_delivery_configuration'] = self.log_delivery_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WrappedCreateLogDeliveryConfiguration: """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary.""" @@ -994,6 +1455,13 @@ def as_dict(self) -> dict: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_delivery_configuration: + body['log_delivery_configuration'] = self.log_delivery_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfiguration: """Deserializes the WrappedLogDeliveryConfiguration from a dictionary.""" @@ -1012,6 +1480,13 @@ def as_dict(self) -> dict: body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations] return body + def as_shallow_dict(self) -> dict: + """Serializes the WrappedLogDeliveryConfigurations into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.log_delivery_configurations: + body['log_delivery_configurations'] = self.log_delivery_configurations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfigurations: """Deserializes the WrappedLogDeliveryConfigurations from a dictionary.""" @@ -1068,6 +1543,156 @@ def download(self, return DownloadResponse.from_dict(res) +class BudgetPolicyAPI: + """A service serves REST API about Budget policies""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + custom_tags: Optional[List[compute.CustomPolicyTag]] = None, + policy_name: Optional[str] = None, + request_id: Optional[str] = None) -> BudgetPolicy: + """Create a budget policy. + + Creates a new policy. + + :param custom_tags: List[:class:`CustomPolicyTag`] (optional) + A list of tags defined by the customer. At most 40 entries are allowed per policy. + :param policy_name: str (optional) + The name of the policy. - Must be unique among active policies. - Can contain only characters of + 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace. + :param request_id: str (optional) + A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is + recommended. This request is only idempotent if a `request_id` is provided. + + :returns: :class:`BudgetPolicy` + """ + body = {} + if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags] + if policy_name is not None: body['policy_name'] = policy_name + if request_id is not None: body['request_id'] = request_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.1/accounts/{self._api.account_id}/budget-policies', + body=body, + headers=headers) + return BudgetPolicy.from_dict(res) + + def delete(self, policy_id: str): + """Delete a budget policy. 
+ + Deletes a policy + + :param policy_id: str + The Id of the policy. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', + f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}', + headers=headers) + + def get(self, policy_id: str) -> BudgetPolicy: + """Get a budget policy. + + Retrieves a policy by its ID. + + :param policy_id: str + The Id of the policy. + + :returns: :class:`BudgetPolicy` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}', + headers=headers) + return BudgetPolicy.from_dict(res) + + def list(self, + *, + filter_by: Optional[Filter] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]: + """List policies. + + Lists all policies. Policies are returned in the alphabetically ascending order of their names. + + :param filter_by: :class:`Filter` (optional) + A filter to apply to the list of policies. + :param page_size: int (optional) + The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be + returned. The maximum value is 1000; values above 1000 will be coerced to 1000. + :param page_token: str (optional) + A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the + subsequent page. If unspecified, the first page will be returned. + + When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the + call that provided the page token. + :param sort_spec: :class:`SortSpec` (optional) + The sort specification.
+ + :returns: Iterator over :class:`BudgetPolicy` + """ + + query = {} + if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict() + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.1/accounts/{self._api.account_id}/budget-policies', + query=query, + headers=headers) + if 'policies' in json: + for v in json['policies']: + yield BudgetPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + policy_id: str, + *, + limit_config: Optional[LimitConfig] = None, + policy: Optional[BudgetPolicy] = None) -> BudgetPolicy: + """Update a budget policy. + + Updates a policy + + :param policy_id: str + The Id of the policy. This field is generated by Databricks and globally unique. + :param limit_config: :class:`LimitConfig` (optional) + DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy + :param policy: :class:`BudgetPolicy` (optional) + Contains the BudgetPolicy details. + + :returns: :class:`BudgetPolicy` + """ + body = policy.as_dict() + query = {} + if limit_config is not None: query['limit_config'] = limit_config.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}', + query=query, + body=body, + headers=headers) + return BudgetPolicy.from_dict(res) + + class BudgetsAPI: """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your account. 
You can set up budgets to either track account-wide spending, or apply filters to track the @@ -1121,7 +1746,7 @@ def get(self, budget_id: str) -> GetBudgetConfigurationResponse: Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str - The Databricks budget configuration ID. + The budget configuration ID :returns: :class:`GetBudgetConfigurationResponse` """ diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index b149dbbaa..83d7de4e8 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -3,11 +3,15 @@ from __future__ import annotations import logging +import random +import time from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional +from typing import Callable, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger('databricks.sdk') @@ -24,6 +28,12 @@ def as_dict(self) -> dict: if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_info: body['metastore_info'] = self.metastore_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastore: """Deserializes the AccountsCreateMetastore from a dictionary.""" @@ -48,6 +58,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastoreAssignment: """Deserializes the AccountsCreateMetastoreAssignment from a dictionary.""" @@ -70,6 +88,13 @@ def as_dict(self) -> dict: if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: body['credential_info'] = self.credential_info + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsCreateStorageCredential: """Deserializes the AccountsCreateStorageCredential from a dictionary.""" @@ -87,6 +112,12 @@ def as_dict(self) -> dict: if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreAssignment: """Deserializes the AccountsMetastoreAssignment from a dictionary.""" @@ -103,6 +134,12 @@ def as_dict(self) -> dict: if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_info: body['metastore_info'] = self.metastore_info + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreInfo: """Deserializes the AccountsMetastoreInfo from a dictionary.""" @@ -119,6 +156,12 @@ def as_dict(self) -> dict: if self.credential_info: body['credential_info'] = self.credential_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: body['credential_info'] = self.credential_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsStorageCredentialInfo: """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" @@ -139,6 +182,13 @@ def as_dict(self) -> dict: if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.metastore_info: body['metastore_info'] = self.metastore_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastore: """Deserializes the AccountsUpdateMetastore from a dictionary.""" @@ -164,6 +214,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastoreAssignment: """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary.""" @@ -191,6 
+249,15 @@ def as_dict(self) -> dict: body['storage_credential_name'] = self.storage_credential_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: body['credential_info'] = self.credential_info + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.storage_credential_name is not None: + body['storage_credential_name'] = self.storage_credential_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateStorageCredential: """Deserializes the AccountsUpdateStorageCredential from a dictionary.""" @@ -222,6 +289,15 @@ def as_dict(self) -> dict: if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ArtifactAllowlistInfo: """Deserializes the ArtifactAllowlistInfo from a dictionary.""" @@ -246,6 +322,13 @@ def as_dict(self) -> dict: if self.match_type is not None: body['match_type'] = self.match_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact is not None: body['artifact'] = self.artifact + if self.match_type is not None: body['match_type'] = self.match_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ArtifactMatcher: """Deserializes the ArtifactMatcher from a 
dictionary.""" @@ -268,6 +351,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the AssignResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AssignResponse: """Deserializes the AssignResponse from a dictionary.""" @@ -301,6 +389,15 @@ def as_dict(self) -> dict: if self.session_token is not None: body['session_token'] = self.session_token return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.access_point is not None: body['access_point'] = self.access_point + if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key + if self.session_token is not None: body['session_token'] = self.session_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" @@ -310,6 +407,44 @@ def from_dict(cls, d: Dict[str, any]) -> AwsCredentials: session_token=d.get('session_token', None)) +@dataclass +class AwsIamRole: + """The AWS IAM role configuration""" + + external_id: Optional[str] = None + """The external ID used in role assumption to prevent the confused deputy problem.""" + + role_arn: Optional[str] = None + """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials.""" + + unity_catalog_iam_arn: Optional[str] = None + """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. 
This is the identity + that is going to assume the AWS IAM role.""" + + def as_dict(self) -> dict: + """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AwsIamRole: + """Deserializes the AwsIamRole from a dictionary.""" + return cls(external_id=d.get('external_id', None), + role_arn=d.get('role_arn', None), + unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None)) + + @dataclass class AwsIamRoleRequest: role_arn: str @@ -321,6 +456,12 @@ def as_dict(self) -> dict: if self.role_arn is not None: body['role_arn'] = self.role_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.role_arn is not None: body['role_arn'] = self.role_arn + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleRequest: """Deserializes the AwsIamRoleRequest from a dictionary.""" @@ -347,6 +488,14 @@ def as_dict(self) -> dict: if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if 
@dataclass
class AzureActiveDirectoryToken:
    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
    Identity. Read more at
    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""

    aad_token: Optional[str] = None
    """Opaque token that contains claims that you can use in Azure Active Directory to access cloud
    services."""

    def as_dict(self) -> dict:
        """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body."""
        # The single field is a plain string; omit it entirely when unset.
        return {} if self.aad_token is None else {'aad_token': self.aad_token}

    def as_shallow_dict(self) -> dict:
        """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes."""
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
        """Deserializes the AzureActiveDirectoryToken from a dictionary."""
        return cls(aad_token=d.get('aad_token', None))
@dataclass
class AzureManagedIdentity:
    """The Azure managed identity configuration."""

    access_connector_id: str
    """The Azure resource ID of the Azure Databricks Access Connector. Use the format
    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`."""

    credential_id: Optional[str] = None
    """The Databricks internal ID that represents this managed identity. This field is only used to
    persist the credential_id once it is fetched from the credentials manager - as we only use the
    protobuf serializer to store credentials, this ID gets persisted to the database. ."""

    managed_identity_id: Optional[str] = None
    """The Azure resource ID of the managed identity. Use the format,
    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}`
    This is only available for user-assigned identities. For system-assigned identities, the
    access_connector_id is used to identify the identity. If this field is not provided, then we
    assume the AzureManagedIdentity is using the system-assigned identity."""

    # Every attribute is a plain string, so the deep and shallow forms coincide.
    def as_dict(self) -> dict:
        """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body."""
        pairs = (('access_connector_id', self.access_connector_id),
                 ('credential_id', self.credential_id),
                 ('managed_identity_id', self.managed_identity_id))
        return {name: value for name, value in pairs if value is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes."""
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentity:
        """Deserializes the AzureManagedIdentity from a dictionary."""
        keys = ('access_connector_id', 'credential_id', 'managed_identity_id')
        return cls(**{key: d.get(key, None) for key in keys})
Only applicable when purpose is **STORAGE**.""" + directory_id: str """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.""" @@ -433,6 +671,14 @@ def as_dict(self) -> dict: if self.directory_id is not None: body['directory_id'] = self.directory_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes.""" + body = {} + if self.application_id is not None: body['application_id'] = self.application_id + if self.client_secret is not None: body['client_secret'] = self.client_secret + if self.directory_id is not None: body['directory_id'] = self.directory_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal: """Deserializes the AzureServicePrincipal from a dictionary.""" @@ -455,6 +701,12 @@ def as_dict(self) -> dict: if self.sas_token is not None: body['sas_token'] = self.sas_token return body + def as_shallow_dict(self) -> dict: + """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sas_token is not None: body['sas_token'] = self.sas_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas: """Deserializes the AzureUserDelegationSas from a dictionary.""" @@ -469,6 +721,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelRefreshResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelRefreshResponse: """Deserializes the CancelRefreshResponse from a dictionary.""" @@ -530,9 +787,6 @@ class CatalogInfo: provisioning_info: Optional[ProvisioningInfo] = None """Status of an asynchronously provisioned resource.""" - securable_kind: Optional[CatalogInfoSecurableKind] = None - """Kind of catalog securable.""" - securable_type: 
Optional[str] = None share_name: Optional[str] = None @@ -574,7 +828,36 @@ def as_dict(self) -> dict: if self.properties: body['properties'] = self.properties if self.provider_name is not None: body['provider_name'] = self.provider_name if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict() - if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value + if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.share_name is not None: body['share_name'] = self.share_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_type is not None: body['catalog_type'] = self.catalog_type + if self.comment is not None: body['comment'] = self.comment + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.effective_predictive_optimization_flag: + body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.full_name is not None: body['full_name'] = self.full_name + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not 
None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + if self.provider_name is not None: body['provider_name'] = self.provider_name + if self.provisioning_info: body['provisioning_info'] = self.provisioning_info if self.securable_type is not None: body['securable_type'] = self.securable_type if self.share_name is not None: body['share_name'] = self.share_name if self.storage_location is not None: body['storage_location'] = self.storage_location @@ -605,7 +888,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo: properties=d.get('properties', None), provider_name=d.get('provider_name', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), - securable_kind=_enum(d, 'securable_kind', CatalogInfoSecurableKind), securable_type=d.get('securable_type', None), share_name=d.get('share_name', None), storage_location=d.get('storage_location', None), @@ -614,24 +896,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo: updated_by=d.get('updated_by', None)) -class CatalogInfoSecurableKind(Enum): - """Kind of catalog securable.""" - - CATALOG_DELTASHARING = 'CATALOG_DELTASHARING' - CATALOG_FOREIGN_BIGQUERY = 'CATALOG_FOREIGN_BIGQUERY' - CATALOG_FOREIGN_DATABRICKS = 'CATALOG_FOREIGN_DATABRICKS' - CATALOG_FOREIGN_MYSQL = 'CATALOG_FOREIGN_MYSQL' - CATALOG_FOREIGN_POSTGRESQL = 'CATALOG_FOREIGN_POSTGRESQL' - CATALOG_FOREIGN_REDSHIFT = 'CATALOG_FOREIGN_REDSHIFT' - CATALOG_FOREIGN_SNOWFLAKE = 'CATALOG_FOREIGN_SNOWFLAKE' - CATALOG_FOREIGN_SQLDW = 'CATALOG_FOREIGN_SQLDW' - CATALOG_FOREIGN_SQLSERVER = 'CATALOG_FOREIGN_SQLSERVER' - CATALOG_INTERNAL = 'CATALOG_INTERNAL' - CATALOG_STANDARD = 'CATALOG_STANDARD' - CATALOG_SYSTEM = 'CATALOG_SYSTEM' - CATALOG_SYSTEM_DELTASHARING = 'CATALOG_SYSTEM_DELTASHARING' - - class CatalogIsolationMode(Enum): """Whether the current securable is accessible from all workspaces or a 
specific set of workspaces.""" @@ -666,6 +930,14 @@ def as_dict(self) -> dict: if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key return body + def as_shallow_dict(self) -> dict: + """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CloudflareApiToken: """Deserializes the CloudflareApiToken from a dictionary.""" @@ -700,7 +972,6 @@ class ColumnInfo: """Full data type specification, JSON-serialized.""" type_name: Optional[ColumnTypeName] = None - """Name of type (INT, STRUCT, MAP, etc.).""" type_precision: Optional[int] = None """Digits of precision; required for DecimalTypes.""" @@ -728,6 +999,23 @@ def as_dict(self) -> dict: if self.type_text is not None: body['type_text'] = self.type_text return body + def as_shallow_dict(self) -> dict: + """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.mask: body['mask'] = self.mask + if self.name is not None: body['name'] = self.name + if self.nullable is not None: body['nullable'] = self.nullable + if self.partition_index is not None: body['partition_index'] = self.partition_index + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = 
self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" @@ -762,6 +1050,13 @@ def as_dict(self) -> dict: if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names] return body + def as_shallow_dict(self) -> dict: + """Serializes the ColumnMask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: body['function_name'] = self.function_name + if self.using_column_names: body['using_column_names'] = self.using_column_names + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ColumnMask: """Deserializes the ColumnMask from a dictionary.""" @@ -770,7 +1065,6 @@ def from_dict(cls, d: Dict[str, any]) -> ColumnMask: class ColumnTypeName(Enum): - """Name of type (INT, STRUCT, MAP, etc.).""" ARRAY = 'ARRAY' BINARY = 'BINARY' @@ -793,6 +1087,7 @@ class ColumnTypeName(Enum): TIMESTAMP = 'TIMESTAMP' TIMESTAMP_NTZ = 'TIMESTAMP_NTZ' USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' + VARIANT = 'VARIANT' @dataclass @@ -839,9 +1134,6 @@ class ConnectionInfo: read_only: Optional[bool] = None """If the connection is read only.""" - securable_kind: Optional[ConnectionInfoSecurableKind] = None - """Kind of connection securable.""" - securable_type: Optional[str] = None updated_at: Optional[int] = None @@ -870,7 +1162,29 @@ def as_dict(self) -> dict: if self.properties: body['properties'] = self.properties if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict() if self.read_only is not None: body['read_only'] = self.read_only - if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value + if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: 
body['updated_by'] = self.updated_by + if self.url is not None: body['url'] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.connection_id is not None: body['connection_id'] = self.connection_id + if self.connection_type is not None: body['connection_type'] = self.connection_type + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.credential_type is not None: body['credential_type'] = self.credential_type + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + if self.provisioning_info: body['provisioning_info'] = self.provisioning_info + if self.read_only is not None: body['read_only'] = self.read_only if self.securable_type is not None: body['securable_type'] = self.securable_type if self.updated_at is not None: body['updated_at'] = self.updated_at if self.updated_by is not None: body['updated_by'] = self.updated_by @@ -894,31 +1208,12 @@ def from_dict(cls, d: Dict[str, any]) -> ConnectionInfo: properties=d.get('properties', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), read_only=d.get('read_only', None), - securable_kind=_enum(d, 'securable_kind', ConnectionInfoSecurableKind), securable_type=d.get('securable_type', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None)) -class ConnectionInfoSecurableKind(Enum): - """Kind of connection securable.""" - - CONNECTION_BIGQUERY = 
'CONNECTION_BIGQUERY' - CONNECTION_BUILTIN_HIVE_METASTORE = 'CONNECTION_BUILTIN_HIVE_METASTORE' - CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS' - CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE' - CONNECTION_GLUE = 'CONNECTION_GLUE' - CONNECTION_HTTP_BEARER = 'CONNECTION_HTTP_BEARER' - CONNECTION_MYSQL = 'CONNECTION_MYSQL' - CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG' - CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL' - CONNECTION_REDSHIFT = 'CONNECTION_REDSHIFT' - CONNECTION_SNOWFLAKE = 'CONNECTION_SNOWFLAKE' - CONNECTION_SQLDW = 'CONNECTION_SQLDW' - CONNECTION_SQLSERVER = 'CONNECTION_SQLSERVER' - - class ConnectionType(Enum): """The type of connection.""" @@ -961,6 +1256,16 @@ def as_dict(self) -> dict: if self.timestamp is not None: body['timestamp'] = self.timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: + body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + if self.last_processed_commit_version is not None: + body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ContinuousUpdateStatus: """Deserializes the ContinuousUpdateStatus from a dictionary.""" @@ -1011,6 +1316,19 @@ def as_dict(self) -> dict: if self.storage_root is not None: body['storage_root'] = self.storage_root return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if 
@dataclass
class CreateCredentialRequest:
    name: str
    """The credential name. The name must be unique among storage and service credentials within the
    metastore."""

    aws_iam_role: Optional[AwsIamRole] = None
    """The AWS IAM role configuration"""

    azure_managed_identity: Optional[AzureManagedIdentity] = None
    """The Azure managed identity configuration."""

    azure_service_principal: Optional[AzureServicePrincipal] = None
    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""

    comment: Optional[str] = None
    """Comment associated with the credential."""

    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""

    purpose: Optional[CredentialPurpose] = None
    """Indicates the purpose of the credential."""

    read_only: Optional[bool] = None
    """Whether the credential is usable only for read operations. Only applicable when purpose is
    **STORAGE**."""

    skip_validation: Optional[bool] = None
    """Optional. Supplying true to this argument skips validation of the created set of credentials."""

    def as_dict(self) -> dict:
        """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
        # Start from the shallow form, then deep-serialize the nested message
        # fields and unwrap the enum in place.
        body = self.as_shallow_dict()
        for key in ('aws_iam_role', 'azure_managed_identity', 'azure_service_principal',
                    'databricks_gcp_service_account'):
            if key in body:
                body[key] = body[key].as_dict()
        if 'purpose' in body:
            body['purpose'] = body['purpose'].value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
        body = {}
        # Nested message fields are included when truthy, matching the
        # serialization convention used throughout this module.
        for key in ('aws_iam_role', 'azure_managed_identity', 'azure_service_principal',
                    'databricks_gcp_service_account'):
            value = getattr(self, key)
            if value:
                body[key] = value
        # Scalar fields are included whenever they are not None.
        for key in ('comment', 'name', 'purpose', 'read_only', 'skip_validation'):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
        """Deserializes the CreateCredentialRequest from a dictionary."""
        kwargs = dict(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
                      azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                      azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                      comment=d.get('comment', None),
                      databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
                                                                DatabricksGcpServiceAccount),
                      name=d.get('name', None),
                      purpose=_enum(d, 'purpose', CredentialPurpose),
                      read_only=d.get('read_only', None),
                      skip_validation=d.get('skip_validation', None))
        return cls(**kwargs)
self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.data_type is not None: body['data_type'] = self.data_type + if self.external_language is not None: body['external_language'] = self.external_language + if self.external_name is not None: body['external_name'] = self.external_name + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.input_params: body['input_params'] = self.input_params + if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic + if self.is_null_call is not None: body['is_null_call'] = self.is_null_call + if self.name is not None: body['name'] = self.name + if self.parameter_style is not None: body['parameter_style'] = self.parameter_style + if self.properties is not None: body['properties'] = self.properties + if self.return_params: body['return_params'] = self.return_params + if self.routine_body is not None: body['routine_body'] = self.routine_body + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.security_type is not None: body['security_type'] = self.security_type + if self.specific_name is not None: body['specific_name'] = self.specific_name + if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access + if self.sql_path is not None: body['sql_path'] = self.sql_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateFunction: """Deserializes the CreateFunction from a dictionary.""" @@ -1261,6 +1707,12 @@ def as_dict(self) -> dict: if self.function_info: body['function_info'] = self.function_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateFunctionRequest into a shallow dictionary 
of its immediate attributes.""" + body = {} + if self.function_info: body['function_info'] = self.function_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateFunctionRequest: """Deserializes the CreateFunctionRequest from a dictionary.""" @@ -1278,7 +1730,7 @@ class CreateFunctionRoutineBody(Enum): class CreateFunctionSecurityType(Enum): - """Function security type.""" + """The security type of the function.""" DEFINER = 'DEFINER' @@ -1312,6 +1764,14 @@ def as_dict(self) -> dict: if self.storage_root is not None: body['storage_root'] = self.storage_root return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateMetastore: """Deserializes the CreateMetastore from a dictionary.""" @@ -1340,6 +1800,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateMetastoreAssignment: """Deserializes the CreateMetastoreAssignment from a dictionary.""" @@ -1419,6 +1887,27 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateMonitor 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets_dir is not None: body['assets_dir'] = self.assets_dir + if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name + if self.custom_metrics: body['custom_metrics'] = self.custom_metrics + if self.data_classification_config: + body['data_classification_config'] = self.data_classification_config + if self.inference_log: body['inference_log'] = self.inference_log + if self.notifications: body['notifications'] = self.notifications + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.schedule: body['schedule'] = self.schedule + if self.skip_builtin_dashboard is not None: + body['skip_builtin_dashboard'] = self.skip_builtin_dashboard + if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs + if self.snapshot: body['snapshot'] = self.snapshot + if self.table_name is not None: body['table_name'] = self.table_name + if self.time_series: body['time_series'] = self.time_series + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateMonitor: """Deserializes the CreateMonitor from a dictionary.""" @@ -1439,29 +1928,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateMonitor: warehouse_id=d.get('warehouse_id', None)) -@dataclass -class CreateOnlineTableRequest: - """Online Table information.""" - - name: Optional[str] = None - """Full three-part (catalog, schema, table) name of the table.""" - - spec: Optional[OnlineTableSpec] = None - """Specification of the online table.""" - - def as_dict(self) -> dict: - """Serializes the CreateOnlineTableRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: body['name'] = self.name - if self.spec: body['spec'] = self.spec.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 
CreateOnlineTableRequest: - """Deserializes the CreateOnlineTableRequest from a dictionary.""" - return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec)) - - @dataclass class CreateRegisteredModelRequest: catalog_name: str @@ -1489,6 +1955,16 @@ def as_dict(self) -> dict: if self.storage_location is not None: body['storage_location'] = self.storage_location return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRegisteredModelRequest: """Deserializes the CreateRegisteredModelRequest from a dictionary.""" @@ -1507,6 +1983,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" @@ -1540,6 +2021,16 @@ def as_dict(self) -> dict: if self.storage_root is not None: body['storage_root'] = self.storage_root return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateSchema into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.properties: body['properties'] = self.properties + if self.storage_root is not None: 
body['storage_root'] = self.storage_root + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateSchema: """Deserializes the CreateSchema from a dictionary.""" @@ -1595,6 +2086,21 @@ def as_dict(self) -> dict: if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.name is not None: body['name'] = self.name + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateStorageCredential: """Deserializes the CreateStorageCredential from a dictionary.""" @@ -1627,6 +2133,13 @@ def as_dict(self) -> dict: if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.constraint: body['constraint'] = self.constraint + if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateTableConstraint: """Deserializes the CreateTableConstraint from a dictionary.""" @@ -1664,6 +2177,17 @@ def 
as_dict(self) -> dict: if self.volume_type is not None: body['volume_type'] = self.volume_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.volume_type is not None: body['volume_type'] = self.volume_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent: """Deserializes the CreateVolumeRequestContent from a dictionary.""" @@ -1675,88 +2199,313 @@ def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent: volume_type=_enum(d, 'volume_type', VolumeType)) -class CredentialType(Enum): - """The type of credential.""" +@dataclass +class CredentialInfo: + aws_iam_role: Optional[AwsIamRole] = None + """The AWS IAM role configuration""" - BEARER_TOKEN = 'BEARER_TOKEN' - USERNAME_PASSWORD = 'USERNAME_PASSWORD' + azure_managed_identity: Optional[AzureManagedIdentity] = None + """The Azure managed identity configuration.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration. 
Only applicable when purpose is **STORAGE**.""" -@dataclass -class CurrentWorkspaceBindings: - """Currently assigned workspaces""" + comment: Optional[str] = None + """Comment associated with the credential.""" - workspaces: Optional[List[int]] = None - """A list of workspace IDs.""" + created_at: Optional[int] = None + """Time at which this credential was created, in epoch milliseconds.""" - def as_dict(self) -> dict: - """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.workspaces: body['workspaces'] = [v for v in self.workspaces] - return body + created_by: Optional[str] = None + """Username of credential creator.""" - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings: - """Deserializes the CurrentWorkspaceBindings from a dictionary.""" - return cls(workspaces=d.get('workspaces', None)) + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" + full_name: Optional[str] = None + """The full name of the credential.""" -class DataSourceFormat(Enum): - """Data source format""" + id: Optional[str] = None + """The unique identifier of the credential.""" - AVRO = 'AVRO' - BIGQUERY_FORMAT = 'BIGQUERY_FORMAT' - CSV = 'CSV' - DATABRICKS_FORMAT = 'DATABRICKS_FORMAT' - DELTA = 'DELTA' - DELTASHARING = 'DELTASHARING' - HIVE_CUSTOM = 'HIVE_CUSTOM' - HIVE_SERDE = 'HIVE_SERDE' - JSON = 'JSON' - MYSQL_FORMAT = 'MYSQL_FORMAT' - NETSUITE_FORMAT = 'NETSUITE_FORMAT' - ORC = 'ORC' - PARQUET = 'PARQUET' - POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT' - REDSHIFT_FORMAT = 'REDSHIFT_FORMAT' - SALESFORCE_FORMAT = 'SALESFORCE_FORMAT' - SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT' - SQLDW_FORMAT = 'SQLDW_FORMAT' - SQLSERVER_FORMAT = 'SQLSERVER_FORMAT' - TEXT = 'TEXT' - UNITY_CATALOG = 'UNITY_CATALOG' - VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT' - WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT' + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + metastore_id: Optional[str] = None + """Unique identifier of the parent metastore.""" -@dataclass -class DatabricksGcpServiceAccountRequest: + name: Optional[str] = None + """The credential name. 
The name must be unique among storage and service credentials within the + metastore.""" - def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - return body + owner: Optional[str] = None + """Username of current owner of credential.""" - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest: - """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary.""" - return cls() + purpose: Optional[CredentialPurpose] = None + """Indicates the purpose of the credential.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" + updated_at: Optional[int] = None + """Time at which this credential was last modified, in epoch milliseconds.""" -@dataclass -class DatabricksGcpServiceAccountResponse: - credential_id: Optional[str] = None - """The Databricks internal ID that represents this service account. This is an output-only field.""" + updated_by: Optional[str] = None + """Username of user who last modified the credential.""" - email: Optional[str] = None - """The email of the service account. This is an output-only field.""" + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. 
Only applicable when + purpose is **STORAGE**.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.email is not None: body['email'] = self.email - return body + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: body['full_name'] = self.full_name + if self.id is not None: body['id'] = self.id + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not None: + body['used_for_managed_storage'] = self.used_for_managed_storage + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CredentialInfo into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.full_name is not None: body['full_name'] = self.full_name + if self.id is not None: body['id'] = self.id + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.purpose is not None: body['purpose'] = self.purpose + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not None: + body['used_for_managed_storage'] = self.used_for_managed_storage + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CredentialInfo: + """Deserializes the CredentialInfo from a dictionary.""" + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), + azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + comment=d.get('comment', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', + 
DatabricksGcpServiceAccount), + full_name=d.get('full_name', None), + id=d.get('id', None), + isolation_mode=_enum(d, 'isolation_mode', IsolationMode), + metastore_id=d.get('metastore_id', None), + name=d.get('name', None), + owner=d.get('owner', None), + purpose=_enum(d, 'purpose', CredentialPurpose), + read_only=d.get('read_only', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None), + used_for_managed_storage=d.get('used_for_managed_storage', None)) + + +class CredentialPurpose(Enum): + + SERVICE = 'SERVICE' + STORAGE = 'STORAGE' + + +class CredentialType(Enum): + """The type of credential.""" + + BEARER_TOKEN = 'BEARER_TOKEN' + USERNAME_PASSWORD = 'USERNAME_PASSWORD' + + +@dataclass +class CredentialValidationResult: + message: Optional[str] = None + """Error message would exist when the result does not equal to **PASS**.""" + + result: Optional[ValidateCredentialResult] = None + """The results of the tested operation.""" + + def as_dict(self) -> dict: + """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.result is not None: body['result'] = self.result.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.result is not None: body['result'] = self.result + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CredentialValidationResult: + """Deserializes the CredentialValidationResult from a dictionary.""" + return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult)) + + +@dataclass +class CurrentWorkspaceBindings: + """Currently assigned workspaces""" + + workspaces: Optional[List[int]] = None + """A list of workspace IDs.""" + + def as_dict(self) 
-> dict: + """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.workspaces: body['workspaces'] = [v for v in self.workspaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CurrentWorkspaceBindings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspaces: body['workspaces'] = self.workspaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings: + """Deserializes the CurrentWorkspaceBindings from a dictionary.""" + return cls(workspaces=d.get('workspaces', None)) + + +class DataSourceFormat(Enum): + """Data source format""" + + AVRO = 'AVRO' + BIGQUERY_FORMAT = 'BIGQUERY_FORMAT' + CSV = 'CSV' + DATABRICKS_FORMAT = 'DATABRICKS_FORMAT' + DELTA = 'DELTA' + DELTASHARING = 'DELTASHARING' + HIVE_CUSTOM = 'HIVE_CUSTOM' + HIVE_SERDE = 'HIVE_SERDE' + JSON = 'JSON' + MYSQL_FORMAT = 'MYSQL_FORMAT' + NETSUITE_FORMAT = 'NETSUITE_FORMAT' + ORC = 'ORC' + PARQUET = 'PARQUET' + POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT' + REDSHIFT_FORMAT = 'REDSHIFT_FORMAT' + SALESFORCE_FORMAT = 'SALESFORCE_FORMAT' + SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT' + SQLDW_FORMAT = 'SQLDW_FORMAT' + SQLSERVER_FORMAT = 'SQLSERVER_FORMAT' + TEXT = 'TEXT' + UNITY_CATALOG = 'UNITY_CATALOG' + VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT' + WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT' + + +@dataclass +class DatabricksGcpServiceAccount: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + + credential_id: Optional[str] = None + """The Databricks internal ID that represents this managed identity. 
This field is only used to + persist the credential_id once it is fetched from the credentials manager - as we only use the + protobuf serializer to store credentials, this ID gets persisted to the database""" + + email: Optional[str] = None + """The email of the service account.""" + + private_key_id: Optional[str] = None + """The ID that represents the private key for this Service Account""" + + def as_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email + if self.private_key_id is not None: body['private_key_id'] = self.private_key_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email + if self.private_key_id is not None: body['private_key_id'] = self.private_key_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccount: + """Deserializes the DatabricksGcpServiceAccount from a dictionary.""" + return cls(credential_id=d.get('credential_id', None), + email=d.get('email', None), + private_key_id=d.get('private_key_id', None)) + + +@dataclass +class DatabricksGcpServiceAccountRequest: + + def as_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest: + """Deserializes the 
DatabricksGcpServiceAccountRequest from a dictionary.""" + return cls() + + +@dataclass +class DatabricksGcpServiceAccountResponse: + credential_id: Optional[str] = None + """The Databricks internal ID that represents this service account. This is an output-only field.""" + + email: Optional[str] = None + """The email of the service account. This is an output-only field.""" + + def as_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email + return body @classmethod def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountResponse: @@ -1772,12 +2521,36 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse: """Deserializes the DeleteAliasResponse from a dictionary.""" return cls() +@dataclass +class DeleteCredentialResponse: + + def as_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialResponse: + """Deserializes the DeleteCredentialResponse from a 
dictionary.""" + return cls() + + @dataclass class DeleteResponse: @@ -1786,6 +2559,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -1806,6 +2584,12 @@ def as_dict(self) -> dict: if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties return body + def as_shallow_dict(self) -> dict: + """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeltaRuntimePropertiesKvPairs: """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" @@ -1830,6 +2614,13 @@ def as_dict(self) -> dict: if self.table: body['table'] = self.table.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function: body['function'] = self.function + if self.table: body['table'] = self.table + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Dependency: """Deserializes the Dependency from a dictionary.""" @@ -1850,6 +2641,12 @@ def as_dict(self) -> dict: if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies] return body + def as_shallow_dict(self) -> dict: + """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dependencies: body['dependencies'] = self.dependencies + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DependencyList: """Deserializes the DependencyList from a dictionary.""" 
@@ -1864,6 +2661,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DisableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DisableResponse: """Deserializes the DisableResponse from a dictionary.""" @@ -1882,6 +2684,12 @@ def as_dict(self) -> dict: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] return body + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EffectivePermissionsList: """Deserializes the EffectivePermissionsList from a dictionary.""" @@ -1910,6 +2718,14 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EffectivePredictiveOptimizationFlag: """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" @@ -1948,6 +2764,14 @@ def as_dict(self) -> dict: if self.privilege is not None: body['privilege'] = self.privilege.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited_from_name is not None: body['inherited_from_name'] = 
self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type + if self.privilege is not None: body['privilege'] = self.privilege + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilege: """Deserializes the EffectivePrivilege from a dictionary.""" @@ -1971,6 +2795,13 @@ def as_dict(self) -> dict: if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges] return body + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = self.privileges + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilegeAssignment: """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" @@ -1994,6 +2825,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnableResponse: """Deserializes the EnableResponse from a dictionary.""" @@ -2013,6 +2849,12 @@ def as_dict(self) -> dict: if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EncryptionDetails: """Deserializes the EncryptionDetails from a dictionary.""" @@ -2052,7 +2894,6 @@ class ExternalLocationInfo: sufficient.""" isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is 
accessible from all workspaces or a specific set of workspaces.""" metastore_id: Optional[str] = None """Unique identifier of metastore hosting the external location.""" @@ -2097,6 +2938,28 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_point is not None: body['access_point'] = self.access_point + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.fallback is not None: body['fallback'] = self.fallback + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo: """Deserializes the ExternalLocationInfo from a dictionary.""" @@ -2141,6 +3004,14 @@ def as_dict(self) -> dict: if self.timestamp is not None: body['timestamp'] = self.timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the FailedStatus into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FailedStatus: """Deserializes the FailedStatus from a dictionary.""" @@ -2171,6 +3042,15 @@ def as_dict(self) -> dict: if self.parent_table is not None: body['parent_table'] = self.parent_table return body + def as_shallow_dict(self) -> dict: + """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.child_columns: body['child_columns'] = self.child_columns + if self.name is not None: body['name'] = self.name + if self.parent_columns: body['parent_columns'] = self.parent_columns + if self.parent_table is not None: body['parent_table'] = self.parent_table + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ForeignKeyConstraint: """Deserializes the ForeignKeyConstraint from a dictionary.""" @@ -2194,6 +3074,12 @@ def as_dict(self) -> dict: if self.function_full_name is not None: body['function_full_name'] = self.function_full_name return body + def as_shallow_dict(self) -> dict: + """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_full_name is not None: body['function_full_name'] = self.function_full_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FunctionDependency: """Deserializes the FunctionDependency from a dictionary.""" @@ -2330,6 +3216,41 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.browse_only is not None: body['browse_only'] = self.browse_only + if 
self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_type is not None: body['data_type'] = self.data_type + if self.external_language is not None: body['external_language'] = self.external_language + if self.external_name is not None: body['external_name'] = self.external_name + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.full_name is not None: body['full_name'] = self.full_name + if self.function_id is not None: body['function_id'] = self.function_id + if self.input_params: body['input_params'] = self.input_params + if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic + if self.is_null_call is not None: body['is_null_call'] = self.is_null_call + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.parameter_style is not None: body['parameter_style'] = self.parameter_style + if self.properties is not None: body['properties'] = self.properties + if self.return_params: body['return_params'] = self.return_params + if self.routine_body is not None: body['routine_body'] = self.routine_body + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.security_type is not None: body['security_type'] = self.security_type + if self.specific_name is not None: body['specific_name'] = self.specific_name + if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access + if self.sql_path is not None: 
body['sql_path'] = self.sql_path + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FunctionInfo: """Deserializes the FunctionInfo from a dictionary.""" @@ -2382,7 +3303,7 @@ class FunctionInfoRoutineBody(Enum): class FunctionInfoSecurityType(Enum): - """Function security type.""" + """The security type of the function.""" DEFINER = 'DEFINER' @@ -2404,7 +3325,6 @@ class FunctionParameterInfo: """Full data type spec, SQL/catalogString text.""" type_name: ColumnTypeName - """Name of type (INT, STRUCT, MAP, etc.).""" position: int """Ordinal position of column (starting at position 0).""" @@ -2450,6 +3370,23 @@ def as_dict(self) -> dict: if self.type_text is not None: body['type_text'] = self.type_text return body + def as_shallow_dict(self) -> dict: + """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.parameter_default is not None: body['parameter_default'] = self.parameter_default + if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode + if self.parameter_type is not None: body['parameter_type'] = self.parameter_type + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
FunctionParameterInfo: """Deserializes the FunctionParameterInfo from a dictionary.""" @@ -2478,6 +3415,12 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] return body + def as_shallow_dict(self) -> dict: + """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" + body = {} + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfos: """Deserializes the FunctionParameterInfos from a dictionary.""" @@ -2510,10 +3453,106 @@ def as_dict(self) -> dict: if self.oauth_token is not None: body['oauth_token'] = self.oauth_token return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.oauth_token is not None: body['oauth_token'] = self.oauth_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken: + """Deserializes the GcpOauthToken from a dictionary.""" + return cls(oauth_token=d.get('oauth_token', None)) + + +@dataclass +class GenerateTemporaryServiceCredentialAzureOptions: + """The Azure cloud options to customize the requested temporary credential""" + + resources: Optional[List[str]] = None + """The resources to which the temporary Azure credential should apply. 
These resources are the + scopes that are passed to the token provider (see + https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" + + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.resources: body['resources'] = [v for v in self.resources] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.resources: body['resources'] = self.resources + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzureOptions: + """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" + return cls(resources=d.get('resources', None)) + + +@dataclass +class GenerateTemporaryServiceCredentialGcpOptions: + """The GCP cloud options to customize the requested temporary credential""" + + scopes: Optional[List[str]] = None + """The scopes to which the temporary GCP credential should apply. 
These resources are the scopes + that are passed to the token provider (see + https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" + + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.scopes: body['scopes'] = [v for v in self.scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scopes: body['scopes'] = self.scopes + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialGcpOptions: + """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" + return cls(scopes=d.get('scopes', None)) + + +@dataclass +class GenerateTemporaryServiceCredentialRequest: + credential_name: str + """The name of the service credential used to generate a temporary credential""" + + azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None + """The Azure cloud options to customize the requested temporary credential""" + + gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None + """The GCP cloud options to customize the requested temporary credential""" + + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.azure_options: body['azure_options'] = self.azure_options.as_dict() + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.azure_options: body['azure_options'] = self.azure_options + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.gcp_options: body['gcp_options'] = self.gcp_options + return body + @classmethod - def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken: - """Deserializes the GcpOauthToken from a dictionary.""" - return cls(oauth_token=d.get('oauth_token', None)) + def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialRequest: + """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary.""" + return cls(azure_options=_from_dict(d, 'azure_options', + GenerateTemporaryServiceCredentialAzureOptions), + credential_name=d.get('credential_name', None), + gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions)) @dataclass @@ -2533,6 +3572,13 @@ def as_dict(self) -> dict: if self.table_id is not None: body['table_id'] = self.table_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.operation is not None: body['operation'] = self.operation + if self.table_id is not None: body['table_id'] = self.table_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest: """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary.""" @@ -2545,6 +3591,11 @@ class GenerateTemporaryTableCredentialResponse: """AWS temporary credentials for API authentication. Read more at https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" + azure_aad: Optional[AzureActiveDirectoryToken] = None + """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + Identity. 
Read more at + https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" + azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None """Azure temporary credentials for API authentication. Read more at https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas""" @@ -2568,6 +3619,7 @@ def as_dict(self) -> dict: """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict() + if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict() if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict() if self.expiration_time is not None: body['expiration_time'] = self.expiration_time @@ -2576,10 +3628,23 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials + if self.azure_aad: body['azure_aad'] = self.azure_aad + if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token + if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse: """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary.""" return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials), + 
azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas', AzureUserDelegationSas), expiration_time=d.get('expiration_time', None), @@ -2591,6 +3656,7 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRespons class GetBindingsSecurableType(Enum): CATALOG = 'catalog' + CREDENTIAL = 'credential' EXTERNAL_LOCATION = 'external_location' STORAGE_CREDENTIAL = 'storage_credential' @@ -2687,6 +3753,38 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: + body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body[ + 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: + body['privilege_model_version'] = 
self.privilege_model_version + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: + body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse: """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" @@ -2731,6 +3829,12 @@ def as_dict(self) -> dict: if self.quota_info: body['quota_info'] = self.quota_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quota_info: body['quota_info'] = self.quota_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse: """Deserializes the GetQuotaResponse from a dictionary.""" @@ -2738,7 +3842,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse: class IsolationMode(Enum): - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED' ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN' @@ -2756,6 +3859,12 @@ def as_dict(self) -> dict: if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspace_ids: body['workspace_ids'] = self.workspace_ids + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAccountMetastoreAssignmentsResponse: 
"""Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary.""" @@ -2774,6 +3883,12 @@ def as_dict(self) -> dict: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.storage_credentials: body['storage_credentials'] = self.storage_credentials + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAccountStorageCredentialsResponse: """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" @@ -2796,6 +3911,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalogs: body['catalogs'] = self.catalogs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListCatalogsResponse: """Deserializes the ListCatalogsResponse from a dictionary.""" @@ -2819,6 +3941,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connections: body['connections'] = self.connections + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse: """Deserializes the ListConnectionsResponse from a dictionary.""" @@ -2826,6 +3955,35 @@ def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse: next_page_token=d.get('next_page_token', None)) 
+@dataclass +class ListCredentialsResponse: + credentials: Optional[List[CredentialInfo]] = None + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + + def as_dict(self) -> dict: + """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credentials: body['credentials'] = self.credentials + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse: + """Deserializes the ListCredentialsResponse from a dictionary.""" + return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo), + next_page_token=d.get('next_page_token', None)) + + @dataclass class ListExternalLocationsResponse: external_locations: Optional[List[ExternalLocationInfo]] = None @@ -2843,6 +4001,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_locations: body['external_locations'] = self.external_locations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListExternalLocationsResponse: """Deserializes the ListExternalLocationsResponse 
from a dictionary.""" @@ -2866,6 +4031,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.functions: body['functions'] = self.functions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListFunctionsResponse: """Deserializes the ListFunctionsResponse from a dictionary.""" @@ -2884,6 +4056,12 @@ def as_dict(self) -> dict: if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastores: body['metastores'] = self.metastores + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListMetastoresResponse: """Deserializes the ListMetastoresResponse from a dictionary.""" @@ -2905,6 +4083,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_versions: body['model_versions'] = self.model_versions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse: """Deserializes the ListModelVersionsResponse from a dictionary.""" @@ -2928,6 +4113,13 @@ def as_dict(self) -> dict: if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListQuotasResponse into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.quotas: body['quotas'] = self.quotas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse: """Deserializes the ListQuotasResponse from a dictionary.""" @@ -2950,6 +4142,13 @@ def as_dict(self) -> dict: if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRegisteredModelsResponse: """Deserializes the ListRegisteredModelsResponse from a dictionary.""" @@ -2973,6 +4172,13 @@ def as_dict(self) -> dict: if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = self.schemas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSchemasResponse: """Deserializes the ListSchemasResponse from a dictionary.""" @@ -2996,6 +4202,13 @@ def as_dict(self) -> dict: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.storage_credentials: body['storage_credentials'] = 
self.storage_credentials + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse: """Deserializes the ListStorageCredentialsResponse from a dictionary.""" @@ -3019,6 +4232,13 @@ def as_dict(self) -> dict: if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = self.schemas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse: """Deserializes the ListSystemSchemasResponse from a dictionary.""" @@ -3042,6 +4262,13 @@ def as_dict(self) -> dict: if self.tables: body['tables'] = [v.as_dict() for v in self.tables] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = self.tables + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListTableSummariesResponse: """Deserializes the ListTableSummariesResponse from a dictionary.""" @@ -3065,6 +4292,13 @@ def as_dict(self) -> dict: if self.tables: body['tables'] = [v.as_dict() for v in self.tables] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = self.tables + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListTablesResponse: """Deserializes the ListTablesResponse from a dictionary.""" @@ -3088,6 +4322,13 @@ def as_dict(self) -> dict: if self.volumes: 
body['volumes'] = [v.as_dict() for v in self.volumes] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.volumes: body['volumes'] = self.volumes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListVolumesResponseContent: """Deserializes the ListVolumesResponseContent from a dictionary.""" @@ -3120,6 +4361,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MetastoreAssignment: """Deserializes the MetastoreAssignment from a dictionary.""" @@ -3220,6 +4469,38 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: + body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if 
self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body[ + 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: + body['privilege_model_version'] = self.privilege_model_version + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: + body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MetastoreInfo: """Deserializes the MetastoreInfo from a dictionary.""" @@ -3339,6 +4620,31 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aliases: body['aliases'] = self.aliases + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if 
self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.id is not None: body['id'] = self.id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version_dependencies: + body['model_version_dependencies'] = self.model_version_dependencies + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" @@ -3395,6 +4701,15 @@ def as_dict(self) -> dict: if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.quartz_cron_expression is not None: + body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorCronSchedule: """Deserializes the MonitorCronSchedule from a dictionary.""" @@ -3421,6 +4736,12 @@ def as_dict(self) -> dict: if self.enabled is not None: 
body['enabled'] = self.enabled return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorDataClassificationConfig: """Deserializes the MonitorDataClassificationConfig from a dictionary.""" @@ -3439,6 +4760,12 @@ def as_dict(self) -> dict: if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses] return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email_addresses: body['email_addresses'] = self.email_addresses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorDestination: """Deserializes the MonitorDestination from a dictionary.""" @@ -3490,6 +4817,18 @@ def as_dict(self) -> dict: if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: body['granularities'] = self.granularities + if self.label_col is not None: body['label_col'] = self.label_col + if self.model_id_col is not None: body['model_id_col'] = self.model_id_col + if self.prediction_col is not None: body['prediction_col'] = self.prediction_col + if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col + if self.problem_type is not None: body['problem_type'] = self.problem_type + if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorInferenceLog: """Deserializes the MonitorInferenceLog from a dictionary.""" @@ -3601,6 +4940,33 @@ def as_dict(self) -> 
dict: if self.time_series: body['time_series'] = self.time_series.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets_dir is not None: body['assets_dir'] = self.assets_dir + if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name + if self.custom_metrics: body['custom_metrics'] = self.custom_metrics + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.data_classification_config: + body['data_classification_config'] = self.data_classification_config + if self.drift_metrics_table_name is not None: + body['drift_metrics_table_name'] = self.drift_metrics_table_name + if self.inference_log: body['inference_log'] = self.inference_log + if self.latest_monitor_failure_msg is not None: + body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg + if self.monitor_version is not None: body['monitor_version'] = self.monitor_version + if self.notifications: body['notifications'] = self.notifications + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body['profile_metrics_table_name'] = self.profile_metrics_table_name + if self.schedule: body['schedule'] = self.schedule + if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs + if self.snapshot: body['snapshot'] = self.snapshot + if self.status is not None: body['status'] = self.status + if self.table_name is not None: body['table_name'] = self.table_name + if self.time_series: body['time_series'] = self.time_series + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorInfo: """Deserializes the MonitorInfo from a dictionary.""" @@ -3672,6 +5038,16 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: body['definition'] = self.definition + if self.input_columns: body['input_columns'] = self.input_columns + if self.name is not None: body['name'] = self.name + if self.output_data_type is not None: body['output_data_type'] = self.output_data_type + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorMetric: """Deserializes the MonitorMetric from a dictionary.""" @@ -3712,6 +5088,14 @@ def as_dict(self) -> dict: body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" + body = {} + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_new_classification_tag_detected: + body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorNotifications: """Deserializes the MonitorNotifications from a dictionary.""" @@ -3751,6 +5135,17 @@ def as_dict(self) -> dict: if self.trigger is not None: body['trigger'] = self.trigger.value return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms + if self.message is not None: body['message'] = self.message + if self.refresh_id is not None: body['refresh_id'] = self.refresh_id + if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms + if self.state is not None: body['state'] = self.state + if self.trigger is not None: body['trigger'] = self.trigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshInfo: 
"""Deserializes the MonitorRefreshInfo from a dictionary.""" @@ -3790,6 +5185,12 @@ def as_dict(self) -> dict: if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes] return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.refreshes: body['refreshes'] = self.refreshes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshListResponse: """Deserializes the MonitorRefreshListResponse from a dictionary.""" @@ -3804,6 +5205,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorSnapshot: """Deserializes the MonitorSnapshot from a dictionary.""" @@ -3831,6 +5237,13 @@ def as_dict(self) -> dict: if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col return body + def as_shallow_dict(self) -> dict: + """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: body['granularities'] = self.granularities + if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MonitorTimeSeries: """Deserializes the MonitorTimeSeries from a dictionary.""" @@ -3848,6 +5261,12 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NamedTableConstraint: """Deserializes the NamedTableConstraint from a dictionary.""" @@ -3883,7 
+5302,18 @@ def as_dict(self) -> dict: if self.status: body['status'] = self.status.as_dict() if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: - body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value + body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.spec: body['spec'] = self.spec + if self.status: body['status'] = self.status + if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state return body @classmethod @@ -3941,6 +5371,19 @@ def as_dict(self) -> dict: if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key return body + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns + if self.run_continuously: body['run_continuously'] = self.run_continuously + if self.run_triggered: body['run_triggered'] = self.run_triggered + if self.source_table_full_name is not None: + body['source_table_full_name'] = self.source_table_full_name + if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpec: """Deserializes the OnlineTableSpec from a dictionary.""" @@ -3962,6 
+5405,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecContinuousSchedulingPolicy: """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary.""" @@ -3976,6 +5424,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolicy: """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary.""" @@ -4037,6 +5490,17 @@ def as_dict(self) -> dict: body['triggered_update_status'] = self.triggered_update_status.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status + if self.detailed_state is not None: body['detailed_state'] = self.detailed_state + if self.failed_status: body['failed_status'] = self.failed_status + if self.message is not None: body['message'] = self.message + if self.provisioning_status: body['provisioning_status'] = self.provisioning_status + if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OnlineTableStatus: """Deserializes the OnlineTableStatus from a dictionary.""" @@ -4067,6 +5531,14 @@ def as_dict(self) -> dict: if self.remove: body['remove'] = [v.value for v in self.remove] return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionsChange into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.add: body['add'] = self.add + if self.principal is not None: body['principal'] = self.principal + if self.remove: body['remove'] = self.remove + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionsChange: """Deserializes the PermissionsChange from a dictionary.""" @@ -4087,6 +5559,12 @@ def as_dict(self) -> dict: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionsList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionsList: """Deserializes the PermissionsList from a dictionary.""" @@ -4126,6 +5604,19 @@ def as_dict(self) -> dict: if self.total_row_count is not None: body['total_row_count'] = self.total_row_count return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes.""" + body = {} + if self.estimated_completion_time_seconds is not None: + body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body['latest_version_currently_processing'] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body['sync_progress_completion'] = self.sync_progress_completion + if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count + if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineProgress: """Deserializes the PipelineProgress from a dictionary.""" @@ -4151,6 +5642,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = 
self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.child_columns: body['child_columns'] = self.child_columns + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PrimaryKeyConstraint: """Deserializes the PrimaryKeyConstraint from a dictionary.""" @@ -4169,6 +5667,7 @@ class Privilege(Enum): CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE' CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME' CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG' + CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE' CREATE_FUNCTION = 'CREATE_FUNCTION' CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE' CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW' @@ -4220,6 +5719,13 @@ def as_dict(self) -> dict: if self.privileges: body['privileges'] = [v.value for v in self.privileges] return body + def as_shallow_dict(self) -> dict: + """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = self.privileges + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" @@ -4241,6 +5747,12 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo: """Deserializes the ProvisioningInfo from a dictionary.""" @@ -4250,6 +5762,7 @@ def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo: class ProvisioningInfoState(Enum): ACTIVE = 
'ACTIVE' + DEGRADED = 'DEGRADED' DELETING = 'DELETING' FAILED = 'FAILED' PROVISIONING = 'PROVISIONING' @@ -4272,6 +5785,13 @@ def as_dict(self) -> dict: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: + body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus: """Deserializes the ProvisioningStatus from a dictionary.""" @@ -4311,6 +5831,17 @@ def as_dict(self) -> dict: if self.quota_name is not None: body['quota_name'] = self.quota_name return body + def as_shallow_dict(self) -> dict: + """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at + if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name + if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type + if self.quota_count is not None: body['quota_count'] = self.quota_count + if self.quota_limit is not None: body['quota_limit'] = self.quota_limit + if self.quota_name is not None: body['quota_name'] = self.quota_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QuotaInfo: """Deserializes the QuotaInfo from a dictionary.""" @@ -4344,6 +5875,14 @@ def as_dict(self) -> dict: if self.session_token is not None: body['session_token'] = self.session_token return body + def as_shallow_dict(self) -> dict: + """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.secret_access_key is not None: 
body['secret_access_key'] = self.secret_access_key + if self.session_token is not None: body['session_token'] = self.session_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> R2Credentials: """Deserializes the R2Credentials from a dictionary.""" @@ -4368,6 +5907,13 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.table_name is not None: body['table_name'] = self.table_name + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest: """Deserializes the RegenerateDashboardRequest from a dictionary.""" @@ -4389,6 +5935,13 @@ def as_dict(self) -> dict: if self.parent_folder is not None: body['parent_folder'] = self.parent_folder return body + def as_shallow_dict(self) -> dict: + """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.parent_folder is not None: body['parent_folder'] = self.parent_folder + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse: """Deserializes the RegenerateDashboardResponse from a dictionary.""" @@ -4412,6 +5965,13 @@ def as_dict(self) -> dict: if self.version_num is not None: body['version_num'] = self.version_num return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias_name is not None: body['alias_name'] = self.alias_name + if self.version_num is not None: body['version_num'] = self.version_num + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" @@ -4482,6 +6042,25 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aliases: body['aliases'] = self.aliases + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelInfo: """Deserializes the RegisteredModelInfo from a dictionary.""" @@ -4584,6 +6163,31 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.catalog_type is not None: body['catalog_type'] = self.catalog_type + if self.comment is 
not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.effective_predictive_optimization_flag: + body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + if self.schema_id is not None: body['schema_id'] = self.schema_id + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SchemaInfo: """Deserializes the SchemaInfo from a dictionary.""" @@ -4617,19 +6221,21 @@ def from_dict(cls, d: Dict[str, any]) -> SchemaInfo: class SecurableType(Enum): """The type of Unity Catalog securable""" - CATALOG = 'catalog' - CONNECTION = 'connection' - EXTERNAL_LOCATION = 'external_location' - FUNCTION = 'function' - METASTORE = 'metastore' - PIPELINE = 'pipeline' - PROVIDER = 'provider' - RECIPIENT = 'recipient' - SCHEMA = 'schema' - SHARE = 'share' - STORAGE_CREDENTIAL = 'storage_credential' - TABLE = 'table' - VOLUME = 'volume' + CATALOG = 'CATALOG' + CLEAN_ROOM = 'CLEAN_ROOM' + CONNECTION = 'CONNECTION' + CREDENTIAL = 'CREDENTIAL' + EXTERNAL_LOCATION = 'EXTERNAL_LOCATION' + FUNCTION = 'FUNCTION' + METASTORE = 'METASTORE' + PIPELINE = 'PIPELINE' + PROVIDER 
= 'PROVIDER' + RECIPIENT = 'RECIPIENT' + SCHEMA = 'SCHEMA' + SHARE = 'SHARE' + STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL' + TABLE = 'TABLE' + VOLUME = 'VOLUME' @dataclass @@ -4647,6 +6253,13 @@ def as_dict(self) -> dict: if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers + if self.artifact_type is not None: body['artifact_type'] = self.artifact_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetArtifactAllowlist: """Deserializes the SetArtifactAllowlist from a dictionary.""" @@ -4673,6 +6286,14 @@ def as_dict(self) -> dict: if self.version_num is not None: body['version_num'] = self.version_num return body + def as_shallow_dict(self) -> dict: + """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias is not None: body['alias'] = self.alias + if self.full_name is not None: body['full_name'] = self.full_name + if self.version_num is not None: body['version_num'] = self.version_num + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetRegisteredModelAliasRequest: """Deserializes the SetRegisteredModelAliasRequest from a dictionary.""" @@ -4698,6 +6319,13 @@ def as_dict(self) -> dict: if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.algorithm is not None: body['algorithm'] = self.algorithm + if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SseEncryptionDetails: """Deserializes 
the SseEncryptionDetails from a dictionary.""" @@ -4738,11 +6366,13 @@ class StorageCredentialInfo: databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None """The Databricks managed GCP service account configuration.""" + full_name: Optional[str] = None + """The full name of the credential.""" + id: Optional[str] = None """The unique identifier of the credential.""" isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" @@ -4778,6 +6408,7 @@ def as_dict(self) -> dict: if self.created_by is not None: body['created_by'] = self.created_by if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: body['full_name'] = self.full_name if self.id is not None: body['id'] = self.id if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.metastore_id is not None: body['metastore_id'] = self.metastore_id @@ -4790,6 +6421,31 @@ def as_dict(self) -> dict: body['used_for_managed_storage'] = self.used_for_managed_storage return body + def as_shallow_dict(self) -> dict: + """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if 
self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.full_name is not None: body['full_name'] = self.full_name + if self.id is not None: body['id'] = self.id + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not None: + body['used_for_managed_storage'] = self.used_for_managed_storage + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo: """Deserializes the StorageCredentialInfo from a dictionary.""" @@ -4803,6 +6459,7 @@ def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo: created_by=d.get('created_by', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountResponse), + full_name=d.get('full_name', None), id=d.get('id', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), @@ -4830,6 +6487,13 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.schema is not None: body['schema'] = self.schema + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SystemSchemaInfo: """Deserializes the SystemSchemaInfo from a dictionary.""" @@ -4866,6 +6530,14 @@ def as_dict(self) -> dict: if self.primary_key_constraint: 
body['primary_key_constraint'] = self.primary_key_constraint.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TableConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint + if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint + if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableConstraint: """Deserializes the TableConstraint from a dictionary.""" @@ -4888,6 +6560,12 @@ def as_dict(self) -> dict: if self.table_full_name is not None: body['table_full_name'] = self.table_full_name return body + def as_shallow_dict(self) -> dict: + """Serializes the TableDependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.table_full_name is not None: body['table_full_name'] = self.table_full_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableDependency: """Deserializes the TableDependency from a dictionary.""" @@ -4905,6 +6583,12 @@ def as_dict(self) -> dict: if self.table_exists is not None: body['table_exists'] = self.table_exists return body + def as_shallow_dict(self) -> dict: + """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.table_exists is not None: body['table_exists'] = self.table_exists + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableExistsResponse: """Deserializes the TableExistsResponse from a dictionary.""" @@ -5057,6 +6741,48 @@ def as_dict(self) -> dict: if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TableInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_point 
is not None: body['access_point'] = self.access_point + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.columns: body['columns'] = self.columns + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_access_configuration_id is not None: + body['data_access_configuration_id'] = self.data_access_configuration_id + if self.data_source_format is not None: body['data_source_format'] = self.data_source_format + if self.deleted_at is not None: body['deleted_at'] = self.deleted_at + if self.delta_runtime_properties_kvpairs: + body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs + if self.effective_predictive_optimization_flag: + body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.properties: body['properties'] = self.properties + if self.row_filter: body['row_filter'] = self.row_filter + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.sql_path is not None: body['sql_path'] = self.sql_path + if self.storage_credential_name is not None: + body['storage_credential_name'] = self.storage_credential_name + if self.storage_location is not None: 
body['storage_location'] = self.storage_location + if self.table_constraints: body['table_constraints'] = self.table_constraints + if self.table_id is not None: body['table_id'] = self.table_id + if self.table_type is not None: body['table_type'] = self.table_type + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.view_definition is not None: body['view_definition'] = self.view_definition + if self.view_dependencies: body['view_dependencies'] = self.view_dependencies + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableInfo: """Deserializes the TableInfo from a dictionary.""" @@ -5119,6 +6845,13 @@ def as_dict(self) -> dict: if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names] return body + def as_shallow_dict(self) -> dict: + """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: body['function_name'] = self.function_name + if self.input_column_names: body['input_column_names'] = self.input_column_names + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableRowFilter: """Deserializes the TableRowFilter from a dictionary.""" @@ -5140,6 +6873,13 @@ def as_dict(self) -> dict: if self.table_type is not None: body['table_type'] = self.table_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.full_name is not None: body['full_name'] = self.full_name + if self.table_type is not None: body['table_type'] = self.table_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableSummary: """Deserializes the TableSummary from a dictionary.""" @@ -5158,6 +6898,52 @@ class TableType(Enum): VIEW = 'VIEW' +@dataclass +class TemporaryCredentials: + aws_temp_credentials: 
Optional[AwsCredentials] = None + """AWS temporary credentials for API authentication. Read more at + https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" + + azure_aad: Optional[AzureActiveDirectoryToken] = None + """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + Identity. Read more at + https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" + + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. The API client is advised to + cache the credential given this expiration time.""" + + gcp_oauth_token: Optional[GcpOauthToken] = None + """GCP temporary credentials for API authentication. Read more at + https://developers.google.com/identity/protocols/oauth2/service-account""" + + def as_dict(self) -> dict: + """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict() + if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict() + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials + if self.azure_aad: body['azure_aad'] = self.azure_aad + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials: + """Deserializes the TemporaryCredentials from a dictionary.""" + return 
cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials), + azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), + expiration_time=d.get('expiration_time', None), + gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken)) + + @dataclass class TriggeredUpdateStatus: """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE @@ -5184,6 +6970,15 @@ def as_dict(self) -> dict: body['triggered_update_progress'] = self.triggered_update_progress.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TriggeredUpdateStatus: """Deserializes the TriggeredUpdateStatus from a dictionary.""" @@ -5200,6 +6995,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UnassignResponse: """Deserializes the UnassignResponse from a dictionary.""" @@ -5214,6 +7014,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse: """Deserializes the UpdateAssignmentResponse from a dictionary.""" @@ -5223,6 +7028,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse: class 
UpdateBindingsSecurableType(Enum): CATALOG = 'catalog' + CREDENTIAL = 'credential' EXTERNAL_LOCATION = 'external_location' STORAGE_CREDENTIAL = 'storage_credential' @@ -5244,6 +7050,9 @@ class UpdateCatalog: new_name: Optional[str] = None """New name for the catalog.""" + options: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the securable.""" + owner: Optional[str] = None """Username of current owner of catalog.""" @@ -5259,6 +7068,21 @@ def as_dict(self) -> dict: if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.name is not None: body['name'] = self.name if self.new_name is not None: body['new_name'] = self.new_name + if self.options: body['options'] = self.options + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.enable_predictive_optimization is not None: + body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.options: body['options'] = self.options if self.owner is not None: body['owner'] = self.owner if self.properties: body['properties'] = self.properties return body @@ -5272,6 +7096,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog: isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), + options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None)) @@ -5299,6 +7124,15 @@ def as_dict(self) -> dict: if self.owner is not None: body['owner'] = 
self.owner return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.options: body['options'] = self.options + if self.owner is not None: body['owner'] = self.owner + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateConnection: """Deserializes the UpdateConnection from a dictionary.""" @@ -5308,6 +7142,101 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateConnection: owner=d.get('owner', None)) +@dataclass +class UpdateCredentialRequest: + aws_iam_role: Optional[AwsIamRole] = None + """The AWS IAM role configuration""" + + azure_managed_identity: Optional[AzureManagedIdentity] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration. Only applicable when purpose is **STORAGE**.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + + force: Optional[bool] = None + """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**).""" + + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + name_arg: Optional[str] = None + """Name of the credential.""" + + new_name: Optional[str] = None + """New name of credential.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + + skip_validation: Optional[bool] = None + """Supply true to this argument to skip validation of the updated credential.""" + + def as_dict(self) -> dict: + """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.name_arg is not None: body['name_arg'] = self.name_arg + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: 
body['isolation_mode'] = self.isolation_mode + if self.name_arg is not None: body['name_arg'] = self.name_arg + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest: + """Deserializes the UpdateCredentialRequest from a dictionary.""" + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), + azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + comment=d.get('comment', None), + databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', + DatabricksGcpServiceAccount), + force=d.get('force', None), + isolation_mode=_enum(d, 'isolation_mode', IsolationMode), + name_arg=d.get('name_arg', None), + new_name=d.get('new_name', None), + owner=d.get('owner', None), + read_only=d.get('read_only', None), + skip_validation=d.get('skip_validation', None)) + + @dataclass class UpdateExternalLocation: access_point: Optional[str] = None @@ -5331,7 +7260,6 @@ class UpdateExternalLocation: """Force update even if changing url invalidates dependent external tables or mounts.""" isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" name: Optional[str] = None """Name of the external location.""" @@ -5369,6 +7297,24 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_point is not None: body['access_point'] = 
self.access_point + if self.comment is not None: body['comment'] = self.comment + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.fallback is not None: body['fallback'] = self.fallback + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation: """Deserializes the UpdateExternalLocation from a dictionary.""" @@ -5403,6 +7349,13 @@ def as_dict(self) -> dict: if self.owner is not None: body['owner'] = self.owner return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateFunction: """Deserializes the UpdateFunction from a dictionary.""" @@ -5454,6 +7407,24 @@ def as_dict(self) -> dict: body['storage_root_credential_id'] = self.storage_root_credential_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_sharing_organization_name is not None: + body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body[ + 
'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.id is not None: body['id'] = self.id + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: + body['privilege_model_version'] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body['storage_root_credential_id'] = self.storage_root_credential_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore: """Deserializes the UpdateMetastore from a dictionary.""" @@ -5488,6 +7459,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateMetastoreAssignment: """Deserializes the UpdateMetastoreAssignment from a dictionary.""" @@ -5522,6 +7501,14 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.full_name is not None: body['full_name'] = self.full_name + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) 
-> UpdateModelVersionRequest: """Deserializes the UpdateModelVersionRequest from a dictionary.""" @@ -5592,6 +7579,24 @@ def as_dict(self) -> dict: if self.time_series: body['time_series'] = self.time_series.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes.""" + body = {} + if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name + if self.custom_metrics: body['custom_metrics'] = self.custom_metrics + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.data_classification_config: + body['data_classification_config'] = self.data_classification_config + if self.inference_log: body['inference_log'] = self.inference_log + if self.notifications: body['notifications'] = self.notifications + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.schedule: body['schedule'] = self.schedule + if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs + if self.snapshot: body['snapshot'] = self.snapshot + if self.table_name is not None: body['table_name'] = self.table_name + if self.time_series: body['time_series'] = self.time_series + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateMonitor: """Deserializes the UpdateMonitor from a dictionary.""" @@ -5629,6 +7634,14 @@ def as_dict(self) -> dict: if self.securable_type is not None: body['securable_type'] = self.securable_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.changes: body['changes'] = self.changes + if self.full_name is not None: body['full_name'] = self.full_name + if self.securable_type is not None: body['securable_type'] = self.securable_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePermissions: 
"""Deserializes the UpdatePermissions from a dictionary.""" @@ -5660,6 +7673,15 @@ def as_dict(self) -> dict: if self.owner is not None: body['owner'] = self.owner return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRegisteredModelRequest: """Deserializes the UpdateRegisteredModelRequest from a dictionary.""" @@ -5677,6 +7699,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -5715,6 +7742,18 @@ def as_dict(self) -> dict: if self.properties: body['properties'] = self.properties return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.enable_predictive_optimization is not None: + body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateSchema: """Deserializes the UpdateSchema from a dictionary.""" @@ -5751,7 +7790,6 @@ class 
UpdateStorageCredential: """Force update even if there are dependent external locations or external tables.""" isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" name: Optional[str] = None """Name of the storage credential.""" @@ -5788,6 +7826,25 @@ def as_dict(self) -> dict: if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateStorageCredential: """Deserializes the UpdateStorageCredential from a dictionary.""" @@ -5831,6 +7888,15 @@ def as_dict(self) -> dict: if self.owner is not None: body['owner'] = self.owner return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateVolumeRequestContent into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent: """Deserializes the UpdateVolumeRequestContent from a dictionary.""" @@ -5859,44 +7925,162 @@ def as_dict(self) -> dict: if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces] return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces + if self.name is not None: body['name'] = self.name + if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings: + """Deserializes the UpdateWorkspaceBindings from a dictionary.""" + return cls(assign_workspaces=d.get('assign_workspaces', None), + name=d.get('name', None), + unassign_workspaces=d.get('unassign_workspaces', None)) + + +@dataclass +class UpdateWorkspaceBindingsParameters: + add: Optional[List[WorkspaceBinding]] = None + """List of workspace bindings""" + + remove: Optional[List[WorkspaceBinding]] = None + """List of workspace bindings""" + + securable_name: Optional[str] = None + """The name of the securable.""" + + securable_type: Optional[UpdateBindingsSecurableType] = None + """The type of the securable to bind to a workspace.""" + + def as_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.add: body['add'] = [v.as_dict() for v in self.add] + if self.remove: body['remove'] = 
[v.as_dict() for v in self.remove] + if self.securable_name is not None: body['securable_name'] = self.securable_name + if self.securable_type is not None: body['securable_type'] = self.securable_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.add: body['add'] = self.add + if self.remove: body['remove'] = self.remove + if self.securable_name is not None: body['securable_name'] = self.securable_name + if self.securable_type is not None: body['securable_type'] = self.securable_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters: + """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" + return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), + remove=_repeated_dict(d, 'remove', WorkspaceBinding), + securable_name=d.get('securable_name', None), + securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType)) + + +@dataclass +class ValidateCredentialRequest: + aws_iam_role: Optional[AwsIamRole] = None + """The AWS IAM role configuration""" + + azure_managed_identity: Optional[AzureManagedIdentity] = None + """The Azure managed identity configuration.""" + + credential_name: Optional[str] = None + """Required. The name of an existing credential or long-lived cloud credential to validate.""" + + external_location_name: Optional[str] = None + """The name of an existing external location to validate. Only applicable for storage credentials + (purpose is **STORAGE**.)""" + + purpose: Optional[CredentialPurpose] = None + """The purpose of the credential. This should only be used when the credential is specified.""" + + read_only: Optional[bool] = None + """Whether the credential is only usable for read operations. 
Only applicable for storage + credentials (purpose is **STORAGE**.)""" + + url: Optional[str] = None + """The external location url to validate. Only applicable when purpose is **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.external_location_name is not None: + body['external_location_name'] = self.external_location_name + if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only + if self.url is not None: body['url'] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.external_location_name is not None: + body['external_location_name'] = self.external_location_name + if self.purpose is not None: body['purpose'] = self.purpose + if self.read_only is not None: body['read_only'] = self.read_only + if self.url is not None: body['url'] = self.url + return body + @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings: - """Deserializes the UpdateWorkspaceBindings from a dictionary.""" - return cls(assign_workspaces=d.get('assign_workspaces', None), - name=d.get('name', None), - unassign_workspaces=d.get('unassign_workspaces', None)) + def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest: + 
"""Deserializes the ValidateCredentialRequest from a dictionary.""" + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), + azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + credential_name=d.get('credential_name', None), + external_location_name=d.get('external_location_name', None), + purpose=_enum(d, 'purpose', CredentialPurpose), + read_only=d.get('read_only', None), + url=d.get('url', None)) @dataclass -class UpdateWorkspaceBindingsParameters: - add: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings""" - - remove: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings""" - - securable_name: Optional[str] = None - """The name of the securable.""" +class ValidateCredentialResponse: + is_dir: Optional[bool] = None + """Whether the tested location is a directory in cloud storage. Only applicable for when purpose is + **STORAGE**.""" - securable_type: Optional[UpdateBindingsSecurableType] = None - """The type of the securable to bind to a workspace.""" + results: Optional[List[CredentialValidationResult]] = None + """The results of the validation check.""" def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" + """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add: body['add'] = [v.as_dict() for v in self.add] - if self.remove: body['remove'] = [v.as_dict() for v in self.remove] - if self.securable_name is not None: body['securable_name'] = self.securable_name - if self.securable_type is not None: body['securable_type'] = self.securable_type.value + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ValidateCredentialResponse into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = self.results return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters: - """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" - return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), - remove=_repeated_dict(d, 'remove', WorkspaceBinding), - securable_name=d.get('securable_name', None), - securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType)) + def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse: + """Deserializes the ValidateCredentialResponse from a dictionary.""" + return cls(is_dir=d.get('isDir', None), + results=_repeated_dict(d, 'results', CredentialValidationResult)) + + +class ValidateCredentialResult(Enum): + """A enum represents the result of the file operation""" + + FAIL = 'FAIL' + PASS = 'PASS' + SKIP = 'SKIP' @dataclass @@ -5946,6 +8130,23 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.databricks_gcp_service_account: + body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.external_location_name is not None: + body['external_location_name'] = self.external_location_name + if self.read_only is not None: body['read_only'] = self.read_only + if self.storage_credential_name is not None: + body['storage_credential_name'] = self.storage_credential_name 
+ if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredential: """Deserializes the ValidateStorageCredential from a dictionary.""" @@ -5977,6 +8178,13 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredentialResponse: """Deserializes the ValidateStorageCredentialResponse from a dictionary.""" @@ -6002,6 +8210,14 @@ def as_dict(self) -> dict: if self.result is not None: body['result'] = self.result.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ValidationResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.operation is not None: body['operation'] = self.operation + if self.result is not None: body['result'] = self.result + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ValidationResult: """Deserializes the ValidationResult from a dictionary.""" @@ -6101,6 +8317,28 @@ def as_dict(self) -> dict: if self.volume_type is not None: body['volume_type'] = self.volume_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_point is not None: body['access_point'] = self.access_point + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not 
None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.volume_id is not None: body['volume_id'] = self.volume_id + if self.volume_type is not None: body['volume_type'] = self.volume_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> VolumeInfo: """Deserializes the VolumeInfo from a dictionary.""" @@ -6142,6 +8380,13 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes.""" + body = {} + if self.binding_type is not None: body['binding_type'] = self.binding_type + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceBinding: """Deserializes the WorkspaceBinding from a dictionary.""" @@ -6173,6 +8418,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bindings: body['bindings'] = self.bindings + if self.next_page_token is not 
None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse: """Deserializes the WorkspaceBindingsResponse from a dictionary.""" @@ -6731,6 +8983,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers) if 'catalogs' in json: @@ -6747,6 +9000,7 @@ def update(self, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, + options: Optional[Dict[str, str]] = None, owner: Optional[str] = None, properties: Optional[Dict[str, str]] = None) -> CatalogInfo: """Update a catalog. @@ -6764,6 +9018,8 @@ def update(self, Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the catalog. + :param options: Dict[str,str] (optional) + A map of key-value properties attached to the securable. :param owner: str (optional) Username of current owner of catalog. 
:param properties: Dict[str,str] (optional) @@ -6777,6 +9033,7 @@ def update(self, body['enable_predictive_optimization'] = enable_predictive_optimization.value if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value if new_name is not None: body['new_name'] = new_name + if options is not None: body['options'] = options if owner is not None: body['owner'] = owner if properties is not None: body['properties'] = properties headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -6895,6 +9152,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/connections', query=query, headers=headers) if 'connections' in json: @@ -6935,6 +9193,322 @@ def update(self, return ConnectionInfo.from_dict(res) +class CredentialsAPI: + """A credential represents an authentication and authorization mechanism for accessing services on your cloud + tenant. Each credential is subject to Unity Catalog access-control policies that control which users and + groups can access the credential. + + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` + privilege. 
The user who creates the credential can delegate ownership to another user or group to manage + permissions on it.""" + + def __init__(self, api_client): + self._api = api_client + + def create_credential(self, + name: str, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, + purpose: Optional[CredentialPurpose] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None) -> CredentialInfo: + """Create a credential. + + Creates a new credential. The type of credential to be created is determined by the **purpose** field, + which should be either **SERVICE** or **STORAGE**. + + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for + storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. + + :param name: str + The credential name. The name must be unique among storage and service credentials within the + metastore. + :param aws_iam_role: :class:`AwsIamRole` (optional) + The AWS IAM role configuration + :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) + The Azure managed identity configuration. + :param azure_service_principal: :class:`AzureServicePrincipal` (optional) + The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + :param comment: str (optional) + Comment associated with the credential. + :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) + GCP long-lived credential. Databricks-created Google Cloud Storage service account. + :param purpose: :class:`CredentialPurpose` (optional) + Indicates the purpose of the credential. + :param read_only: bool (optional) + Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**. + :param skip_validation: bool (optional) + Optional. Supplying true to this argument skips validation of the created set of credentials. + + :returns: :class:`CredentialInfo` + """ + body = {} + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body['azure_service_principal'] = azure_service_principal.as_dict() + if comment is not None: body['comment'] = comment + if databricks_gcp_service_account is not None: + body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if name is not None: body['name'] = name + if purpose is not None: body['purpose'] = purpose.value + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.1/unity-catalog/credentials', body=body, headers=headers) + return CredentialInfo.from_dict(res) + + def delete_credential(self, name_arg: str, *, force: Optional[bool] = None): + """Delete a credential. + + Deletes a service or storage credential from the metastore. The caller must be an owner of the + credential. + + :param name_arg: str + Name of the credential. + :param force: bool (optional) + Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**). 
+ + + """ + + query = {} + if force is not None: query['force'] = force + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers) + + def generate_temporary_service_credential( + self, + credential_name: str, + *, + azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, + gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None + ) -> TemporaryCredentials: + """Generate a temporary service credential. + + Returns a set of temporary credentials generated using the specified service credential. The caller + must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. + + :param credential_name: str + The name of the service credential used to generate a temporary credential + :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) + The Azure cloud options to customize the requested temporary credential + :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) + The GCP cloud options to customize the requested temporary credential + + :returns: :class:`TemporaryCredentials` + """ + body = {} + if azure_options is not None: body['azure_options'] = azure_options.as_dict() + if credential_name is not None: body['credential_name'] = credential_name + if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + '/api/2.1/unity-catalog/temporary-service-credentials', + body=body, + headers=headers) + return TemporaryCredentials.from_dict(res) + + def get_credential(self, name_arg: str) -> CredentialInfo: + """Get a credential. + + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the + owner of the credential, or have any permission on the credential. 
+ + :param name_arg: str + Name of the credential. + + :returns: :class:`CredentialInfo` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.1/unity-catalog/credentials/{name_arg}', headers=headers) + return CredentialInfo.from_dict(res) + + def list_credentials(self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]: + """List credentials. + + Gets an array of credentials (as __CredentialInfo__ objects). + + The array is limited to only the credentials that the caller has permission to access. If the caller + is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific + ordering of the elements in the array. + + :param max_results: int (optional) + Maximum number of credentials to return. - If not set, the default max page size is used. - When set + to a value greater than 0, the page length is the minimum of this value and a server-configured + value. - When set to 0, the page length is set to a server-configured value (recommended). - When + set to a value less than 0, an invalid parameter error is returned. + :param page_token: str (optional) + Opaque token to retrieve the next page of results. + :param purpose: :class:`CredentialPurpose` (optional) + Return only credentials for the specified purpose. 
+ + :returns: Iterator over :class:`CredentialInfo` + """ + + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if purpose is not None: query['purpose'] = purpose.value + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/credentials', query=query, headers=headers) + if 'credentials' in json: + for v in json['credentials']: + yield CredentialInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update_credential(self, + name_arg: str, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, + force: Optional[bool] = None, + isolation_mode: Optional[IsolationMode] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None) -> CredentialInfo: + """Update a credential. + + Updates a service or storage credential on the metastore. + + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. + If the caller is a metastore admin, only the __owner__ field can be changed. + + :param name_arg: str + Name of the credential. + :param aws_iam_role: :class:`AwsIamRole` (optional) + The AWS IAM role configuration + :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) + The Azure managed identity configuration. + :param azure_service_principal: :class:`AzureServicePrincipal` (optional) + The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + :param comment: str (optional) + Comment associated with the credential. 
+ :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) + GCP long-lived credential. Databricks-created Google Cloud Storage service account. + :param force: bool (optional) + Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**). + :param isolation_mode: :class:`IsolationMode` (optional) + Whether the current securable is accessible from all workspaces or a specific set of workspaces. + :param new_name: str (optional) + New name of credential. + :param owner: str (optional) + Username of current owner of credential. + :param read_only: bool (optional) + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. + :param skip_validation: bool (optional) + Supply true to this argument to skip validation of the updated credential. + + :returns: :class:`CredentialInfo` + """ + body = {} + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body['azure_service_principal'] = azure_service_principal.as_dict() + if comment is not None: body['comment'] = comment + if databricks_gcp_service_account is not None: + body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if force is not None: body['force'] = force + if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + f'/api/2.1/unity-catalog/credentials/{name_arg}', + 
body=body, + headers=headers) + return CredentialInfo.from_dict(res) + + def validate_credential(self, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + credential_name: Optional[str] = None, + external_location_name: Optional[str] = None, + purpose: Optional[CredentialPurpose] = None, + read_only: Optional[bool] = None, + url: Optional[str] = None) -> ValidateCredentialResponse: + """Validate a credential. + + Validates a credential. + + For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific + credential must be provided. + + For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and + __url__ need to be provided. If only one of them is provided, it will be used for validation. And if + both are provided, the __url__ will be used for validation, and __external_location_name__ will be + ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific + credential must be provided. + + The caller must be a metastore admin or the credential owner or have the required permission on the + metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). + + :param aws_iam_role: :class:`AwsIamRole` (optional) + The AWS IAM role configuration + :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) + The Azure managed identity configuration. + :param credential_name: str (optional) + Required. The name of an existing credential or long-lived cloud credential to validate. + :param external_location_name: str (optional) + The name of an existing external location to validate. Only applicable for storage credentials + (purpose is **STORAGE**.) + :param purpose: :class:`CredentialPurpose` (optional) + The purpose of the credential. This should only be used when the credential is specified. 
+ :param read_only: bool (optional) + Whether the credential is only usable for read operations. Only applicable for storage credentials + (purpose is **STORAGE**.) + :param url: str (optional) + The external location url to validate. Only applicable when purpose is **STORAGE**. + + :returns: :class:`ValidateCredentialResponse` + """ + body = {} + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body['azure_managed_identity'] = azure_managed_identity.as_dict() + if credential_name is not None: body['credential_name'] = credential_name + if external_location_name is not None: body['external_location_name'] = external_location_name + if purpose is not None: body['purpose'] = purpose.value + if read_only is not None: body['read_only'] = read_only + if url is not None: body['url'] = url + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers) + return ValidateCredentialResponse.from_dict(res) + + class ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog @@ -7084,6 +9658,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/external-locations', @@ -7134,7 +9709,6 @@ def update(self, :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) - Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the external location. 
:param owner: str (optional) @@ -7890,25 +10464,61 @@ class OnlineTablesAPI: def __init__(self, api_client): self._api = api_client - def create(self, *, name: Optional[str] = None, spec: Optional[OnlineTableSpec] = None) -> OnlineTable: + def wait_get_online_table_active(self, + name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable: + deadline = time.time() + timeout.total_seconds() + target_states = (ProvisioningInfoState.ACTIVE, ) + failure_states = (ProvisioningInfoState.FAILED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.unity_catalog_provisioning_state + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach ACTIVE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]: """Create an Online Table. Create a new Online Table. - :param name: str (optional) - Full three-part (catalog, schema, table) name of the table. - :param spec: :class:`OnlineTableSpec` (optional) - Specification of the online table. + :param table: :class:`OnlineTable` (optional) + Online Table information. - :returns: :class:`OnlineTable` + :returns: + Long-running operation waiter for :class:`OnlineTable`. + See :method:wait_get_online_table_active for more details. 
""" - body = {} - if name is not None: body['name'] = name - if spec is not None: body['spec'] = spec.as_dict() + body = table.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers) - return OnlineTable.from_dict(res) + op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers) + return Wait(self.wait_get_online_table_active, + response=OnlineTable.from_dict(op_response), + name=op_response['name']) + + def create_and_wait(self, + *, + table: Optional[OnlineTable] = None, + timeout=timedelta(minutes=20)) -> OnlineTable: + return self.create(table=table).result(timeout=timeout) def delete(self, name: str): """Delete an Online Table. @@ -8782,6 +11392,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/schemas', query=query, headers=headers) if 'schemas' in json: @@ -8971,6 +11582,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/storage-credentials', @@ -9019,7 +11631,6 @@ def update(self, :param force: bool (optional) Force update even if there are dependent external locations or external tables. :param isolation_mode: :class:`IsolationMode` (optional) - Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the storage credential. 
:param owner: str (optional) @@ -9196,6 +11807,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas', @@ -9385,6 +11997,7 @@ def list(self, max_results: Optional[int] = None, omit_columns: Optional[bool] = None, omit_properties: Optional[bool] = None, + omit_username: Optional[bool] = None, page_token: Optional[str] = None) -> Iterator[TableInfo]: """List tables. @@ -9414,6 +12027,9 @@ def list(self, Whether to omit the columns of the table from the response or not. :param omit_properties: bool (optional) Whether to omit the properties of the table from the response or not. + :param omit_username: bool (optional) + Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or + not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). 
@@ -9429,10 +12045,12 @@ def list(self, if max_results is not None: query['max_results'] = max_results if omit_columns is not None: query['omit_columns'] = omit_columns if omit_properties is not None: query['omit_properties'] = omit_properties + if omit_username is not None: query['omit_username'] = omit_username if page_token is not None: query['page_token'] = page_token if schema_name is not None: query['schema_name'] = schema_name headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query, headers=headers) if 'tables' in json: @@ -9493,6 +12111,7 @@ def list_summaries(self, if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query, headers=headers) if 'tables' in json: diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py new file mode 100755 index 000000000..f7a213669 --- /dev/null +++ b/databricks/sdk/service/cleanrooms.py @@ -0,0 +1,1283 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Dict, Iterator, List, Optional + +from ._internal import _enum, _from_dict, _repeated_dict + +_LOG = logging.getLogger('databricks.sdk') + +from databricks.sdk.service import catalog, jobs, settings, sharing + +# all definitions in this file are in alphabetical order + + +@dataclass +class CleanRoom: + access_restricted: Optional[CleanRoomAccessRestricted] = None + """Whether clean room access is restricted due to [CSP] + + [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html""" + + comment: Optional[str] = None + + created_at: Optional[int] = None + """When the clean room was created, in epoch milliseconds.""" + + local_collaborator_alias: Optional[str] = None + """The alias of the collaborator tied to the local clean room.""" + + name: Optional[str] = None + """The name of the clean room. It should follow [UC securable naming requirements]. + + [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" + + output_catalog: Optional[CleanRoomOutputCatalog] = None + """Output catalog of the clean room. It is an output only field. Output catalog is manipulated + using the separate CreateCleanRoomOutputCatalog API.""" + + owner: Optional[str] = None + """This is Databricks username of the owner of the local clean room securable for permission + management.""" + + remote_detailed_info: Optional[CleanRoomRemoteDetail] = None + """Central clean room details. During creation, users need to specify cloud_vendor, region, and + collaborators.global_metastore_id. 
This field will not be filled in the ListCleanRooms call.""" + + status: Optional[CleanRoomStatusEnum] = None + """Clean room status.""" + + updated_at: Optional[int] = None + """When the clean room was last updated, in epoch milliseconds.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.local_collaborator_alias is not None: + body['local_collaborator_alias'] = self.local_collaborator_alias + if self.name is not None: body['name'] = self.name + if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict() + if self.owner is not None: body['owner'] = self.owner + if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict() + if self.status is not None: body['status'] = self.status.value + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoom into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_restricted is not None: body['access_restricted'] = self.access_restricted + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.local_collaborator_alias is not None: + body['local_collaborator_alias'] = self.local_collaborator_alias + if self.name is not None: body['name'] = self.name + if self.output_catalog: body['output_catalog'] = self.output_catalog + if self.owner is not None: body['owner'] = self.owner + if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info + if self.status is not None: body['status'] = self.status + if self.updated_at is not 
None: body['updated_at'] = self.updated_at + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoom: + """Deserializes the CleanRoom from a dictionary.""" + return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted), + comment=d.get('comment', None), + created_at=d.get('created_at', None), + local_collaborator_alias=d.get('local_collaborator_alias', None), + name=d.get('name', None), + output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog), + owner=d.get('owner', None), + remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail), + status=_enum(d, 'status', CleanRoomStatusEnum), + updated_at=d.get('updated_at', None)) + + +class CleanRoomAccessRestricted(Enum): + + CSP_MISMATCH = 'CSP_MISMATCH' + NO_RESTRICTION = 'NO_RESTRICTION' + + +@dataclass +class CleanRoomAsset: + """Metadata of the clean room asset""" + + added_at: Optional[int] = None + """When the asset is added to the clean room, in epoch milliseconds.""" + + asset_type: Optional[CleanRoomAssetAssetType] = None + """The type of the asset.""" + + foreign_table: Optional[CleanRoomAssetForeignTable] = None + """Foreign table details available to all collaborators of the clean room. Present if and only if + **asset_type** is **FOREIGN_TABLE**""" + + foreign_table_local_details: Optional[CleanRoomAssetForeignTableLocalDetails] = None + """Local details for a foreign table that are only available to its owner. Present if and only if + **asset_type** is **FOREIGN_TABLE**""" + + name: Optional[str] = None + """A fully qualified name that uniquely identifies the asset within the clean room. This is also + the name displayed in the clean room UI.
+ + For UC securable assets (tables, volumes, etc.), the format is + *shared_catalog*.*shared_schema*.*asset_name* + + For notebooks, the name is the notebook file name.""" + + notebook: Optional[CleanRoomAssetNotebook] = None + """Notebook details available to all collaborators of the clean room. Present if and only if + **asset_type** is **NOTEBOOK_FILE**""" + + owner_collaborator_alias: Optional[str] = None + """The alias of the collaborator who owns this asset""" + + status: Optional[CleanRoomAssetStatusEnum] = None + """Status of the asset""" + + table: Optional[CleanRoomAssetTable] = None + """Table details available to all collaborators of the clean room. Present if and only if + **asset_type** is **TABLE**""" + + table_local_details: Optional[CleanRoomAssetTableLocalDetails] = None + """Local details for a table that are only available to its owner. Present if and only if + **asset_type** is **TABLE**""" + + view: Optional[CleanRoomAssetView] = None + """View details available to all collaborators of the clean room. Present if and only if + **asset_type** is **VIEW**""" + + view_local_details: Optional[CleanRoomAssetViewLocalDetails] = None + """Local details for a view that are only available to its owner. Present if and only if + **asset_type** is **VIEW**""" + + volume_local_details: Optional[CleanRoomAssetVolumeLocalDetails] = None + """Local details for a volume that are only available to its owner. 
Present if and only if + **asset_type** is **VOLUME**""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.added_at is not None: body['added_at'] = self.added_at + if self.asset_type is not None: body['asset_type'] = self.asset_type.value + if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict() + if self.foreign_table_local_details: + body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict() + if self.name is not None: body['name'] = self.name + if self.notebook: body['notebook'] = self.notebook.as_dict() + if self.owner_collaborator_alias is not None: + body['owner_collaborator_alias'] = self.owner_collaborator_alias + if self.status is not None: body['status'] = self.status.value + if self.table: body['table'] = self.table.as_dict() + if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict() + if self.view: body['view'] = self.view.as_dict() + if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict() + if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes.""" + body = {} + if self.added_at is not None: body['added_at'] = self.added_at + if self.asset_type is not None: body['asset_type'] = self.asset_type + if self.foreign_table: body['foreign_table'] = self.foreign_table + if self.foreign_table_local_details: + body['foreign_table_local_details'] = self.foreign_table_local_details + if self.name is not None: body['name'] = self.name + if self.notebook: body['notebook'] = self.notebook + if self.owner_collaborator_alias is not None: + body['owner_collaborator_alias'] = self.owner_collaborator_alias + if self.status is not None: body['status'] = self.status + if self.table: 
body['table'] = self.table + if self.table_local_details: body['table_local_details'] = self.table_local_details + if self.view: body['view'] = self.view + if self.view_local_details: body['view_local_details'] = self.view_local_details + if self.volume_local_details: body['volume_local_details'] = self.volume_local_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAsset: + """Deserializes the CleanRoomAsset from a dictionary.""" + return cls(added_at=d.get('added_at', None), + asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType), + foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable), + foreign_table_local_details=_from_dict(d, 'foreign_table_local_details', + CleanRoomAssetForeignTableLocalDetails), + name=d.get('name', None), + notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook), + owner_collaborator_alias=d.get('owner_collaborator_alias', None), + status=_enum(d, 'status', CleanRoomAssetStatusEnum), + table=_from_dict(d, 'table', CleanRoomAssetTable), + table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails), + view=_from_dict(d, 'view', CleanRoomAssetView), + view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails), + volume_local_details=_from_dict(d, 'volume_local_details', + CleanRoomAssetVolumeLocalDetails)) + + +class CleanRoomAssetAssetType(Enum): + + FOREIGN_TABLE = 'FOREIGN_TABLE' + NOTEBOOK_FILE = 'NOTEBOOK_FILE' + TABLE = 'TABLE' + VIEW = 'VIEW' + VOLUME = 'VOLUME' + + +@dataclass +class CleanRoomAssetForeignTable: + columns: Optional[List[catalog.ColumnInfo]] = None + """The metadata information of the columns in the foreign table""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + return body + + def as_shallow_dict(self) -> dict: + 
"""Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTable: + """Deserializes the CleanRoomAssetForeignTable from a dictionary.""" + return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) + + +@dataclass +class CleanRoomAssetForeignTableLocalDetails: + local_name: Optional[str] = None + """The fully qualified name of the foreign table in its owner's local metastore, in the format of + *catalog*.*schema*.*foreign_table_name*""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTableLocalDetails: + """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary.""" + return cls(local_name=d.get('local_name', None)) + + +@dataclass +class CleanRoomAssetNotebook: + etag: Optional[str] = None + """Server generated checksum that represents the notebook version.""" + + notebook_content: Optional[str] = None + """Base 64 representation of the notebook contents. 
This is the same format as returned by + :method:workspace/export with the format of **HTML**.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.notebook_content is not None: body['notebook_content'] = self.notebook_content + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.notebook_content is not None: body['notebook_content'] = self.notebook_content + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook: + """Deserializes the CleanRoomAssetNotebook from a dictionary.""" + return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None)) + + +class CleanRoomAssetStatusEnum(Enum): + + ACTIVE = 'ACTIVE' + PENDING = 'PENDING' + PERMISSION_DENIED = 'PERMISSION_DENIED' + + +@dataclass +class CleanRoomAssetTable: + columns: Optional[List[catalog.ColumnInfo]] = None + """The metadata information of the columns in the table""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetTable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTable: + """Deserializes the CleanRoomAssetTable from a dictionary.""" + return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) + + +@dataclass +class CleanRoomAssetTableLocalDetails: + local_name: Optional[str] = None + """The 
fully qualified name of the table in its owner's local metastore, in the format of + *catalog*.*schema*.*table_name*""" + + partitions: Optional[List[sharing.PartitionSpecificationPartition]] = None + """Partition filtering specification for a shared table.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + if self.partitions: body['partitions'] = self.partitions + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTableLocalDetails: + """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary.""" + return cls(local_name=d.get('local_name', None), + partitions=_repeated_dict(d, 'partitions', sharing.PartitionSpecificationPartition)) + + +@dataclass +class CleanRoomAssetView: + columns: Optional[List[catalog.ColumnInfo]] = None + """The metadata information of the columns in the view""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetView: + """Deserializes the CleanRoomAssetView from a dictionary.""" + return cls(columns=_repeated_dict(d, 'columns', 
catalog.ColumnInfo)) + + +@dataclass +class CleanRoomAssetViewLocalDetails: + local_name: Optional[str] = None + """The fully qualified name of the view in its owner's local metastore, in the format of + *catalog*.*schema*.*view_name*""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetViewLocalDetails: + """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary.""" + return cls(local_name=d.get('local_name', None)) + + +@dataclass +class CleanRoomAssetVolumeLocalDetails: + local_name: Optional[str] = None + """The fully qualified name of the volume in its owner's local metastore, in the format of + *catalog*.*schema*.*volume_name*""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.local_name is not None: body['local_name'] = self.local_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetVolumeLocalDetails: + """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary.""" + return cls(local_name=d.get('local_name', None)) + + +@dataclass +class CleanRoomCollaborator: + """Publicly visible clean room collaborator.""" + + collaborator_alias: 
str + """Collaborator alias specified by the clean room creator. It is unique across all collaborators of + this clean room, and used to derive multiple values internally such as catalog alias and clean + room name for single metastore clean rooms. It should follow [UC securable naming requirements]. + + [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" + + display_name: Optional[str] = None + """Generated display name for the collaborator. In the case of a single metastore clean room, it is + the clean room name. For x-metastore clean rooms, it is the organization name of the metastore. + It is not restricted to these values and could change in the future""" + + global_metastore_id: Optional[str] = None + """The global Unity Catalog metastore id of the collaborator. The identifier is of format + cloud:region:metastore-uuid.""" + + invite_recipient_email: Optional[str] = None + """Email of the user who is receiving the clean room "invitation". It should be empty for the + creator of the clean room, and non-empty for the invitees of the clean room. It is only returned + in the output when clean room creator calls GET""" + + invite_recipient_workspace_id: Optional[int] = None + """Workspace ID of the user who is receiving the clean room "invitation". Must be specified if + invite_recipient_email is specified. 
It should be empty when the collaborator is the creator of + the clean room.""" + + organization_name: Optional[str] = None + """[Organization name](:method:metastores/list#metastores-delta_sharing_organization_name) + configured in the metastore""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.display_name is not None: body['display_name'] = self.display_name + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.invite_recipient_email is not None: + body['invite_recipient_email'] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: + body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id + if self.organization_name is not None: body['organization_name'] = self.organization_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes.""" + body = {} + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.display_name is not None: body['display_name'] = self.display_name + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.invite_recipient_email is not None: + body['invite_recipient_email'] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: + body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id + if self.organization_name is not None: body['organization_name'] = self.organization_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator: + """Deserializes the CleanRoomCollaborator from a dictionary.""" + return cls(collaborator_alias=d.get('collaborator_alias', None), + 
display_name=d.get('display_name', None), + global_metastore_id=d.get('global_metastore_id', None), + invite_recipient_email=d.get('invite_recipient_email', None), + invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None), + organization_name=d.get('organization_name', None)) + + +@dataclass +class CleanRoomNotebookTaskRun: + """Stores information about a single task run.""" + + collaborator_job_run_info: Optional[CollaboratorJobRunInfo] = None + """Job run info of the task in the runner's local workspace. This field is only included in the + LIST API if the task was run within the same workspace the API is being called from. If the task run + was in a different workspace under the same metastore, only the workspace_id is included.""" + + notebook_job_run_state: Optional[jobs.CleanRoomTaskRunState] = None + """State of the task run.""" + + notebook_name: Optional[str] = None + """Asset name of the notebook executed in this task run.""" + + output_schema_expiration_time: Optional[int] = None + """Expiration time of the output schema of the task run (if any), in epoch milliseconds.""" + + output_schema_name: Optional[str] = None + """Name of the output schema associated with the clean rooms notebook task run.""" + + run_duration: Optional[int] = None + """Duration of the task run, in milliseconds.""" + + start_time: Optional[int] = None + """When the task run started, in epoch milliseconds.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.collaborator_job_run_info: + body['collaborator_job_run_info'] = self.collaborator_job_run_info.as_dict() + if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict() + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.output_schema_expiration_time is not None: + body['output_schema_expiration_time'] =
self.output_schema_expiration_time + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.start_time is not None: body['start_time'] = self.start_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info + if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.output_schema_expiration_time is not None: + body['output_schema_expiration_time'] = self.output_schema_expiration_time + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.start_time is not None: body['start_time'] = self.start_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookTaskRun: + """Deserializes the CleanRoomNotebookTaskRun from a dictionary.""" + return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info', + CollaboratorJobRunInfo), + notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState), + notebook_name=d.get('notebook_name', None), + output_schema_expiration_time=d.get('output_schema_expiration_time', None), + output_schema_name=d.get('output_schema_name', None), + run_duration=d.get('run_duration', None), + start_time=d.get('start_time', None)) + + +@dataclass +class CleanRoomOutputCatalog: + catalog_name: Optional[str] = None + """The name of the output catalog in UC. It should follow [UC securable naming requirements]. The + field will always exist if status is CREATED. 
+ + [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" + + status: Optional[CleanRoomOutputCatalogOutputCatalogStatus] = None + + def as_dict(self) -> dict: + """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.status is not None: body['status'] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.status is not None: body['status'] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomOutputCatalog: + """Deserializes the CleanRoomOutputCatalog from a dictionary.""" + return cls(catalog_name=d.get('catalog_name', None), + status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus)) + + +class CleanRoomOutputCatalogOutputCatalogStatus(Enum): + + CREATED = 'CREATED' + NOT_CREATED = 'NOT_CREATED' + NOT_ELIGIBLE = 'NOT_ELIGIBLE' + + +@dataclass +class CleanRoomRemoteDetail: + """Publicly visible central clean room details.""" + + central_clean_room_id: Optional[str] = None + """Central clean room ID.""" + + cloud_vendor: Optional[str] = None + """Cloud vendor (aws,azure,gcp) of the central clean room.""" + + collaborators: Optional[List[CleanRoomCollaborator]] = None + """Collaborators in the central clean room. There should be one and only one collaborator in the list + that satisfies the owner condition: + + 1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom). + + 2.
Its invite_recipient_email is empty.""" + + compliance_security_profile: Optional[ComplianceSecurityProfile] = None + """The compliance security profile used to process regulated data following compliance standards.""" + + creator: Optional[CleanRoomCollaborator] = None + """Collaborator who creates the clean room.""" + + egress_network_policy: Optional[settings.EgressNetworkPolicy] = None + """Egress network policy to apply to the central clean room workspace.""" + + region: Optional[str] = None + """Region of the central clean room.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id + if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor + if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators] + if self.compliance_security_profile: + body['compliance_security_profile'] = self.compliance_security_profile.as_dict() + if self.creator: body['creator'] = self.creator.as_dict() + if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict() + if self.region is not None: body['region'] = self.region + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id + if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor + if self.collaborators: body['collaborators'] = self.collaborators + if self.compliance_security_profile: + body['compliance_security_profile'] = self.compliance_security_profile + if self.creator: body['creator'] = self.creator + if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy + if self.region is not 
None: body['region'] = self.region + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomRemoteDetail: + """Deserializes the CleanRoomRemoteDetail from a dictionary.""" + return cls(central_clean_room_id=d.get('central_clean_room_id', None), + cloud_vendor=d.get('cloud_vendor', None), + collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator), + compliance_security_profile=_from_dict(d, 'compliance_security_profile', + ComplianceSecurityProfile), + creator=_from_dict(d, 'creator', CleanRoomCollaborator), + egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy), + region=d.get('region', None)) + + +class CleanRoomStatusEnum(Enum): + + ACTIVE = 'ACTIVE' + DELETED = 'DELETED' + FAILED = 'FAILED' + PROVISIONING = 'PROVISIONING' + + +@dataclass +class CollaboratorJobRunInfo: + collaborator_alias: Optional[str] = None + """Alias of the collaborator that triggered the task run.""" + + collaborator_job_id: Optional[int] = None + """Job ID of the task run in the collaborator's workspace.""" + + collaborator_job_run_id: Optional[int] = None + """Job run ID of the task run in the collaborator's workspace.""" + + collaborator_task_run_id: Optional[int] = None + """Task run ID of the task run in the collaborator's workspace.""" + + collaborator_workspace_id: Optional[int] = None + """ID of the collaborator's workspace that triggered the task run.""" + + def as_dict(self) -> dict: + """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: + body['collaborator_job_run_id'] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: + body['collaborator_task_run_id'] = 
self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: + body['collaborator_workspace_id'] = self.collaborator_workspace_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: + body['collaborator_job_run_id'] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: + body['collaborator_task_run_id'] = self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: + body['collaborator_workspace_id'] = self.collaborator_workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CollaboratorJobRunInfo: + """Deserializes the CollaboratorJobRunInfo from a dictionary.""" + return cls(collaborator_alias=d.get('collaborator_alias', None), + collaborator_job_id=d.get('collaborator_job_id', None), + collaborator_job_run_id=d.get('collaborator_job_run_id', None), + collaborator_task_run_id=d.get('collaborator_task_run_id', None), + collaborator_workspace_id=d.get('collaborator_workspace_id', None)) + + +@dataclass +class ComplianceSecurityProfile: + """The compliance security profile used to process regulated data following compliance standards.""" + + compliance_standards: Optional[List[settings.ComplianceStandard]] = None + """The list of compliance standards that the compliance security profile is configured to enforce.""" + + is_enabled: Optional[bool] = None + """Whether the compliance security profile is enabled.""" + + def as_dict(self) -> dict: + """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compliance_standards: + body['compliance_standards'] = 
[v.as_dict() for v in self.compliance_standards] + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile: + """Deserializes the ComplianceSecurityProfile from a dictionary.""" + return cls(compliance_standards=_repeated_dict(d, 'compliance_standards', + settings.ComplianceStandard), + is_enabled=d.get('is_enabled', None)) + + +@dataclass +class CreateCleanRoomOutputCatalogResponse: + output_catalog: Optional[CleanRoomOutputCatalog] = None + + def as_dict(self) -> dict: + """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.output_catalog: body['output_catalog'] = self.output_catalog + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoomOutputCatalogResponse: + """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary.""" + return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog)) + + +@dataclass +class DeleteCleanRoomAssetResponse: + """Response for delete clean room request. 
Using an empty message since the generic Empty proto + does not externd UnshadedMessageMarker.""" + + def as_dict(self) -> dict: + """Serializes the DeleteCleanRoomAssetResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCleanRoomAssetResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteCleanRoomAssetResponse: + """Deserializes the DeleteCleanRoomAssetResponse from a dictionary.""" + return cls() + + +@dataclass +class DeleteResponse: + + def as_dict(self) -> dict: + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" + return cls() + + +@dataclass +class ListCleanRoomAssetsResponse: + assets: Optional[List[CleanRoomAsset]] = None + """Assets in the clean room.""" + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
page_token + should be set to this value for the next request (for the next page of results).""" + + def as_dict(self) -> dict: + """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.assets: body['assets'] = [v.as_dict() for v in self.assets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets: body['assets'] = self.assets + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomAssetsResponse: + """Deserializes the ListCleanRoomAssetsResponse from a dictionary.""" + return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset), + next_page_token=d.get('next_page_token', None)) + + +@dataclass +class ListCleanRoomNotebookTaskRunsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
page_token + should be set to this value for the next request (for the next page of results).""" + + runs: Optional[List[CleanRoomNotebookTaskRun]] = None + """Name of the clean room.""" + + def as_dict(self) -> dict: + """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = [v.as_dict() for v in self.runs] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = self.runs + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomNotebookTaskRunsResponse: + """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun)) + + +@dataclass +class ListCleanRoomsResponse: + clean_rooms: Optional[List[CleanRoom]] = None + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
page_token + should be set to this value for the next request (for the next page of results).""" + + def as_dict(self) -> dict: + """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_rooms: body['clean_rooms'] = self.clean_rooms + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse: + """Deserializes the ListCleanRoomsResponse from a dictionary.""" + return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom), + next_page_token=d.get('next_page_token', None)) + + +@dataclass +class UpdateCleanRoomRequest: + clean_room: Optional[CleanRoom] = None + + name: Optional[str] = None + """Name of the clean room.""" + + def as_dict(self) -> dict: + """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.clean_room: body['clean_room'] = self.clean_room.as_dict() + if self.name is not None: body['name'] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_room: body['clean_room'] = self.clean_room + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoomRequest: + """Deserializes the UpdateCleanRoomRequest from a dictionary.""" + return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None)) + + +class CleanRoomAssetsAPI: 
+ """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the + clean room.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, clean_room_name: str, *, asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset: + """Create an asset. + + Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC + asset that is added through this method, the clean room owner must also have enough privilege on the + asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to + access the asset. Typically, you should use a group as the clean room owner. + + :param clean_room_name: str + Name of the clean room. + :param asset: :class:`CleanRoomAsset` (optional) + Metadata of the clean room asset + + :returns: :class:`CleanRoomAsset` + """ + body = asset.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.0/clean-rooms/{clean_room_name}/assets', + body=body, + headers=headers) + return CleanRoomAsset.from_dict(res) + + def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str): + """Delete an asset. + + Delete a clean room asset - unshare/remove the asset from the clean room + + :param clean_room_name: str + Name of the clean room. + :param asset_type: :class:`CleanRoomAssetAssetType` + The type of the asset. + :param asset_full_name: str + The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', + f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}', + headers=headers) + + def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, + asset_full_name: str) -> CleanRoomAsset: + """Get an asset. 
+ + Get the details of a clean room asset by its type and full name. + + :param clean_room_name: str + Name of the clean room. + :param asset_type: :class:`CleanRoomAssetAssetType` + The type of the asset. + :param asset_full_name: str + The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. + + :returns: :class:`CleanRoomAsset` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}', + headers=headers) + return CleanRoomAsset.from_dict(res) + + def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]: + """List assets. + + :param clean_room_name: str + Name of the clean room. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + + :returns: Iterator over :class:`CleanRoomAsset` + """ + + query = {} + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/clean-rooms/{clean_room_name}/assets', + query=query, + headers=headers) + if 'assets' in json: + for v in json['assets']: + yield CleanRoomAsset.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + clean_room_name: str, + asset_type: CleanRoomAssetAssetType, + name: str, + *, + asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset: + """Update an asset. + + Update a clean room asset. For example, updating the content of a notebook; changing the shared + partitions of a table; etc. + + :param clean_room_name: str + Name of the clean room. + :param asset_type: :class:`CleanRoomAssetAssetType` + The type of the asset. + :param name: str + A fully qualified name that uniquely identifies the asset within the clean room. 
This is also the + name displayed in the clean room UI. + + For UC securable assets (tables, volumes, etc.), the format is + *shared_catalog*.*shared_schema*.*asset_name* + + For notebooks, the name is the notebook file name. + :param asset: :class:`CleanRoomAsset` (optional) + Metadata of the clean room asset + + :returns: :class:`CleanRoomAsset` + """ + body = asset.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}', + body=body, + headers=headers) + return CleanRoomAsset.from_dict(res) + + +class CleanRoomTaskRunsAPI: + """Clean room task runs are the executions of notebooks in a clean room.""" + + def __init__(self, api_client): + self._api = api_client + + def list(self, + clean_room_name: str, + *, + notebook_name: Optional[str] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]: + """List notebook task runs. + + List all the historical notebook task runs in a clean room. + + :param clean_room_name: str + Name of the clean room. + :param notebook_name: str (optional) + Notebook name + :param page_size: int (optional) + The maximum number of task runs to return + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
+ + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` + """ + + query = {} + if notebook_name is not None: query['notebook_name'] = notebook_name + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/clean-rooms/{clean_room_name}/runs', + query=query, + headers=headers) + if 'runs' in json: + for v in json['runs']: + yield CleanRoomNotebookTaskRun.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + +class CleanRoomsAPI: + """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting + environment where multiple parties can work together on sensitive enterprise data without direct access to + each other’s data.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom: + """Create a clean room. + + Create a new clean room with the specified collaborators. This method is asynchronous; the returned + name field inside the clean_room field can be used to poll the clean room status, using the + :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING + state, with only name, owner, comment, created_at and status populated. The clean room will be usable + once it enters an ACTIVE state. + + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. 
+ + :param clean_room: :class:`CleanRoom` (optional) + + :returns: :class:`CleanRoom` + """ + body = clean_room.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/clean-rooms', body=body, headers=headers) + return CleanRoom.from_dict(res) + + def create_output_catalog( + self, + clean_room_name: str, + *, + output_catalog: Optional[CleanRoomOutputCatalog] = None) -> CreateCleanRoomOutputCatalogResponse: + """Create an output catalog. + + Create the output catalog of the clean room. + + :param clean_room_name: str + Name of the clean room. + :param output_catalog: :class:`CleanRoomOutputCatalog` (optional) + + :returns: :class:`CreateCleanRoomOutputCatalogResponse` + """ + body = output_catalog.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs', + body=body, + headers=headers) + return CreateCleanRoomOutputCatalogResponse.from_dict(res) + + def delete(self, name: str): + """Delete a clean room. + + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other + collaborators have not deleted the clean room, they will still have the clean room in their metastore, + but it will be in a DELETED state and no operations other than deletion can be performed on it. + + :param name: str + Name of the clean room. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/clean-rooms/{name}', headers=headers) + + def get(self, name: str) -> CleanRoom: + """Get a clean room. + + Get the details of a clean room given its name. 
+ + :param name: str + + :returns: :class:`CleanRoom` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/clean-rooms/{name}', headers=headers) + return CleanRoom.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[CleanRoom]: + """List clean rooms. + + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are + returned. + + :param page_size: int (optional) + Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + + :returns: Iterator over :class:`CleanRoom` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/clean-rooms', query=query, headers=headers) + if 'clean_rooms' in json: + for v in json['clean_rooms']: + yield CleanRoom.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom: + """Update a clean room. + + Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** + privilege, or be metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. + + :param name: str + Name of the clean room. 
+ :param clean_room: :class:`CleanRoom` (optional) + + :returns: :class:`CleanRoom` + """ + body = {} + if clean_room is not None: body['clean_room'] = clean_room.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/clean-rooms/{name}', body=body, headers=headers) + return CleanRoom.from_dict(res) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index fabd258d0..63a971b73 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -55,6 +55,16 @@ def as_dict(self) -> dict: if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body + def as_shallow_dict(self) -> dict: + """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddInstanceProfile: """Deserializes the AddInstanceProfile from a dictionary.""" @@ -72,6 +82,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the AddResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddResponse: """Deserializes the AddResponse from a dictionary.""" @@ -90,6 +105,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes.""" + 
body = {} + if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Adlsgen2Info: """Deserializes the Adlsgen2Info from a dictionary.""" @@ -113,6 +134,13 @@ def as_dict(self) -> dict: if self.min_workers is not None: body['min_workers'] = self.min_workers return body + def as_shallow_dict(self) -> dict: + """Serializes the AutoScale into a shallow dictionary of its immediate attributes.""" + body = {} + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AutoScale: """Deserializes the AutoScale from a dictionary.""" @@ -216,6 +244,22 @@ def as_dict(self) -> dict: if self.zone_id is not None: body['zone_id'] = self.zone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.availability is not None: body['availability'] = self.availability + if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count + if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops + if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size + if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput + if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.spot_bid_price_percent is not None: + body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
AwsAttributes: """Deserializes the AwsAttributes from a dictionary.""" @@ -275,6 +319,15 @@ def as_dict(self) -> dict: if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body + def as_shallow_dict(self) -> dict: + """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.availability is not None: body['availability'] = self.availability + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AzureAttributes: """Deserializes the AzureAttributes from a dictionary.""" @@ -310,6 +363,14 @@ def as_dict(self) -> dict: if self.context_id is not None: body['contextId'] = self.context_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelCommand into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command_id is not None: body['commandId'] = self.command_id + if self.context_id is not None: body['contextId'] = self.context_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelCommand: """Deserializes the CancelCommand from a dictionary.""" @@ -326,6 +387,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelResponse: """Deserializes the CancelResponse from a dictionary.""" @@ -347,6 +413,13 @@ def as_dict(self) -> dict: if self.owner_username is not None: body['owner_username'] = self.owner_username return body + def as_shallow_dict(self) -> 
dict: + """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.owner_username is not None: body['owner_username'] = self.owner_username + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwner: """Deserializes the ChangeClusterOwner from a dictionary.""" @@ -361,6 +434,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ChangeClusterOwnerResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwnerResponse: """Deserializes the ChangeClusterOwnerResponse from a dictionary.""" @@ -382,6 +460,13 @@ def as_dict(self) -> dict: if self.notebooks is not None: body['notebooks'] = self.notebooks return body + def as_shallow_dict(self) -> dict: + """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.jobs is not None: body['jobs'] = self.jobs + if self.notebooks is not None: body['notebooks'] = self.notebooks + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClientsTypes: """Deserializes the ClientsTypes from a dictionary.""" @@ -399,6 +484,12 @@ def as_dict(self) -> dict: if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CloneCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CloneCluster: """Deserializes the CloneCluster from a dictionary.""" @@ -415,6 +506,12 @@ def as_dict(self) -> dict: if self.status: body['status'] = [v.value for v in self.status] return body + def 
as_shallow_dict(self) -> dict: + """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CloudProviderNodeInfo: """Deserializes the CloudProviderNodeInfo from a dictionary.""" @@ -451,6 +548,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlRequest: """Deserializes the ClusterAccessControlRequest from a dictionary.""" @@ -488,6 +595,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlResponse: """Deserializes the ClusterAccessControlResponse from a dictionary.""" @@ -519,11 
+637,11 @@ class ClusterAttributes: a set of default values will be used.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at @@ -541,13 +659,19 @@ class ClusterAttributes: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. 
But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -588,6 +712,20 @@ class ClusterAttributes: instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. 
+ + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -632,6 +770,12 @@ class ClusterAttributes: private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -655,6 +799,8 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value if self.node_type_id is not None: body['node_type_id'] = self.node_type_id if self.policy_id is not None: body['policy_id'] = self.policy_id if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value @@ -663,9 +809,45 @@ def as_dict(self) -> dict: if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the 
ClusterAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not 
None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes: """Deserializes the ClusterAttributes from a dictionary.""" @@ -684,6 +866,8 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes: gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), + kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), @@ -692,6 +876,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes: spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -716,6 +901,14 @@ def as_dict(self) -> dict: if self.violations: body['violations'] = self.violations return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance: """Deserializes the ClusterCompliance from a dictionary.""" @@ -754,11 +947,11 @@ class ClusterDetails: while each new cluster has a globally unique id.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The 
configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_log_status: Optional[LogSyncStatus] = None """Cluster log delivery status.""" @@ -790,13 +983,19 @@ class ClusterDetails: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. 
* `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -881,10 +1080,24 @@ class ClusterDetails: """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks sdk so it needed to be added here""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + jdbc_port: Optional[int] = None """Port on which Spark JDBC server is listening, in the driver nod. No service will be listeningon on this port in executor nodes.""" + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. 
+ + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + last_activity_time: Optional[int] = None """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks sdk so it needed to be added here""" @@ -981,6 +1194,12 @@ class ClusterDetails: """Information about why the cluster was terminated. This field only appears when the cluster is in a `TERMINATING` or `TERMINATED` state.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -1014,7 +1233,9 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port + if self.kind is not None: body['kind'] = self.kind.value if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time if self.node_type_id is not None: body['node_type_id'] = self.node_type_id @@ -1033,9 +1254,66 @@ def as_dict(self) -> dict: if self.state_message is not None: body['state_message'] = self.state_message if self.terminated_time is not None: body['terminated_time'] = self.terminated_time if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict() + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: 
body['workload_type'] = self.workload_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status + if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.cluster_source is not None: body['cluster_source'] = self.cluster_source + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.default_tags: body['default_tags'] = self.default_tags + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver: body['driver'] = self.driver + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.executors: body['executors'] = 
self.executors + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port + if self.kind is not None: body['kind'] = self.kind + if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time + if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.spec: body['spec'] = self.spec + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.start_time is not None: body['start_time'] = self.start_time + if self.state is not None: body['state'] = self.state + if self.state_message is not None: body['state_message'] = self.state_message + if self.terminated_time is not None: body['terminated_time'] = self.terminated_time + if self.termination_reason: body['termination_reason'] = self.termination_reason + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + return body + 
@classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterDetails: """Deserializes the ClusterDetails from a dictionary.""" @@ -1064,7 +1342,9 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails: gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), jdbc_port=d.get('jdbc_port', None), + kind=_enum(d, 'kind', Kind), last_restarted_time=d.get('last_restarted_time', None), last_state_loss_time=d.get('last_state_loss_time', None), node_type_id=d.get('node_type_id', None), @@ -1083,6 +1363,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails: state_message=d.get('state_message', None), terminated_time=d.get('terminated_time', None), termination_reason=_from_dict(d, 'termination_reason', TerminationReason), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -1114,6 +1395,16 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details + if self.details: body['details'] = self.details + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterEvent: """Deserializes the ClusterEvent from a dictionary.""" @@ -1139,6 +1430,13 @@ def as_dict(self) -> dict: if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses] return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterLibraryStatuses into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.library_statuses: body['library_statuses'] = self.library_statuses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterLibraryStatuses: """Deserializes the ClusterLibraryStatuses from a dictionary.""" @@ -1158,17 +1456,32 @@ class ClusterLogConf: access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to write data to the s3 destination.""" + volumes: Optional[VolumesStorageInfo] = None + """destination needs to be provided. e.g. `{ "volumes" : { "destination" : + "/Volumes/catalog/schema/volume/cluster_log" } }`""" + def as_dict(self) -> dict: """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body.""" body = {} if self.dbfs: body['dbfs'] = self.dbfs.as_dict() if self.s3: body['s3'] = self.s3.as_dict() + if self.volumes: body['volumes'] = self.volumes.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbfs: body['dbfs'] = self.dbfs + if self.s3: body['s3'] = self.s3 + if self.volumes: body['volumes'] = self.volumes return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf: """Deserializes the ClusterLogConf from a dictionary.""" - return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo)) + return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), + s3=_from_dict(d, 's3', S3StorageInfo), + volumes=_from_dict(d, 'volumes', VolumesStorageInfo)) @dataclass @@ -1188,6 +1501,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes.""" + body = {} + 
if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPermission: """Deserializes the ClusterPermission from a dictionary.""" @@ -1221,6 +1542,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPermissions: """Deserializes the ClusterPermissions from a dictionary.""" @@ -1243,6 +1572,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsDescription: """Deserializes the ClusterPermissionsDescription from a dictionary.""" @@ -1265,6 +1601,13 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.access_control_list: body['access_control_list'] = self.access_control_list + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsRequest: """Deserializes the ClusterPermissionsRequest from a dictionary.""" @@ -1296,6 +1639,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlRequest: """Deserializes the ClusterPolicyAccessControlRequest from a dictionary.""" @@ -1333,6 +1686,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlResponse: """Deserializes the ClusterPolicyAccessControlResponse from a dictionary.""" @@ 
-1360,6 +1724,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermission: """Deserializes the ClusterPolicyPermission from a dictionary.""" @@ -1391,6 +1763,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissions: """Deserializes the ClusterPolicyPermissions from a dictionary.""" @@ -1414,6 +1794,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsDescription: """Deserializes the 
ClusterPolicyPermissionsDescription from a dictionary.""" @@ -1436,6 +1823,13 @@ def as_dict(self) -> dict: if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest: """Deserializes the ClusterPolicyPermissionsRequest from a dictionary.""" @@ -1470,6 +1864,14 @@ def as_dict(self) -> dict: if self.previous_value is not None: body['previous_value'] = self.previous_value return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.field is not None: body['field'] = self.field + if self.new_value is not None: body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange: """Deserializes the ClusterSettingsChange from a dictionary.""" @@ -1501,6 +1903,13 @@ def as_dict(self) -> dict: if self.num_workers is not None: body['num_workers'] = self.num_workers return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterSize into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale + if self.num_workers is not None: body['num_workers'] = self.num_workers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterSize: """Deserializes the ClusterSize from a dictionary.""" @@ -1545,11 +1954,11 @@ class ClusterSpec: a set of default values will be 
used.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at @@ -1567,13 +1976,19 @@ class ClusterSpec: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. 
* `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -1614,6 +2029,20 @@ class ClusterSpec: instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -1672,6 +2101,12 @@ class ClusterSpec: private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -1698,6 +2133,8 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value if self.node_type_id is not None: body['node_type_id'] = self.node_type_id if self.num_workers is not None: body['num_workers'] = self.num_workers if self.policy_id is not None: body['policy_id'] = self.policy_id @@ -1707,29 +2144,71 @@ def as_dict(self) -> dict: if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ClusterSpec: - """Deserializes the ClusterSpec from a dictionary.""" - return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), - autoscale=_from_dict(d, 'autoscale', AutoScale), - autotermination_minutes=d.get('autotermination_minutes', None), - 
aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), - cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), - cluster_name=d.get('cluster_name', None), - custom_tags=d.get('custom_tags', None), - data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), - docker_image=_from_dict(d, 'docker_image', DockerImage), - driver_instance_pool_id=d.get('driver_instance_pool_id', None), - driver_node_type_id=d.get('driver_node_type_id', None), - enable_elastic_disk=d.get('enable_elastic_disk', None), - enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), + def as_shallow_dict(self) -> dict: + """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_policy_default_values is not None: + body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + 
body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClusterSpec: + """Deserializes the ClusterSpec from a dictionary.""" + return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), + autoscale=_from_dict(d, 'autoscale', AutoScale), + autotermination_minutes=d.get('autotermination_minutes', None), + aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), + cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), + cluster_name=d.get('cluster_name', None), + custom_tags=d.get('custom_tags', None), + data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), + docker_image=_from_dict(d, 
'docker_image', DockerImage), + driver_instance_pool_id=d.get('driver_instance_pool_id', None), + driver_node_type_id=d.get('driver_node_type_id', None), + enable_elastic_disk=d.get('enable_elastic_disk', None), + enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), + kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), @@ -1739,6 +2218,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterSpec: spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -1764,6 +2244,15 @@ def as_dict(self) -> dict: if self.language is not None: body['language'] = self.language.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Command into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command is not None: body['command'] = self.command + if self.context_id is not None: body['contextId'] = self.context_id + if self.language is not None: body['language'] = self.language + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Command: """Deserializes the Command from a dictionary.""" @@ -1799,6 +2288,14 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.results: 
body['results'] = self.results + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CommandStatusResponse: """Deserializes the CommandStatusResponse from a dictionary.""" @@ -1827,6 +2324,13 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ContextStatusResponse: """Deserializes the ContextStatusResponse from a dictionary.""" @@ -1866,11 +2370,11 @@ class CreateCluster: cluster.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. 
If not specified at @@ -1888,13 +2392,19 @@ class CreateCluster: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. 
The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -1935,6 +2445,20 @@ class CreateCluster: instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -1989,6 +2513,12 @@ class CreateCluster: private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. 
+ + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -2016,6 +2546,8 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value if self.node_type_id is not None: body['node_type_id'] = self.node_type_id if self.num_workers is not None: body['num_workers'] = self.num_workers if self.policy_id is not None: body['policy_id'] = self.policy_id @@ -2025,9 +2557,50 @@ def as_dict(self) -> dict: if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_policy_default_values is not None: + body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.clone_from: body['clone_from'] = self.clone_from + if 
self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCluster: """Deserializes the CreateCluster from a dictionary.""" @@ -2049,6 +2622,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster: gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), + kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), @@ -2058,6 +2633,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster: spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -2071,6 +2647,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateClusterResponse: """Deserializes the CreateClusterResponse from a dictionary.""" @@ -2091,6 +2673,13 @@ def as_dict(self) -> dict: if self.language is not None: body['language'] = self.language.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateContext into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.language is not None: body['language'] = self.language + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateContext: """Deserializes the CreateContext from a dictionary.""" @@ -2180,6 +2769,25 @@ def 
as_dict(self) -> dict: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateInstancePool: """Deserializes the CreateInstancePool from a dictionary.""" @@ -2209,6 +2817,12 @@ def as_dict(self) -> dict: if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + return body + @classmethod def from_dict(cls, d: Dict[str, 
any]) -> CreateInstancePoolResponse: """Deserializes the CreateInstancePoolResponse from a dictionary.""" @@ -2266,6 +2880,19 @@ def as_dict(self) -> dict: if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.libraries: body['libraries'] = self.libraries + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePolicy: """Deserializes the CreatePolicy from a dictionary.""" @@ -2289,6 +2916,12 @@ def as_dict(self) -> dict: if self.policy_id is not None: body['policy_id'] = self.policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePolicyResponse: """Deserializes the CreatePolicyResponse from a dictionary.""" @@ -2306,6 +2939,12 @@ def as_dict(self) -> dict: if self.script_id is not None: body['script_id'] = self.script_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.script_id is not None: body['script_id'] = self.script_id + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" @@ -2322,12 +2961,54 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the Created into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Created: """Deserializes the Created from a dictionary.""" return cls(id=d.get('id', None)) +@dataclass +class CustomPolicyTag: + key: str + """The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be + “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these + tags are preserved. + + - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala + (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17)""" + + value: Optional[str] = None + """The value of the tag. 
+ + - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala + (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)""" + + def as_dict(self) -> dict: + """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CustomPolicyTag: + """Deserializes the CustomPolicyTag from a dictionary.""" + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class DataPlaneEventDetails: event_type: Optional[DataPlaneEventDetailsEventType] = None @@ -2351,6 +3032,15 @@ def as_dict(self) -> dict: if self.timestamp is not None: body['timestamp'] = self.timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.event_type is not None: body['event_type'] = self.event_type + if self.executor_failures is not None: body['executor_failures'] = self.executor_failures + if self.host_id is not None: body['host_id'] = self.host_id + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DataPlaneEventDetails: """Deserializes the DataPlaneEventDetails from a dictionary.""" @@ -2370,13 +3060,19 @@ class DataPlaneEventDetailsEventType(Enum): class DataSecurityMode(Enum): """Data security mode decides what data governance model to use when accessing data 
from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -2387,6 +3083,9 @@ class DataSecurityMode(Enum): Passthrough on standard clusters. 
* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" + DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO' + DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED' + DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD' LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH' LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER' LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD' @@ -2407,6 +3106,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DbfsStorageInfo: """Deserializes the DbfsStorageInfo from a dictionary.""" @@ -2424,6 +3129,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteCluster: """Deserializes the DeleteCluster from a dictionary.""" @@ -2438,6 +3149,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteClusterResponse: """Deserializes the DeleteClusterResponse from a dictionary.""" @@ -2455,6 +3171,12 @@ def as_dict(self) -> dict: if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body + def as_shallow_dict(self) -> dict: + """Serializes the 
DeleteInstancePool into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePool: """Deserializes the DeleteInstancePool from a dictionary.""" @@ -2469,6 +3191,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePoolResponse: """Deserializes the DeleteInstancePoolResponse from a dictionary.""" @@ -2486,6 +3213,12 @@ def as_dict(self) -> dict: if self.policy_id is not None: body['policy_id'] = self.policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeletePolicy: """Deserializes the DeletePolicy from a dictionary.""" @@ -2500,6 +3233,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeletePolicyResponse: """Deserializes the DeletePolicyResponse from a dictionary.""" @@ -2514,6 +3252,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -2533,6 +3276,13 @@ def as_dict(self) -> dict: if self.context_id 
is not None: body['contextId'] = self.context_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DestroyContext into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.context_id is not None: body['contextId'] = self.context_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DestroyContext: """Deserializes the DestroyContext from a dictionary.""" @@ -2547,6 +3297,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DestroyResponse: """Deserializes the DestroyResponse from a dictionary.""" @@ -2595,6 +3350,16 @@ def as_dict(self) -> dict: if self.disk_type: body['disk_type'] = self.disk_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disk_count is not None: body['disk_count'] = self.disk_count + if self.disk_iops is not None: body['disk_iops'] = self.disk_iops + if self.disk_size is not None: body['disk_size'] = self.disk_size + if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput + if self.disk_type: body['disk_type'] = self.disk_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DiskSpec: """Deserializes the DiskSpec from a dictionary.""" @@ -2619,6 +3384,14 @@ def as_dict(self) -> dict: if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" + body = {} + if self.azure_disk_volume_type is not None: + body['azure_disk_volume_type'] = self.azure_disk_volume_type + if 
self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DiskType: """Deserializes the DiskType from a dictionary.""" @@ -2653,6 +3426,13 @@ def as_dict(self) -> dict: if self.username is not None: body['username'] = self.username return body + def as_shallow_dict(self) -> dict: + """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" + body = {} + if self.password is not None: body['password'] = self.password + if self.username is not None: body['username'] = self.username + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DockerBasicAuth: """Deserializes the DockerBasicAuth from a dictionary.""" @@ -2673,6 +3453,13 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.basic_auth: body['basic_auth'] = self.basic_auth + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DockerImage: """Deserializes the DockerImage from a dictionary.""" @@ -2689,7 +3476,7 @@ class EbsVolumeType(Enum): @dataclass class EditCluster: cluster_id: str - """ID of the cluser""" + """ID of the cluster""" spark_version: str """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can @@ -2718,11 +3505,11 @@ class EditCluster: a set of default values will be used.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. 
The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at @@ -2740,13 +3527,19 @@ class EditCluster: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. 
* `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -2787,6 +3580,20 @@ class EditCluster: instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -2841,6 +3648,12 @@ class EditCluster: private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. 
+ + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -2868,6 +3681,8 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value if self.node_type_id is not None: body['node_type_id'] = self.node_type_id if self.num_workers is not None: body['num_workers'] = self.num_workers if self.policy_id is not None: body['policy_id'] = self.policy_id @@ -2877,9 +3692,50 @@ def as_dict(self) -> dict: if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EditCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_policy_default_values is not None: + body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if 
self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditCluster: """Deserializes the EditCluster from a dictionary.""" @@ -2901,6 +3757,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster: gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), + kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), @@ -2910,6 +3768,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster: spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -2921,6 +3780,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditClusterResponse: """Deserializes the EditClusterResponse from a dictionary.""" @@ -2976,6 +3840,19 @@ def as_dict(self) -> dict: if self.node_type_id is not None: body['node_type_id'] = self.node_type_id return body + def as_shallow_dict(self) -> dict: + """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.idle_instance_autotermination_minutes is not None: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: 
body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditInstancePool: """Deserializes the EditInstancePool from a dictionary.""" @@ -2996,6 +3873,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditInstancePoolResponse: """Deserializes the EditInstancePoolResponse from a dictionary.""" @@ -3057,6 +3939,20 @@ def as_dict(self) -> dict: if self.policy_id is not None: body['policy_id'] = self.policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the EditPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.libraries: body['libraries'] = self.libraries + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditPolicy: """Deserializes the EditPolicy from a dictionary.""" @@ -3078,6 +3974,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditPolicyResponse into a shallow dictionary of its immediate 
attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditPolicyResponse: """Deserializes the EditPolicyResponse from a dictionary.""" @@ -3092,6 +3993,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditResponse: """Deserializes the EditResponse from a dictionary.""" @@ -3114,6 +4020,13 @@ def as_dict(self) -> dict: if self.validate_only is not None: body['validate_only'] = self.validate_only return body + def as_shallow_dict(self) -> dict: + """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest: """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" @@ -3137,6 +4050,13 @@ def as_dict(self) -> dict: if self.has_changes is not None: body['has_changes'] = self.has_changes return body + def as_shallow_dict(self) -> dict: + """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.changes: body['changes'] = self.changes + if self.has_changes is not None: body['has_changes'] = self.has_changes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse: """Deserializes the EnforceClusterComplianceResponse from a dictionary.""" @@ -3169,6 +4089,13 @@ def as_dict(self) -> dict: if self.dependencies: body['dependencies'] = [v for v in self.dependencies] return body + def as_shallow_dict(self) -> dict: + """Serializes the Environment into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.client is not None: body['client'] = self.client + if self.dependencies: body['dependencies'] = self.dependencies + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Environment: """Deserializes the Environment from a dictionary.""" @@ -3266,6 +4193,32 @@ def as_dict(self) -> dict: if self.user is not None: body['user'] = self.user return body + def as_shallow_dict(self) -> dict: + """Serializes the EventDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attributes: body['attributes'] = self.attributes + if self.cause is not None: body['cause'] = self.cause + if self.cluster_size: body['cluster_size'] = self.cluster_size + if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus + if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers + if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason + if self.disk_size is not None: body['disk_size'] = self.disk_size + if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message + if self.enable_termination_for_node_blocklisted is not None: + body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted + if self.free_space is not None: body['free_space'] = self.free_space + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.job_run_name is not None: body['job_run_name'] = self.job_run_name + if self.previous_attributes: body['previous_attributes'] = self.previous_attributes + if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size + if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size + if self.reason: body['reason'] = self.reason + if self.target_num_vcpus is not 
None: body['target_num_vcpus'] = self.target_num_vcpus + if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers + if self.user is not None: body['user'] = self.user + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EventDetails: """Deserializes the EventDetails from a dictionary.""" @@ -3303,8 +4256,10 @@ class EventDetailsCause(Enum): class EventType(Enum): - # [PROD-2198] Test data in the backend has an event type that was missing here ADD_NODES_FAILED = 'ADD_NODES_FAILED' + AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE' + AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF' + AUTOSCALING_FAILED = 'AUTOSCALING_FAILED' AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT' CREATING = 'CREATING' DBFS_DOWN = 'DBFS_DOWN' @@ -3378,6 +4333,19 @@ def as_dict(self) -> dict: if self.zone_id is not None: body['zone_id'] = self.zone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.availability is not None: body['availability'] = self.availability + if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size + if self.google_service_account is not None: + body['google_service_account'] = self.google_service_account + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.use_preemptible_executors is not None: + body['use_preemptible_executors'] = self.use_preemptible_executors + if self.zone_id is not None: body['zone_id'] = self.zone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcpAttributes: """Deserializes the GcpAttributes from a dictionary.""" @@ -3409,6 +4377,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes.""" + body = {} 
+ if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo: """Deserializes the GcsStorageInfo from a dictionary.""" @@ -3433,6 +4407,13 @@ def as_dict(self) -> dict: if self.violations: body['violations'] = self.violations return body + def as_shallow_dict(self) -> dict: + """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse: """Deserializes the GetClusterComplianceResponse from a dictionary.""" @@ -3450,6 +4431,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetClusterPermissionLevelsResponse: """Deserializes the GetClusterPermissionLevelsResponse from a dictionary.""" @@ -3467,6 +4454,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetClusterPolicyPermissionLevelsResponse: """Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary.""" @@ 
-3500,15 +4493,27 @@ class GetEvents: """The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.""" - def as_dict(self) -> dict: - """Serializes the GetEvents into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the GetEvents into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.end_time is not None: body['end_time'] = self.end_time + if self.event_types: body['event_types'] = [v.value for v in self.event_types] + if self.limit is not None: body['limit'] = self.limit + if self.offset is not None: body['offset'] = self.offset + if self.order is not None: body['order'] = self.order.value + if self.start_time is not None: body['start_time'] = self.start_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetEvents into a shallow dictionary of its immediate attributes.""" body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id if self.end_time is not None: body['end_time'] = self.end_time - if self.event_types: body['event_types'] = [v.value for v in self.event_types] + if self.event_types: body['event_types'] = self.event_types if self.limit is not None: body['limit'] = self.limit if self.offset is not None: body['offset'] = self.offset - if self.order is not None: body['order'] = self.order.value + if self.order is not None: body['order'] = self.order if self.start_time is not None: body['start_time'] = self.start_time return body @@ -3551,6 +4556,14 @@ def as_dict(self) -> dict: if self.total_count is not None: body['total_count'] = self.total_count return body + def as_shallow_dict(self) -> dict: + """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.events: body['events'] = self.events + if self.next_page: body['next_page'] = self.next_page + if 
self.total_count is not None: body['total_count'] = self.total_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetEventsResponse: """Deserializes the GetEventsResponse from a dictionary.""" @@ -3670,6 +4683,30 @@ def as_dict(self) -> dict: if self.status: body['status'] = self.status.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + if self.state is not None: body['state'] = self.state + if self.stats: body['stats'] = self.stats + if self.status: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetInstancePool: 
"""Deserializes the GetInstancePool from a dictionary.""" @@ -3704,6 +4741,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetInstancePoolPermissionLevelsResponse: """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary.""" @@ -3722,6 +4765,12 @@ def as_dict(self) -> dict: if self.versions: body['versions'] = [v.as_dict() for v in self.versions] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.versions: body['versions'] = self.versions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetSparkVersionsResponse: """Deserializes the GetSparkVersionsResponse from a dictionary.""" @@ -3759,6 +4808,15 @@ def as_dict(self) -> dict: if self.script is not None: body['script'] = self.script return body + def as_shallow_dict(self) -> dict: + """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptCreateRequest: """Deserializes the GlobalInitScriptCreateRequest from a dictionary.""" @@ -3808,6 +4866,19 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def 
as_shallow_dict(self) -> dict: + """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetails: """Deserializes the GlobalInitScriptDetails from a dictionary.""" @@ -3865,6 +4936,20 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetailsWithContent: """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary.""" @@ -3914,6 +4999,16 @@ def as_dict(self) -> dict: if self.script_id is not None: 
body['script_id'] = self.script_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + if self.script_id is not None: body['script_id'] = self.script_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptUpdateRequest: """Deserializes the GlobalInitScriptUpdateRequest from a dictionary.""" @@ -3943,6 +5038,14 @@ def as_dict(self) -> dict: if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node return body + def as_shallow_dict(self) -> dict: + """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster: body['cluster'] = self.cluster + if self.global_: body['global'] = self.global_ + if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InitScriptEventDetails: """Deserializes the InitScriptEventDetails from a dictionary.""" @@ -3971,6 +5074,15 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the InitScriptExecutionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error_message is not None: body['error_message'] = self.error_message + if self.execution_duration_seconds is not None: + body['execution_duration_seconds'] = self.execution_duration_seconds + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InitScriptExecutionDetails: """Deserializes the 
InitScriptExecutionDetails from a dictionary.""" @@ -4033,6 +5145,18 @@ def as_dict(self) -> dict: if self.workspace: body['workspace'] = self.workspace.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.abfss: body['abfss'] = self.abfss + if self.dbfs: body['dbfs'] = self.dbfs + if self.file: body['file'] = self.file + if self.gcs: body['gcs'] = self.gcs + if self.s3: body['s3'] = self.s3 + if self.volumes: body['volumes'] = self.volumes + if self.workspace: body['workspace'] = self.workspace + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InitScriptInfo: """Deserializes the InitScriptInfo from a dictionary.""" @@ -4060,6 +5184,13 @@ def as_dict(self) -> dict: if self.script: body['script'] = self.script.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.execution_details: body['execution_details'] = self.execution_details + if self.script: body['script'] = self.script + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InitScriptInfoAndExecutionDetails: """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" @@ -4082,6 +5213,13 @@ def as_dict(self) -> dict: if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] return body + def as_shallow_dict(self) -> dict: + """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = self.libraries + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstallLibraries: """Deserializes the InstallLibraries from a dictionary.""" @@ -4096,6 +5234,11 @@ def as_dict(self) -> dict: body = {} return body + def 
as_shallow_dict(self) -> dict: + """Serializes the InstallLibrariesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstallLibrariesResponse: """Deserializes the InstallLibrariesResponse from a dictionary.""" @@ -4126,6 +5269,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlRequest: """Deserializes the InstancePoolAccessControlRequest from a dictionary.""" @@ -4163,6 +5316,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlResponse: """Deserializes the InstancePoolAccessControlResponse from a dictionary.""" @@ -4284,6 
+5448,30 @@ def as_dict(self) -> dict: if self.status: body['status'] = self.status.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + if self.state is not None: body['state'] = self.state + if self.stats: body['stats'] = self.stats + if self.status: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolAndStats: """Deserializes the InstancePoolAndStats from a dictionary.""" @@ -4343,6 +5531,15 @@ def as_dict(self) -> dict: if self.zone_id is not None: body['zone_id'] = self.zone_id return body + def as_shallow_dict(self) -> 
dict: + """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.availability is not None: body['availability'] = self.availability + if self.spot_bid_price_percent is not None: + body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" @@ -4378,6 +5575,13 @@ def as_dict(self) -> dict: if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.availability is not None: body['availability'] = self.availability + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolAzureAttributes: """Deserializes the InstancePoolAzureAttributes from a dictionary.""" @@ -4429,6 +5633,14 @@ def as_dict(self) -> dict: if self.zone_id is not None: body['zone_id'] = self.zone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.zone_id is not None: body['zone_id'] = self.zone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolGcpAttributes: """Deserializes the InstancePoolGcpAttributes from a dictionary.""" @@ -4454,6 +5666,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = 
self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermission: """Deserializes the InstancePoolPermission from a dictionary.""" @@ -4486,6 +5706,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissions: """Deserializes the InstancePoolPermissions from a dictionary.""" @@ -4509,6 +5737,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsDescription: """Deserializes the InstancePoolPermissionsDescription from a dictionary.""" @@ -4531,6 +5766,13 @@ def as_dict(self) -> dict: if 
self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsRequest: """Deserializes the InstancePoolPermissionsRequest from a dictionary.""" @@ -4570,6 +5812,15 @@ def as_dict(self) -> dict: if self.used_count is not None: body['used_count'] = self.used_count return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes.""" + body = {} + if self.idle_count is not None: body['idle_count'] = self.idle_count + if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count + if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count + if self.used_count is not None: body['used_count'] = self.used_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolStats: """Deserializes the InstancePoolStats from a dictionary.""" @@ -4593,6 +5844,12 @@ def as_dict(self) -> dict: body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] return body + def as_shallow_dict(self) -> dict: + """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstancePoolStatus: """Deserializes the InstancePoolStatus from a dictionary.""" @@ -4628,6 +5885,15 @@ def as_dict(self) -> dict: body['is_meta_instance_profile'] = 
self.is_meta_instance_profile return body + def as_shallow_dict(self) -> dict: + """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body['is_meta_instance_profile'] = self.is_meta_instance_profile + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstanceProfile: """Deserializes the InstanceProfile from a dictionary.""" @@ -4636,6 +5902,17 @@ def from_dict(cls, d: Dict[str, any]) -> InstanceProfile: is_meta_instance_profile=d.get('is_meta_instance_profile', None)) +class Kind(Enum): + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + + CLASSIC_PREVIEW = 'CLASSIC_PREVIEW' + + class Language(Enum): PYTHON = 'python' @@ -4690,6 +5967,18 @@ def as_dict(self) -> dict: if self.whl is not None: body['whl'] = self.whl return body + def as_shallow_dict(self) -> dict: + """Serializes the Library into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cran: body['cran'] = self.cran + if self.egg is not None: body['egg'] = self.egg + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven + if self.pypi: body['pypi'] = self.pypi + if self.requirements is not None: body['requirements'] = self.requirements + if self.whl is not None: body['whl'] = self.whl + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Library: """Deserializes the Library from a dictionary.""" @@ -4728,6 +6017,16 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def 
as_shallow_dict(self) -> dict: + """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_library_for_all_clusters is not None: + body['is_library_for_all_clusters'] = self.is_library_for_all_clusters + if self.library: body['library'] = self.library + if self.messages: body['messages'] = self.messages + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LibraryFullStatus: """Deserializes the LibraryFullStatus from a dictionary.""" @@ -4761,6 +6060,12 @@ def as_dict(self) -> dict: if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.statuses: body['statuses'] = self.statuses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAllClusterLibraryStatusesResponse: """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary.""" @@ -4782,6 +6087,13 @@ def as_dict(self) -> dict: if self.zones: body['zones'] = [v for v in self.zones] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_zone is not None: body['default_zone'] = self.default_zone + if self.zones: body['zones'] = self.zones + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse: """Deserializes the ListAvailableZonesResponse from a dictionary.""" @@ -4809,6 +6121,14 @@ def as_dict(self) -> dict: if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clusters: 
body['clusters'] = self.clusters + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse: """Deserializes the ListClusterCompliancesResponse from a dictionary.""" @@ -4840,6 +6160,15 @@ def as_dict(self) -> dict: if self.policy_id is not None: body['policy_id'] = self.policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_sources: body['cluster_sources'] = self.cluster_sources + if self.cluster_states: body['cluster_states'] = self.cluster_states + if self.is_pinned is not None: body['is_pinned'] = self.is_pinned + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy: """Deserializes the ListClustersFilterBy from a dictionary.""" @@ -4870,6 +6199,14 @@ def as_dict(self) -> dict: if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clusters: body['clusters'] = self.clusters + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse: """Deserializes the ListClustersResponse from a dictionary.""" @@ -4894,6 +6231,13 @@ def as_dict(self) -> dict: if self.field is not None: body['field'] = self.field.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ListClustersSortBy into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.direction is not None: body['direction'] = self.direction + if self.field is not None: body['field'] = self.field + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy: """Deserializes the ListClustersSortBy from a dictionary.""" @@ -4926,6 +6270,12 @@ def as_dict(self) -> dict: if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scripts: body['scripts'] = self.scripts + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListGlobalInitScriptsResponse: """Deserializes the ListGlobalInitScriptsResponse from a dictionary.""" @@ -4942,6 +6292,12 @@ def as_dict(self) -> dict: if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_pools: body['instance_pools'] = self.instance_pools + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListInstancePools: """Deserializes the ListInstancePools from a dictionary.""" @@ -4959,6 +6315,12 @@ def as_dict(self) -> dict: if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_profiles: body['instance_profiles'] = self.instance_profiles + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListInstanceProfilesResponse: """Deserializes the ListInstanceProfilesResponse from a dictionary.""" @@ -4976,6 +6338,12 @@ def as_dict(self) -> dict: if self.node_types: 
body['node_types'] = [v.as_dict() for v in self.node_types] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.node_types: body['node_types'] = self.node_types + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListNodeTypesResponse: """Deserializes the ListNodeTypesResponse from a dictionary.""" @@ -4993,6 +6361,12 @@ def as_dict(self) -> dict: if self.policies: body['policies'] = [v.as_dict() for v in self.policies] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.policies: body['policies'] = self.policies + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse: """Deserializes the ListPoliciesResponse from a dictionary.""" @@ -5015,6 +6389,13 @@ def as_dict(self) -> dict: if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policy_families: body['policy_families'] = self.policy_families + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListPolicyFamiliesResponse: """Deserializes the ListPolicyFamiliesResponse from a dictionary.""" @@ -5046,6 +6427,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
LocalFileInfo: """Deserializes the LocalFileInfo from a dictionary.""" @@ -5069,6 +6456,15 @@ def as_dict(self) -> dict: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_analytics_primary_key is not None: + body['log_analytics_primary_key'] = self.log_analytics_primary_key + if self.log_analytics_workspace_id is not None: + body['log_analytics_workspace_id'] = self.log_analytics_workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogAnalyticsInfo: """Deserializes the LogAnalyticsInfo from a dictionary.""" @@ -5093,6 +6489,13 @@ def as_dict(self) -> dict: if self.last_exception is not None: body['last_exception'] = self.last_exception return body + def as_shallow_dict(self) -> dict: + """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_attempted is not None: body['last_attempted'] = self.last_attempted + if self.last_exception is not None: body['last_exception'] = self.last_exception + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogSyncStatus: """Deserializes the LogSyncStatus from a dictionary.""" @@ -5122,6 +6525,14 @@ def as_dict(self) -> dict: if self.repo is not None: body['repo'] = self.repo return body + def as_shallow_dict(self) -> dict: + """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.coordinates is not None: body['coordinates'] = self.coordinates + if self.exclusions: body['exclusions'] = self.exclusions + if self.repo is not None: body['repo'] = self.repo + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MavenLibrary: """Deserializes the MavenLibrary from a dictionary.""" @@ -5142,8 +6553,19 @@ class NodeInstanceType: local_nvme_disks: Optional[int] = None - def 
as_dict(self) -> dict: - """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb + if self.local_disks is not None: body['local_disks'] = self.local_disks + if self.local_nvme_disk_size_gb is not None: + body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb + if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes.""" body = {} if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb @@ -5247,6 +6669,34 @@ def as_dict(self) -> dict: if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk return body + def as_shallow_dict(self) -> dict: + """Serializes the NodeType into a shallow dictionary of its immediate attributes.""" + body = {} + if self.category is not None: body['category'] = self.category + if self.description is not None: body['description'] = self.description + if self.display_order is not None: body['display_order'] = self.display_order + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated + if self.is_encrypted_in_transit is not None: + body['is_encrypted_in_transit'] = self.is_encrypted_in_transit + if self.is_graviton is not None: body['is_graviton'] = self.is_graviton + if self.is_hidden is not None: body['is_hidden'] = 
self.is_hidden + if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled + if self.memory_mb is not None: body['memory_mb'] = self.memory_mb + if self.node_info: body['node_info'] = self.node_info + if self.node_instance_type: body['node_instance_type'] = self.node_instance_type + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_cores is not None: body['num_cores'] = self.num_cores + if self.num_gpus is not None: body['num_gpus'] = self.num_gpus + if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable + if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable + if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags + if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes + if self.support_port_forwarding is not None: + body['support_port_forwarding'] = self.support_port_forwarding + if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NodeType: """Deserializes the NodeType from a dictionary.""" @@ -5286,6 +6736,13 @@ def as_dict(self) -> dict: if self.message is not None: body['message'] = self.message return body + def as_shallow_dict(self) -> dict: + """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.message is not None: body['message'] = self.message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PendingInstanceError: """Deserializes the PendingInstanceError from a dictionary.""" @@ -5303,6 +6760,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def 
as_shallow_dict(self) -> dict: + """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteCluster: """Deserializes the PermanentDeleteCluster from a dictionary.""" @@ -5317,6 +6780,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PermanentDeleteClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteClusterResponse: """Deserializes the PermanentDeleteClusterResponse from a dictionary.""" @@ -5334,6 +6802,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PinCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PinCluster: """Deserializes the PinCluster from a dictionary.""" @@ -5348,6 +6822,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PinClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse: """Deserializes the PinClusterResponse from a dictionary.""" @@ -5425,6 +6904,23 @@ def as_dict(self) -> dict: if self.policy_id is not None: body['policy_id'] = self.policy_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Policy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp + if 
self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.is_default is not None: body['is_default'] = self.is_default + if self.libraries: body['libraries'] = self.libraries + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Policy: """Deserializes the Policy from a dictionary.""" @@ -5466,6 +6962,15 @@ def as_dict(self) -> dict: if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PolicyFamily: """Deserializes the PolicyFamily from a dictionary.""" @@ -5491,6 +6996,13 @@ def as_dict(self) -> dict: if self.repo is not None: body['repo'] = self.repo return body + def as_shallow_dict(self) -> dict: + """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.package is not None: body['package'] = self.package + if self.repo is 
not None: body['repo'] = self.repo + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PythonPyPiLibrary: """Deserializes the PythonPyPiLibrary from a dictionary.""" @@ -5512,6 +7024,13 @@ def as_dict(self) -> dict: if self.repo is not None: body['repo'] = self.repo return body + def as_shallow_dict(self) -> dict: + """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.package is not None: body['package'] = self.package + if self.repo is not None: body['repo'] = self.repo + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RCranLibrary: """Deserializes the RCranLibrary from a dictionary.""" @@ -5529,6 +7048,12 @@ def as_dict(self) -> dict: if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RemoveInstanceProfile: """Deserializes the RemoveInstanceProfile from a dictionary.""" @@ -5543,6 +7068,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RemoveResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RemoveResponse: """Deserializes the RemoveResponse from a dictionary.""" @@ -5576,6 +7106,14 @@ def as_dict(self) -> dict: if self.num_workers is not None: body['num_workers'] = self.num_workers return body + def as_shallow_dict(self) -> dict: + """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale + if self.cluster_id is not None: body['cluster_id'] 
= self.cluster_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResizeCluster: """Deserializes the ResizeCluster from a dictionary.""" @@ -5592,6 +7130,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ResizeClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResizeClusterResponse: """Deserializes the ResizeClusterResponse from a dictionary.""" @@ -5613,6 +7156,13 @@ def as_dict(self) -> dict: if self.restart_user is not None: body['restart_user'] = self.restart_user return body + def as_shallow_dict(self) -> dict: + """Serializes the RestartCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.restart_user is not None: body['restart_user'] = self.restart_user + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestartCluster: """Deserializes the RestartCluster from a dictionary.""" @@ -5627,6 +7177,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RestartClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestartClusterResponse: """Deserializes the RestartClusterResponse from a dictionary.""" @@ -5686,6 +7241,21 @@ def as_dict(self) -> dict: if self.truncated is not None: body['truncated'] = self.truncated return body + def as_shallow_dict(self) -> dict: + """Serializes the Results into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cause is not None: body['cause'] = self.cause + if self.data: body['data'] = self.data + if self.file_name is not None: body['fileName'] = self.file_name + if 
self.file_names: body['fileNames'] = self.file_names + if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema + if self.pos is not None: body['pos'] = self.pos + if self.result_type is not None: body['resultType'] = self.result_type + if self.schema: body['schema'] = self.schema + if self.summary is not None: body['summary'] = self.summary + if self.truncated is not None: body['truncated'] = self.truncated + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Results: """Deserializes the Results from a dictionary.""" @@ -5762,6 +7332,18 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canned_acl is not None: body['canned_acl'] = self.canned_acl + if self.destination is not None: body['destination'] = self.destination + if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption + if self.encryption_type is not None: body['encryption_type'] = self.encryption_type + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.kms_key is not None: body['kms_key'] = self.kms_key + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> S3StorageInfo: """Deserializes the S3StorageInfo from a dictionary.""" @@ -5818,6 +7400,18 @@ def as_dict(self) -> dict: if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the SparkNode into a shallow dictionary of its immediate attributes.""" + body = {} + if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.node_aws_attributes: body['node_aws_attributes'] = 
self.node_aws_attributes + if self.node_id is not None: body['node_id'] = self.node_id + if self.private_ip is not None: body['private_ip'] = self.private_ip + if self.public_dns is not None: body['public_dns'] = self.public_dns + if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparkNode: """Deserializes the SparkNode from a dictionary.""" @@ -5841,6 +7435,12 @@ def as_dict(self) -> dict: if self.is_spot is not None: body['is_spot'] = self.is_spot return body + def as_shallow_dict(self) -> dict: + """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_spot is not None: body['is_spot'] = self.is_spot + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparkNodeAwsAttributes: """Deserializes the SparkNodeAwsAttributes from a dictionary.""" @@ -5865,6 +7465,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the SparkVersion into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparkVersion: """Deserializes the SparkVersion from a dictionary.""" @@ -5882,6 +7489,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: + """Serializes the StartCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StartCluster: """Deserializes the StartCluster from a dictionary.""" @@ -5896,6 +7509,11 @@ def as_dict(self) -> dict: body = {} return body + def 
as_shallow_dict(self) -> dict: + """Serializes the StartClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StartClusterResponse: """Deserializes the StartClusterResponse from a dictionary.""" @@ -5934,6 +7552,14 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TerminationReason into a shallow dictionary of its immediate attributes.""" + body = {} + if self.code is not None: body['code'] = self.code + if self.parameters: body['parameters'] = self.parameters + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" @@ -6051,6 +7677,13 @@ def as_dict(self) -> dict: if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] return body + def as_shallow_dict(self) -> dict: + """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = self.libraries + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UninstallLibraries: """Deserializes the UninstallLibraries from a dictionary.""" @@ -6065,6 +7698,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UninstallLibrariesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UninstallLibrariesResponse: """Deserializes the UninstallLibrariesResponse from a dictionary.""" @@ -6082,6 +7720,12 @@ def as_dict(self) -> dict: if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body + def as_shallow_dict(self) -> dict: 
+ """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UnpinCluster: """Deserializes the UnpinCluster from a dictionary.""" @@ -6096,6 +7740,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UnpinClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse: """Deserializes the UnpinClusterResponse from a dictionary.""" @@ -6124,6 +7773,14 @@ def as_dict(self) -> dict: if self.update_mask is not None: body['update_mask'] = self.update_mask return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster: body['cluster'] = self.cluster + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateCluster: """Deserializes the UpdateCluster from a dictionary.""" @@ -6153,11 +7810,11 @@ class UpdateClusterResource: a set of default values will be used.""" cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. - If the conf is given, the logs will be delivered to the destination every `5 mins`. The - destination of driver logs is `$destination/$clusterId/driver`, while the destination of - executor logs is `$destination/$clusterId/executor`.""" + """The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination + every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the + destination of executor logs is `$destination/$clusterId/executor`.""" cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at @@ -6175,13 +7832,19 @@ class UpdateClusterResource: data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. 
Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -6222,6 +7885,20 @@ class UpdateClusterResource: instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" + is_single_node: Optional[bool] = None + """This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, + `spark_conf`, and `num_workers`""" + + kind: Optional[Kind] = None + """The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = + CLASSIC_PREVIEW`.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -6280,6 +7957,12 @@ class UpdateClusterResource: private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" + use_ml_runtime: Optional[bool] = None + """This field can only be used with `kind`. 
+ + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" + workload_type: Optional[WorkloadType] = None def as_dict(self) -> dict: @@ -6304,6 +7987,8 @@ def as_dict(self) -> dict: if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value if self.node_type_id is not None: body['node_type_id'] = self.node_type_id if self.num_workers is not None: body['num_workers'] = self.num_workers if self.policy_id is not None: body['policy_id'] = self.policy_id @@ -6313,9 +7998,47 @@ def as_dict(self) -> dict: if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars if self.spark_version is not None: body['spark_version'] = self.spark_version if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: 
body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource: """Deserializes the UpdateClusterResource from a dictionary.""" @@ -6335,6 +8058,8 @@ 
def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource: gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), + is_single_node=d.get('is_single_node', None), + kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), @@ -6344,6 +8069,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource: spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), + use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) @@ -6355,6 +8081,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse: """Deserializes the UpdateClusterResponse from a dictionary.""" @@ -6369,6 +8100,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -6378,7 +8114,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: @dataclass class VolumesStorageInfo: destination: str - """Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`""" + """Unity Catalog volumes file destination, e.g. 
`/Volumes/catalog/schema/volume/dir/file`""" def as_dict(self) -> dict: """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body.""" @@ -6386,6 +8122,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> VolumesStorageInfo: """Deserializes the VolumesStorageInfo from a dictionary.""" @@ -6403,6 +8145,12 @@ def as_dict(self) -> dict: if self.clients: body['clients'] = self.clients.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkloadType into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clients: body['clients'] = self.clients + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkloadType: """Deserializes the WorkloadType from a dictionary.""" @@ -6420,6 +8168,12 @@ def as_dict(self) -> dict: if self.destination is not None: body['destination'] = self.destination return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceStorageInfo: """Deserializes the WorkspaceStorageInfo from a dictionary.""" @@ -6676,7 +8430,8 @@ def set_permissions( ) -> ClusterPolicyPermissions: """Set cluster policy permissions. - Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. 
Objects can inherit permissions from their root object. :param cluster_policy_id: str The cluster policy for which to get or manage permissions. @@ -6853,6 +8608,8 @@ def create(self, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, @@ -6861,6 +8618,7 @@ def create(self, spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: """Create new cluster. @@ -6898,11 +8656,11 @@ def create(self, :param clone_from: :class:`CloneCluster` (optional) When specified, this clones libraries from a source cluster during the creation of a new cluster. :param cluster_log_conf: :class:`ClusterLogConf` (optional) - The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If - the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of - driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is - `$destination/$clusterId/executor`. + The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination every + `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination + of executor logs is `$destination/$clusterId/executor`. 
:param cluster_name: str (optional) Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -6916,13 +8674,19 @@ def create(self, :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are - not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a - single user specified in `single_user_name`. Most programming languages, cluster features and data - governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be - shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data - and credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias + for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple + users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: + A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. + Most programming languages, cluster features and data governance features are available in this + mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are + fully isolated so that they cannot see each other's data and credentials. Most data governance + features are supported in this mode. 
But programming languages and cluster features might be + limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -6954,6 +8718,17 @@ def create(self, logs are sent to `//init_scripts`. :param instance_pool_id: str (optional) The optional ID of the instance pool to which the cluster belongs. + :param is_single_node: bool (optional) + This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, + and `num_workers` + :param kind: :class:`Kind` (optional) + The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`. :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -7000,6 +8775,11 @@ def create(self, SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified. + :param use_ml_runtime: bool (optional) + This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not. 
:param workload_type: :class:`WorkloadType` (optional) :returns: @@ -7027,6 +8807,8 @@ def create(self, if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + if is_single_node is not None: body['is_single_node'] = is_single_node + if kind is not None: body['kind'] = kind.value if node_type_id is not None: body['node_type_id'] = node_type_id if num_workers is not None: body['num_workers'] = num_workers if policy_id is not None: body['policy_id'] = policy_id @@ -7036,6 +8818,7 @@ def create(self, if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars if spark_version is not None: body['spark_version'] = spark_version if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -7066,6 +8849,8 @@ def create_and_wait( gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, @@ -7074,6 +8859,7 @@ def create_and_wait( spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.create(apply_policy_default_values=apply_policy_default_values, @@ -7094,6 +8880,8 @@ def create_and_wait( gcp_attributes=gcp_attributes, 
init_scripts=init_scripts, instance_pool_id=instance_pool_id, + is_single_node=is_single_node, + kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, @@ -7103,6 +8891,7 @@ def create_and_wait( spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, + use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) def delete(self, cluster_id: str) -> Wait[ClusterDetails]: @@ -7152,6 +8941,8 @@ def edit(self, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, @@ -7160,6 +8951,7 @@ def edit(self, spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] = None, ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: """Update cluster configuration. @@ -7176,7 +8968,7 @@ def edit(self, Clusters created by the Databricks Jobs service cannot be edited. :param cluster_id: str - ID of the cluser + ID of the cluster :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -7197,11 +8989,11 @@ def edit(self, Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used. :param cluster_log_conf: :class:`ClusterLogConf` (optional) - The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. 
If - the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of - driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is - `$destination/$clusterId/executor`. + The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination every + `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination + of executor logs is `$destination/$clusterId/executor`. :param cluster_name: str (optional) Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -7215,13 +9007,19 @@ def edit(self, :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are - not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a - single user specified in `single_user_name`. Most programming languages, cluster features and data - governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be - shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data - and credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. 
* `DATA_SECURITY_MODE_DEDICATED`: Alias + for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple + users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: + A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. + Most programming languages, cluster features and data governance features are available in this + mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are + fully isolated so that they cannot see each other's data and credentials. Most data governance + features are supported in this mode. But programming languages and cluster features might be + limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -7253,6 +9051,17 @@ def edit(self, logs are sent to `//init_scripts`. :param instance_pool_id: str (optional) The optional ID of the instance pool to which the cluster belongs. + :param is_single_node: bool (optional) + This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, + and `num_workers` + :param kind: :class:`Kind` (optional) + The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`. :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -7299,6 +9108,11 @@ def edit(self, SSH public key contents that will be added to each Spark node in this cluster. 
The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified. + :param use_ml_runtime: bool (optional) + This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) :returns: @@ -7326,6 +9140,8 @@ def edit(self, if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + if is_single_node is not None: body['is_single_node'] = is_single_node + if kind is not None: body['kind'] = kind.value if node_type_id is not None: body['node_type_id'] = node_type_id if num_workers is not None: body['num_workers'] = num_workers if policy_id is not None: body['policy_id'] = policy_id @@ -7335,6 +9151,7 @@ def edit(self, if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars if spark_version is not None: body['spark_version'] = spark_version if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -7365,6 +9182,8 @@ def edit_and_wait( gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, @@ -7373,6 +9192,7 @@ def edit_and_wait( spark_conf: Optional[Dict[str, str]] = None, spark_env_vars: Optional[Dict[str, str]] 
= None, ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.edit(apply_policy_default_values=apply_policy_default_values, @@ -7393,6 +9213,8 @@ def edit_and_wait( gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, + is_single_node=is_single_node, + kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, @@ -7402,6 +9224,7 @@ def edit_and_wait( spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, + use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) def events(self, @@ -7703,7 +9526,8 @@ def set_permissions( access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: """Set cluster permissions. - Sets permissions on a cluster. Clusters can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param cluster_id: str The cluster for which to get or manage permissions. 
@@ -7896,20 +9720,19 @@ def wait_command_status_command_execution_cancelled( attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def wait_command_status_command_execution_finished_or_error( + def wait_context_status_command_execution_running( self, cluster_id: str, - command_id: str, context_id: str, timeout=timedelta(minutes=20), - callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: + callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) - failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) + target_states = (ContextStatus.RUNNING, ) + failure_states = (ContextStatus.ERROR, ) status_message = 'polling...' attempt = 1 while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + poll = self.context_status(cluster_id=cluster_id, context_id=context_id) status = poll.status status_message = f'current status: {status}' if status in target_states: @@ -7917,9 +9740,9 @@ def wait_command_status_command_execution_finished_or_error( if callback: callback(poll) if status in failure_states: - msg = f'failed to reach Finished or Error, got {status}: {status_message}' + msg = f'failed to reach Running, got {status}: {status_message}' raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + prefix = f"cluster_id={cluster_id}, context_id={context_id}" sleep = attempt if sleep > 10: # sleep 10s max per attempt @@ -7929,19 +9752,20 @@ def wait_command_status_command_execution_finished_or_error( attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def wait_context_status_command_execution_running( + def wait_command_status_command_execution_finished_or_error( self, cluster_id: str, + 
command_id: str, context_id: str, timeout=timedelta(minutes=20), - callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: + callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: deadline = time.time() + timeout.total_seconds() - target_states = (ContextStatus.RUNNING, ) - failure_states = (ContextStatus.ERROR, ) + target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) + failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) status_message = 'polling...' attempt = 1 while time.time() < deadline: - poll = self.context_status(cluster_id=cluster_id, context_id=context_id) + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) status = poll.status status_message = f'current status: {status}' if status in target_states: @@ -7949,9 +9773,9 @@ def wait_context_status_command_execution_running( if callback: callback(poll) if status in failure_states: - msg = f'failed to reach Running, got {status}: {status_message}' + msg = f'failed to reach Finished or Error, got {status}: {status_message}' raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, context_id={context_id}" + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" sleep = attempt if sleep > 10: # sleep 10s max per attempt @@ -8546,7 +10370,8 @@ def set_permissions( ) -> InstancePoolPermissions: """Set instance pool permissions. - Sets permissions on an instance pool. Instance pools can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param instance_pool_id: str The instance pool for which to get or manage permissions. 
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 4a4c640e6..c81159cca 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -21,100 +21,63 @@ @dataclass -class CreateDashboardRequest: - display_name: str - """The display name of the dashboard.""" - - parent_path: Optional[str] = None - """The workspace path of the folder containing the dashboard. Includes leading slash and no - trailing slash. This field is excluded in List Dashboards responses.""" - - serialized_dashboard: Optional[str] = None - """The contents of the dashboard in serialized string form. This field is excluded in List - Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - includes the `serialized_dashboard` field. This field provides the structure of the JSON string - that represents the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get""" - - warehouse_id: Optional[str] = None - """The warehouse ID used to run the dashboard.""" +class CancelQueryExecutionResponse: + status: Optional[List[CancelQueryExecutionResponseStatus]] = None def as_dict(self) -> dict: - """Serializes the CreateDashboardRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.status: body['status'] = [v.as_dict() for v in self.status] return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateDashboardRequest: - """Deserializes the CreateDashboardRequest 
from a dictionary.""" - return cls(display_name=d.get('display_name', None), - parent_path=d.get('parent_path', None), - serialized_dashboard=d.get('serialized_dashboard', None), - warehouse_id=d.get('warehouse_id', None)) - - -@dataclass -class CreateScheduleRequest: - cron_schedule: CronSchedule - """The cron expression describing the frequency of the periodic refresh for this schedule.""" - - dashboard_id: Optional[str] = None - """UUID identifying the dashboard to which the schedule belongs.""" - - display_name: Optional[str] = None - """The display name for schedule.""" - - pause_status: Optional[SchedulePauseStatus] = None - """The status indicates whether this schedule is paused or not.""" - - def as_dict(self) -> dict: - """Serializes the CreateScheduleRequest into a dictionary suitable for use as a JSON request body.""" + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict() - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.status: body['status'] = self.status return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateScheduleRequest: - """Deserializes the CreateScheduleRequest from a dictionary.""" - return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule), - dashboard_id=d.get('dashboard_id', None), - display_name=d.get('display_name', None), - pause_status=_enum(d, 'pause_status', SchedulePauseStatus)) + def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponse: + """Deserializes the CancelQueryExecutionResponse from a dictionary.""" + return cls(status=_repeated_dict(d, 'status', CancelQueryExecutionResponseStatus)) @dataclass -class 
CreateSubscriptionRequest: - subscriber: Subscriber - """Subscriber details for users and destinations to be added as subscribers to the schedule.""" +class CancelQueryExecutionResponseStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" - dashboard_id: Optional[str] = None - """UUID identifying the dashboard to which the subscription belongs.""" + pending: Optional[Empty] = None + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" - schedule_id: Optional[str] = None - """UUID identifying the schedule to which the subscription belongs.""" + success: Optional[Empty] = None + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" def as_dict(self) -> dict: - """Serializes the CreateSubscriptionRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - if self.subscriber: body['subscriber'] = self.subscriber.as_dict() + if self.data_token is not None: body['data_token'] = self.data_token + if self.pending: body['pending'] = self.pending.as_dict() + if self.success: body['success'] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: body['data_token'] = self.data_token + if self.pending: body['pending'] = self.pending + if self.success: body['success'] = self.success return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateSubscriptionRequest: - 
"""Deserializes the CreateSubscriptionRequest from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), - schedule_id=d.get('schedule_id', None), - subscriber=_from_dict(d, 'subscriber', Subscriber)) + def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponseStatus: + """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" + return cls(data_token=d.get('data_token', None), + pending=_from_dict(d, 'pending', Empty), + success=_from_dict(d, 'success', Empty)) @dataclass @@ -139,6 +102,14 @@ def as_dict(self) -> dict: if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quartz_cron_expression is not None: + body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" @@ -202,6 +173,21 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.etag is not None: body['etag'] = self.etag + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.path is not None: body['path'] = self.path + if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard + if 
self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Dashboard: """Deserializes the Dashboard from a dictionary.""" @@ -222,6 +208,27 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC' +class DataType(Enum): + + DATA_TYPE_ARRAY = 'DATA_TYPE_ARRAY' + DATA_TYPE_BIG_INT = 'DATA_TYPE_BIG_INT' + DATA_TYPE_BINARY = 'DATA_TYPE_BINARY' + DATA_TYPE_BOOLEAN = 'DATA_TYPE_BOOLEAN' + DATA_TYPE_DATE = 'DATA_TYPE_DATE' + DATA_TYPE_DECIMAL = 'DATA_TYPE_DECIMAL' + DATA_TYPE_DOUBLE = 'DATA_TYPE_DOUBLE' + DATA_TYPE_FLOAT = 'DATA_TYPE_FLOAT' + DATA_TYPE_INT = 'DATA_TYPE_INT' + DATA_TYPE_INTERVAL = 'DATA_TYPE_INTERVAL' + DATA_TYPE_MAP = 'DATA_TYPE_MAP' + DATA_TYPE_SMALL_INT = 'DATA_TYPE_SMALL_INT' + DATA_TYPE_STRING = 'DATA_TYPE_STRING' + DATA_TYPE_STRUCT = 'DATA_TYPE_STRUCT' + DATA_TYPE_TIMESTAMP = 'DATA_TYPE_TIMESTAMP' + DATA_TYPE_TINY_INT = 'DATA_TYPE_TINY_INT' + DATA_TYPE_VOID = 'DATA_TYPE_VOID' + + @dataclass class DeleteScheduleResponse: @@ -230,6 +237,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteScheduleResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteScheduleResponse: """Deserializes the DeleteScheduleResponse from a dictionary.""" @@ -244,12 +256,98 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteSubscriptionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse: """Deserializes the DeleteSubscriptionResponse from a dictionary.""" return cls() +@dataclass +class Empty: + """Represents an empty message, similar to 
google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class ExecutePublishedDashboardQueryRequest: + """Execute query request for published Dashboards. Since published dashboards have the option of + running as the publisher, the datasets, warehouse_id are excluded from the request and instead + read from the source (lakeview-config) via the additional parameters (dashboardName and + dashboardRevisionId)""" + + dashboard_name: str + """Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains + the list of datasets, warehouse_id, and embedded_credentials""" + + dashboard_revision_id: str + + override_warehouse_id: Optional[str] = None + """A dashboard schedule can override the warehouse used as compute for processing the published + dashboard queries""" + + def as_dict(self) -> dict: + """Serializes the ExecutePublishedDashboardQueryRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name + if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id + if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecutePublishedDashboardQueryRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name + if 
self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id + if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ExecutePublishedDashboardQueryRequest: + """Deserializes the ExecutePublishedDashboardQueryRequest from a dictionary.""" + return cls(dashboard_name=d.get('dashboard_name', None), + dashboard_revision_id=d.get('dashboard_revision_id', None), + override_warehouse_id=d.get('override_warehouse_id', None)) + + +@dataclass +class ExecuteQueryResponse: + + def as_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ExecuteQueryResponse: + """Deserializes the ExecuteQueryResponse from a dictionary.""" + return cls() + + @dataclass class GenieAttachment: """Genie AI Response""" @@ -265,6 +363,13 @@ def as_dict(self) -> dict: if self.text: body['text'] = self.text.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.query: body['query'] = self.query + if self.text: body['text'] = self.text + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieAttachment: """Deserializes the GenieAttachment from a dictionary.""" @@ -303,6 +408,18 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieConversation into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_timestamp is not None: body['created_timestamp'] = 
self.created_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieConversation: """Deserializes the GenieConversation from a dictionary.""" @@ -333,6 +450,14 @@ def as_dict(self) -> dict: if self.space_id is not None: body['space_id'] = self.space_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.space_id is not None: body['space_id'] = self.space_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest: """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" @@ -353,6 +478,12 @@ def as_dict(self) -> dict: if self.statement_response: body['statement_response'] = self.statement_response.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.statement_response: body['statement_response'] = self.statement_response + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse: """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" @@ -391,8 +522,9 @@ class GenieMessage: status: Optional[MessageStatus] = None """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. 
* `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: - Executing AI provided SQL query. Get the SQL query result by calling + `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: + Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing + AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a @@ -422,6 +554,23 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attachments: body['attachments'] = self.attachments + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.error: body['error'] = self.error + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query_result: body['query_result'] = self.query_result + if self.space_id is not None: body['space_id'] = self.space_id + if self.status is not None: body['status'] = self.status + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieMessage: """Deserializes the GenieMessage from a dictionary.""" @@ -453,6 +602,13 @@ def as_dict(self) -> dict: if self.space_id is not None: body['space_id'] = self.space_id 
return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.space_id is not None: body['space_id'] = self.space_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest: """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" @@ -480,6 +636,15 @@ def as_dict(self) -> dict: if self.message_id is not None: body['message_id'] = self.message_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.conversation: body['conversation'] = self.conversation + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.message: body['message'] = self.message + if self.message_id is not None: body['message_id'] = self.message_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse: """Deserializes the GenieStartConversationResponse from a dictionary.""" @@ -489,6 +654,25 @@ def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse: message_id=d.get('message_id', None)) +@dataclass +class GetPublishedDashboardEmbeddedResponse: + + def as_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetPublishedDashboardEmbeddedResponse: + """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" + return cls() + + class LifecycleState(Enum): 
ACTIVE = 'ACTIVE' @@ -510,6 +694,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboards: body['dashboards'] = self.dashboards + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListDashboardsResponse: """Deserializes the ListDashboardsResponse from a dictionary.""" @@ -532,6 +723,13 @@ def as_dict(self) -> dict: if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schedules: body['schedules'] = self.schedules + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSchedulesResponse: """Deserializes the ListSchedulesResponse from a dictionary.""" @@ -554,6 +752,13 @@ def as_dict(self) -> dict: if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.subscriptions: body['subscriptions'] = self.subscriptions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse: """Deserializes the ListSubscriptionsResponse from a dictionary.""" @@ -574,6 +779,13 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the 
MessageError into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error is not None: body['error'] = self.error + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MessageError: """Deserializes the MessageError from a dictionary.""" @@ -607,6 +819,7 @@ class MessageErrorType(Enum): LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION' MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION' MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION' + NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE' NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION' NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION' RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION' @@ -614,6 +827,7 @@ class MessageErrorType(Enum): REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION' RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION' SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION' + STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE' TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION' TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION' TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION' @@ -626,8 +840,9 @@ class MessageErrorType(Enum): class MessageStatus(Enum): """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: - Executing AI provided SQL query. Get the SQL query result by calling + `ASKING_AI`: Waiting for the LLM to respond to the users question. 
* `PENDING_WAREHOUSE`: + Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing + AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a @@ -644,6 +859,7 @@ class MessageStatus(Enum): FAILED = 'FAILED' FETCHING_METADATA = 'FETCHING_METADATA' FILTERING_CONTEXT = 'FILTERING_CONTEXT' + PENDING_WAREHOUSE = 'PENDING_WAREHOUSE' QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED' SUBMITTED = 'SUBMITTED' @@ -659,12 +875,28 @@ class MigrateDashboardRequest: parent_path: Optional[str] = None """The workspace path of the folder to contain the migrated Lakeview dashboard.""" + update_parameter_syntax: Optional[bool] = None + """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named + syntax (:param) when converting datasets in the dashboard.""" + def as_dict(self) -> dict: """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.display_name is not None: body['display_name'] = self.display_name if self.parent_path is not None: body['parent_path'] = self.parent_path if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id + if self.update_parameter_syntax is not None: + body['update_parameter_syntax'] = self.update_parameter_syntax + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id + if 
self.update_parameter_syntax is not None: + body['update_parameter_syntax'] = self.update_parameter_syntax return body @classmethod @@ -672,7 +904,76 @@ def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest: """Deserializes the MigrateDashboardRequest from a dictionary.""" return cls(display_name=d.get('display_name', None), parent_path=d.get('parent_path', None), - source_dashboard_id=d.get('source_dashboard_id', None)) + source_dashboard_id=d.get('source_dashboard_id', None), + update_parameter_syntax=d.get('update_parameter_syntax', None)) + + +@dataclass +class PendingStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + def as_dict(self) -> dict: + """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: body['data_token'] = self.data_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PendingStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: body['data_token'] = self.data_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PendingStatus: + """Deserializes the PendingStatus from a dictionary.""" + return cls(data_token=d.get('data_token', None)) + + +@dataclass +class PollQueryStatusResponse: + data: Optional[List[PollQueryStatusResponseData]] = None + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data: body['data'] = [v.as_dict() for v in self.data] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: body['data'] = self.data + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 
PollQueryStatusResponse: + """Deserializes the PollQueryStatusResponse from a dictionary.""" + return cls(data=_repeated_dict(d, 'data', PollQueryStatusResponseData)) + + +@dataclass +class PollQueryStatusResponseData: + status: QueryResponseStatus + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: body['status'] = self.status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: body['status'] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponseData: + """Deserializes the PollQueryStatusResponseData from a dictionary.""" + return cls(status=_from_dict(d, 'status', QueryResponseStatus)) @dataclass @@ -695,6 +996,14 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PublishRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PublishRequest: """Deserializes the PublishRequest from a dictionary.""" @@ -726,17 +1035,28 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard: - """Deserializes the PublishedDashboard from a dictionary.""" - return cls(display_name=d.get('display_name', None), - embed_credentials=d.get('embed_credentials', None), - 
revision_create_time=d.get('revision_create_time', None), + def as_shallow_dict(self) -> dict: + """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials + if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard: + """Deserializes the PublishedDashboard from a dictionary.""" + return cls(display_name=d.get('display_name', None), + embed_credentials=d.get('embed_credentials', None), + revision_create_time=d.get('revision_create_time', None), warehouse_id=d.get('warehouse_id', None)) @dataclass class QueryAttachment: + cached_query_schema: Optional[QuerySchema] = None + description: Optional[str] = None """Description of the query""" @@ -755,12 +1075,15 @@ class QueryAttachment: query: Optional[str] = None """AI generated SQL query""" + statement_id: Optional[str] = None + title: Optional[str] = None """Name of the query""" def as_dict(self) -> dict: """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body.""" body = {} + if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema.as_dict() if self.description is not None: body['description'] = self.description if self.id is not None: body['id'] = self.id if self.instruction_id is not None: body['instruction_id'] = self.instruction_id @@ -768,21 +1091,151 @@ def as_dict(self) -> dict: if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp if self.query is not None: body['query'] = self.query + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.title is 
not None: body['title'] = self.title + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryAttachment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.instruction_id is not None: body['instruction_id'] = self.instruction_id + if self.instruction_title is not None: body['instruction_title'] = self.instruction_title + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query is not None: body['query'] = self.query + if self.statement_id is not None: body['statement_id'] = self.statement_id if self.title is not None: body['title'] = self.title return body @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryAttachment: """Deserializes the QueryAttachment from a dictionary.""" - return cls(description=d.get('description', None), + return cls(cached_query_schema=_from_dict(d, 'cached_query_schema', QuerySchema), + description=d.get('description', None), id=d.get('id', None), instruction_id=d.get('instruction_id', None), instruction_title=d.get('instruction_title', None), last_updated_timestamp=d.get('last_updated_timestamp', None), query=d.get('query', None), + statement_id=d.get('statement_id', None), title=d.get('title', None)) +@dataclass +class QueryResponseStatus: + canceled: Optional[Empty] = None + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + closed: Optional[Empty] = None + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + pending: Optional[PendingStatus] = None + + statement_id: Optional[str] = None + """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id 
should be + identical to data_token in SuccessStatus and PendingStatus. This field is created for audit + logging purpose to record the statement_id of all QueryResponseStatus.""" + + success: Optional[SuccessStatus] = None + + def as_dict(self) -> dict: + """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.canceled: body['canceled'] = self.canceled.as_dict() + if self.closed: body['closed'] = self.closed.as_dict() + if self.pending: body['pending'] = self.pending.as_dict() + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.success: body['success'] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canceled: body['canceled'] = self.canceled + if self.closed: body['closed'] = self.closed + if self.pending: body['pending'] = self.pending + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.success: body['success'] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryResponseStatus: + """Deserializes the QueryResponseStatus from a dictionary.""" + return cls(canceled=_from_dict(d, 'canceled', Empty), + closed=_from_dict(d, 'closed', Empty), + pending=_from_dict(d, 'pending', PendingStatus), + statement_id=d.get('statement_id', None), + success=_from_dict(d, 'success', SuccessStatus)) + + +@dataclass +class QuerySchema: + columns: Optional[List[QuerySchemaColumn]] = None + + statement_id: Optional[str] = None + """Used to determine if the stored query schema is compatible with the latest run. 
The service + should always clear the schema when the query is re-executed.""" + + def as_dict(self) -> dict: + """Serializes the QuerySchema into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.statement_id is not None: body['statement_id'] = self.statement_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QuerySchema into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + if self.statement_id is not None: body['statement_id'] = self.statement_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySchema: + """Deserializes the QuerySchema from a dictionary.""" + return cls(columns=_repeated_dict(d, 'columns', QuerySchemaColumn), + statement_id=d.get('statement_id', None)) + + +@dataclass +class QuerySchemaColumn: + name: str + + type_text: str + """Corresponds to type desc""" + + data_type: DataType + """Populated from https://docs.databricks.com/sql/language-manual/sql-ref-datatypes.html""" + + def as_dict(self) -> dict: + """Serializes the QuerySchemaColumn into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_type is not None: body['data_type'] = self.data_type.value + if self.name is not None: body['name'] = self.name + if self.type_text is not None: body['type_text'] = self.type_text + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QuerySchemaColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_type is not None: body['data_type'] = self.data_type + if self.name is not None: body['name'] = self.name + if self.type_text is not None: body['type_text'] = self.type_text + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySchemaColumn: + """Deserializes the QuerySchemaColumn from a dictionary.""" + return 
cls(data_type=_enum(d, 'data_type', DataType), + name=d.get('name', None), + type_text=d.get('type_text', None)) + + @dataclass class Result: is_truncated: Optional[bool] = None @@ -803,6 +1256,14 @@ def as_dict(self) -> dict: if self.statement_id is not None: body['statement_id'] = self.statement_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Result into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_truncated is not None: body['is_truncated'] = self.is_truncated + if self.row_count is not None: body['row_count'] = self.row_count + if self.statement_id is not None: body['statement_id'] = self.statement_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Result: """Deserializes the Result from a dictionary.""" @@ -839,6 +1300,9 @@ class Schedule: update_time: Optional[str] = None """A timestamp indicating when the schedule was last updated.""" + warehouse_id: Optional[str] = None + """The warehouse id to run the dashboard with for the schedule.""" + def as_dict(self) -> dict: """Serializes the Schedule into a dictionary suitable for use as a JSON request body.""" body = {} @@ -850,6 +1314,21 @@ def as_dict(self) -> dict: if self.pause_status is not None: body['pause_status'] = self.pause_status.value if self.schedule_id is not None: body['schedule_id'] = self.schedule_id if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Schedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.cron_schedule: body['cron_schedule'] = self.cron_schedule + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.etag is not None: 
body['etag'] = self.etag + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.schedule_id is not None: body['schedule_id'] = self.schedule_id + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod @@ -862,7 +1341,8 @@ def from_dict(cls, d: Dict[str, any]) -> Schedule: etag=d.get('etag', None), pause_status=_enum(d, 'pause_status', SchedulePauseStatus), schedule_id=d.get('schedule_id', None), - update_time=d.get('update_time', None)) + update_time=d.get('update_time', None), + warehouse_id=d.get('warehouse_id', None)) class SchedulePauseStatus(Enum): @@ -888,6 +1368,13 @@ def as_dict(self) -> dict: if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Subscriber into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber + if self.user_subscriber: body['user_subscriber'] = self.user_subscriber + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Subscriber: """Deserializes the Subscriber from a dictionary.""" @@ -937,6 +1424,19 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the Subscription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.etag is not None: body['etag'] = self.etag + if self.schedule_id is not None: body['schedule_id'] = self.schedule_id + if self.subscriber: body['subscriber'] = 
self.subscriber + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Subscription: """Deserializes the Subscription from a dictionary.""" @@ -961,6 +1461,12 @@ def as_dict(self) -> dict: if self.destination_id is not None: body['destination_id'] = self.destination_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_id is not None: body['destination_id'] = self.destination_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberDestination: """Deserializes the SubscriptionSubscriberDestination from a dictionary.""" @@ -978,12 +1484,47 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes.""" + body = {} + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser: """Deserializes the SubscriptionSubscriberUser from a dictionary.""" return cls(user_id=d.get('user_id', None)) +@dataclass +class SuccessStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + truncated: Optional[bool] = None + """Whether the query result is truncated (either by byte limit or row limit)""" + + def as_dict(self) -> dict: + """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: body['data_token'] = self.data_token + if self.truncated is not None: 
body['truncated'] = self.truncated + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: body['data_token'] = self.data_token + if self.truncated is not None: body['truncated'] = self.truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> SuccessStatus: + """Deserializes the SuccessStatus from a dictionary.""" + return cls(data_token=d.get('data_token', None), truncated=d.get('truncated', None)) + + @dataclass class TextAttachment: content: Optional[str] = None @@ -998,6 +1539,13 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the TextAttachment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" @@ -1012,6 +1560,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the TrashDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TrashDashboardResponse: """Deserializes the TrashDashboardResponse from a dictionary.""" @@ -1026,97 +1579,229 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UnpublishDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UnpublishDashboardResponse: """Deserializes the UnpublishDashboardResponse from a dictionary.""" return cls() -@dataclass -class UpdateDashboardRequest: - dashboard_id: Optional[str] = 
None - """UUID identifying the dashboard.""" +class GenieAPI: + """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that + business users can use to ask questions using natural language. Genie uses data registered to Unity + Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks + Assistant must be enabled.""" - display_name: Optional[str] = None - """The display name of the dashboard.""" + def __init__(self, api_client): + self._api = api_client - etag: Optional[str] = None - """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard - has not been modified since the last read. This field is excluded in List Dashboards responses.""" + def wait_get_message_genie_completed( + self, + conversation_id: str, + message_id: str, + space_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: + deadline = time.time() + timeout.total_seconds() + target_states = (MessageStatus.COMPLETED, ) + failure_states = (MessageStatus.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach COMPLETED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') - serialized_dashboard: Optional[str] = None - """The contents of the dashboard in serialized string form. This field is excluded in List - Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - includes the `serialized_dashboard` field. This field provides the structure of the JSON string - that represents the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get""" + def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: + """Create conversation message. + + Create new message in [conversation](:method:genie/startconversation). The AI response uses all + previously created messages in the conversation to respond. + + :param space_id: str + The ID associated with the Genie space where the conversation is started. + :param conversation_id: str + The ID associated with the conversation. + :param content: str + User message content. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. 
+ """ + body = {} + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - warehouse_id: Optional[str] = None - """The warehouse ID used to run the dashboard.""" + op_response = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieMessage.from_dict(op_response), + conversation_id=conversation_id, + message_id=op_response['id'], + space_id=space_id) - def as_dict(self) -> dict: - """Serializes the UpdateDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id - return body + def create_message_and_wait(self, + space_id: str, + conversation_id: str, + content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.create_message(content=content, conversation_id=conversation_id, + space_id=space_id).result(timeout=timeout) - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateDashboardRequest: - """Deserializes the UpdateDashboardRequest from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), - display_name=d.get('display_name', None), - etag=d.get('etag', None), - serialized_dashboard=d.get('serialized_dashboard', None), - warehouse_id=d.get('warehouse_id', None)) + def execute_message_query(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Execute SQL query in a conversation message. + + Execute the SQL query in the message. 
+ + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + headers = {'Accept': 'application/json', } -@dataclass -class UpdateScheduleRequest: - cron_schedule: CronSchedule - """The cron expression describing the frequency of the periodic refresh for this schedule.""" + res = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) - dashboard_id: Optional[str] = None - """UUID identifying the dashboard to which the schedule belongs.""" + def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: + """Get conversation message. + + Get message from conversation. + + :param space_id: str + The ID associated with the Genie space where the target conversation is located. + :param conversation_id: str + The ID associated with the target conversation. + :param message_id: str + The ID associated with the target message from the identified conversation. + + :returns: :class:`GenieMessage` + """ - display_name: Optional[str] = None - """The display name for schedule.""" + headers = {'Accept': 'application/json', } - etag: Optional[str] = None - """The etag for the schedule. 
Must be left empty on create, must be provided on updates to ensure - that the schedule has not been modified since the last read, and can be optionally provided on - delete.""" + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}', + headers=headers) + return GenieMessage.from_dict(res) - pause_status: Optional[SchedulePauseStatus] = None - """The status indicates whether this schedule is paused or not.""" + def get_message_query_result(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Get conversation message SQL query result. + + Get the result of SQL query if the message has a query attachment. This is only available if a message + has a query attachment and the message status is `EXECUTING_QUERY`. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ - schedule_id: Optional[str] = None - """UUID identifying the schedule.""" + headers = {'Accept': 'application/json', } - def as_dict(self) -> dict: - """Serializes the UpdateScheduleRequest into a dictionary suitable for use as a JSON request body.""" + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def get_message_query_result_by_attachment(self, space_id: str, conversation_id: str, message_id: str, + attachment_id: str) -> GenieGetMessageQueryResultResponse: + """Get conversation message SQL query result by attachment id. + + Get the result of SQL query by attachment id This is only available if a message has a query + attachment and the message status is `EXECUTING_QUERY`. 
+ + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: + """Start conversation. + + Start a new conversation. + + :param space_id: str + The ID associated with the Genie space where you want to start a conversation. + :param content: str + The text of the message that starts the conversation. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. + """ body = {} - if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict() - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.pause_status is not None: body['pause_status'] = self.pause_status.value - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - return body + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest: - """Deserializes the UpdateScheduleRequest from a dictionary.""" - return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule), - dashboard_id=d.get('dashboard_id', None), - display_name=d.get('display_name', None), - etag=d.get('etag', None), - pause_status=_enum(d, 'pause_status', SchedulePauseStatus), - 
schedule_id=d.get('schedule_id', None)) + op_response = self._api.do('POST', + f'/api/2.0/genie/spaces/{space_id}/start-conversation', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieStartConversationResponse.from_dict(op_response), + conversation_id=op_response['conversation_id'], + message_id=op_response['message_id'], + space_id=space_id) + + def start_conversation_and_wait(self, space_id: str, content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) class GenieAPI: @@ -1313,66 +1998,31 @@ class LakeviewAPI: def __init__(self, api_client): self._api = api_client - def create(self, - display_name: str, - *, - parent_path: Optional[str] = None, - serialized_dashboard: Optional[str] = None, - warehouse_id: Optional[str] = None) -> Dashboard: + def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard: """Create dashboard. Create a draft dashboard. - :param display_name: str - The display name of the dashboard. - :param parent_path: str (optional) - The workspace path of the folder containing the dashboard. Includes leading slash and no trailing - slash. This field is excluded in List Dashboards responses. - :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. This field is excluded in List Dashboards - responses. Use the [get dashboard API] to retrieve an example response, which includes the - `serialized_dashboard` field. This field provides the structure of the JSON string that represents - the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - :param warehouse_id: str (optional) - The warehouse ID used to run the dashboard. 
+ :param dashboard: :class:`Dashboard` (optional) :returns: :class:`Dashboard` """ - body = {} - if display_name is not None: body['display_name'] = display_name - if parent_path is not None: body['parent_path'] = parent_path - if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard - if warehouse_id is not None: body['warehouse_id'] = warehouse_id + body = dashboard.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers) return Dashboard.from_dict(res) - def create_schedule(self, - dashboard_id: str, - cron_schedule: CronSchedule, - *, - display_name: Optional[str] = None, - pause_status: Optional[SchedulePauseStatus] = None) -> Schedule: + def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = None) -> Schedule: """Create dashboard schedule. :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. - :param cron_schedule: :class:`CronSchedule` - The cron expression describing the frequency of the periodic refresh for this schedule. - :param display_name: str (optional) - The display name for schedule. - :param pause_status: :class:`SchedulePauseStatus` (optional) - The status indicates whether this schedule is paused or not. 
+ :param schedule: :class:`Schedule` (optional) :returns: :class:`Schedule` """ - body = {} - if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict() - if display_name is not None: body['display_name'] = display_name - if pause_status is not None: body['pause_status'] = pause_status.value + body = schedule.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', @@ -1381,21 +2031,22 @@ def create_schedule(self, headers=headers) return Schedule.from_dict(res) - def create_subscription(self, dashboard_id: str, schedule_id: str, - subscriber: Subscriber) -> Subscription: + def create_subscription(self, + dashboard_id: str, + schedule_id: str, + *, + subscription: Optional[Subscription] = None) -> Subscription: """Create schedule subscription. :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. - :param subscriber: :class:`Subscriber` - Subscriber details for users and destinations to be added as subscribers to the schedule. + :param subscription: :class:`Subscription` (optional) :returns: :class:`Subscription` """ - body = {} - if subscriber is not None: body['subscriber'] = subscriber.as_dict() + body = subscription.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do( @@ -1481,7 +2132,7 @@ def get_published(self, dashboard_id: str) -> PublishedDashboard: Get the current published dashboard. :param dashboard_id: str - UUID identifying the dashboard to be published. + UUID identifying the published dashboard. :returns: :class:`PublishedDashboard` """ @@ -1576,7 +2227,7 @@ def list_schedules(self, """List dashboard schedules. :param dashboard_id: str - UUID identifying the dashboard to which the schedule belongs. + UUID identifying the dashboard to which the schedules belongs. 
:param page_size: int (optional) The number of schedules to return per page. :param page_token: str (optional) @@ -1612,9 +2263,9 @@ def list_subscriptions(self, """List schedule subscriptions. :param dashboard_id: str - UUID identifying the dashboard to which the subscription belongs. + UUID identifying the dashboard which the subscriptions belongs. :param schedule_id: str - UUID identifying the schedule to which the subscription belongs. + UUID identifying the schedule which the subscriptions belongs. :param page_size: int (optional) The number of subscriptions to return per page. :param page_token: str (optional) @@ -1646,7 +2297,8 @@ def migrate(self, source_dashboard_id: str, *, display_name: Optional[str] = None, - parent_path: Optional[str] = None) -> Dashboard: + parent_path: Optional[str] = None, + update_parameter_syntax: Optional[bool] = None) -> Dashboard: """Migrate dashboard. Migrates a classic SQL dashboard to Lakeview. @@ -1657,6 +2309,9 @@ def migrate(self, Display name for the new Lakeview dashboard. :param parent_path: str (optional) The workspace path of the folder to contain the migrated Lakeview dashboard. + :param update_parameter_syntax: bool (optional) + Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax + (:param) when converting datasets in the dashboard. :returns: :class:`Dashboard` """ @@ -1664,6 +2319,7 @@ def migrate(self, if display_name is not None: body['display_name'] = display_name if parent_path is not None: body['parent_path'] = parent_path if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id + if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/lakeview/dashboards/migrate', body=body, headers=headers) @@ -1720,7 +2376,7 @@ def unpublish(self, dashboard_id: str): Unpublish the dashboard. 
:param dashboard_id: str - UUID identifying the dashboard to be published. + UUID identifying the published dashboard. """ @@ -1729,41 +2385,18 @@ def unpublish(self, dashboard_id: str): self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers) - def update(self, - dashboard_id: str, - *, - display_name: Optional[str] = None, - etag: Optional[str] = None, - serialized_dashboard: Optional[str] = None, - warehouse_id: Optional[str] = None) -> Dashboard: + def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) -> Dashboard: """Update dashboard. Update a draft dashboard. :param dashboard_id: str UUID identifying the dashboard. - :param display_name: str (optional) - The display name of the dashboard. - :param etag: str (optional) - The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has - not been modified since the last read. This field is excluded in List Dashboards responses. - :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. This field is excluded in List Dashboards - responses. Use the [get dashboard API] to retrieve an example response, which includes the - `serialized_dashboard` field. This field provides the structure of the JSON string that represents - the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - :param warehouse_id: str (optional) - The warehouse ID used to run the dashboard. 
+ :param dashboard: :class:`Dashboard` (optional) :returns: :class:`Dashboard` """ - body = {} - if display_name is not None: body['display_name'] = display_name - if etag is not None: body['etag'] = etag - if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard - if warehouse_id is not None: body['warehouse_id'] = warehouse_id + body = dashboard.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', @@ -1775,34 +2408,19 @@ def update(self, def update_schedule(self, dashboard_id: str, schedule_id: str, - cron_schedule: CronSchedule, *, - display_name: Optional[str] = None, - etag: Optional[str] = None, - pause_status: Optional[SchedulePauseStatus] = None) -> Schedule: + schedule: Optional[Schedule] = None) -> Schedule: """Update dashboard schedule. :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. - :param cron_schedule: :class:`CronSchedule` - The cron expression describing the frequency of the periodic refresh for this schedule. - :param display_name: str (optional) - The display name for schedule. - :param etag: str (optional) - The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that - the schedule has not been modified since the last read, and can be optionally provided on delete. - :param pause_status: :class:`SchedulePauseStatus` (optional) - The status indicates whether this schedule is paused or not. 
+ :param schedule: :class:`Schedule` (optional) :returns: :class:`Schedule` """ - body = {} - if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict() - if display_name is not None: body['display_name'] = display_name - if etag is not None: body['etag'] = etag - if pause_status is not None: body['pause_status'] = pause_status.value + body = schedule.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PUT', @@ -1810,3 +2428,107 @@ def update_schedule(self, body=body, headers=headers) return Schedule.from_dict(res) + + +class LakeviewEmbeddedAPI: + """Token-based Lakeview APIs for embedding dashboards in external applications.""" + + def __init__(self, api_client): + self._api = api_client + + def get_published_dashboard_embedded(self, dashboard_id: str): + """Read a published dashboard in an embedded ui. + + Get the current published dashboard within an embedded context. + + :param dashboard_id: str + UUID identifying the published dashboard. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('GET', + f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded', + headers=headers) + + +class QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards""" + + def __init__(self, api_client): + self._api = api_client + + def cancel_published_query_execution(self, + dashboard_name: str, + dashboard_revision_id: str, + *, + tokens: Optional[List[str]] = None) -> CancelQueryExecutionResponse: + """Cancel the results for the a query for a published, embedded dashboard. 
+ + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`CancelQueryExecutionResponse` + """ + + query = {} + if dashboard_name is not None: query['dashboard_name'] = dashboard_name + if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id + if tokens is not None: query['tokens'] = [v for v in tokens] + headers = {'Accept': 'application/json', } + + res = self._api.do('DELETE', '/api/2.0/lakeview-query/query/published', query=query, headers=headers) + return CancelQueryExecutionResponse.from_dict(res) + + def execute_published_dashboard_query(self, + dashboard_name: str, + dashboard_revision_id: str, + *, + override_warehouse_id: Optional[str] = None): + """Execute a query for a published dashboard. + + :param dashboard_name: str + Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the + list of datasets, warehouse_id, and embedded_credentials + :param dashboard_revision_id: str + :param override_warehouse_id: str (optional) + A dashboard schedule can override the warehouse used as compute for processing the published + dashboard queries + + + """ + body = {} + if dashboard_name is not None: body['dashboard_name'] = dashboard_name + if dashboard_revision_id is not None: body['dashboard_revision_id'] = dashboard_revision_id + if override_warehouse_id is not None: body['override_warehouse_id'] = override_warehouse_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + self._api.do('POST', '/api/2.0/lakeview-query/query/published', body=body, headers=headers) + + def poll_published_query_status(self, + dashboard_name: str, + dashboard_revision_id: str, + *, + tokens: Optional[List[str]] = None) -> PollQueryStatusResponse: + """Poll the results for the a query for a published, embedded dashboard. 
+ + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`PollQueryStatusResponse` + """ + + query = {} + if dashboard_name is not None: query['dashboard_name'] = dashboard_name + if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id + if tokens is not None: query['tokens'] = [v for v in tokens] + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', '/api/2.0/lakeview-query/query/published', query=query, headers=headers) + return PollQueryStatusResponse.from_dict(res) diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 255e1c1a0..99c252298 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -28,6 +28,13 @@ def as_dict(self) -> dict: if self.handle is not None: body['handle'] = self.handle return body + def as_shallow_dict(self) -> dict: + """Serializes the AddBlock into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data is not None: body['data'] = self.data + if self.handle is not None: body['handle'] = self.handle + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddBlock: """Deserializes the AddBlock from a dictionary.""" @@ -42,6 +49,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the AddBlockResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddBlockResponse: """Deserializes the AddBlockResponse from a dictionary.""" @@ -59,6 +71,12 @@ def as_dict(self) -> dict: if self.handle is not None: body['handle'] = self.handle return body + def as_shallow_dict(self) -> dict: + """Serializes the Close into a shallow dictionary of its immediate attributes.""" + body = {} + if self.handle is 
not None: body['handle'] = self.handle + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Close: """Deserializes the Close from a dictionary.""" @@ -73,6 +91,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CloseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CloseResponse: """Deserializes the CloseResponse from a dictionary.""" @@ -94,6 +117,13 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the Create into a shallow dictionary of its immediate attributes.""" + body = {} + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Create: """Deserializes the Create from a dictionary.""" @@ -108,6 +138,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateDirectoryResponse: """Deserializes the CreateDirectoryResponse from a dictionary.""" @@ -126,6 +161,12 @@ def as_dict(self) -> dict: if self.handle is not None: body['handle'] = self.handle return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.handle is not None: body['handle'] = self.handle + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" @@ -148,6 +189,13 @@ def as_dict(self) -> dict: if self.recursive is not None: body['recursive'] = self.recursive return body + def 
as_shallow_dict(self) -> dict: + """Serializes the Delete into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Delete: """Deserializes the Delete from a dictionary.""" @@ -162,6 +210,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDirectoryResponse: """Deserializes the DeleteDirectoryResponse from a dictionary.""" @@ -176,6 +229,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -209,6 +267,16 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_directory is not None: body['is_directory'] = self.is_directory + if self.last_modified is not None: body['last_modified'] = self.last_modified + if self.name is not None: body['name'] = self.name + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DirectoryEntry: """Deserializes the DirectoryEntry from a dictionary.""" @@ -238,6 +306,15 @@ def as_dict(self) -> dict: if self.last_modified is not None: body['last-modified'] = self.last_modified return body + def 
as_shallow_dict(self) -> dict: + """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content_length is not None: body['content-length'] = self.content_length + if self.content_type is not None: body['content-type'] = self.content_type + if self.contents: body['contents'] = self.contents + if self.last_modified is not None: body['last-modified'] = self.last_modified + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" @@ -270,6 +347,15 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.modification_time is not None: body['modification_time'] = self.modification_time + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" @@ -287,6 +373,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetDirectoryMetadataResponse: """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" @@ -309,6 +400,14 @@ def as_dict(self) -> dict: if self.last_modified is not None: body['last-modified'] = self.last_modified return body + def as_shallow_dict(self) -> dict: + """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content_length is not None: body['content-length'] = self.content_length + if 
self.content_type is not None: body['content-type'] = self.content_type + if self.last_modified is not None: body['last-modified'] = self.last_modified + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetMetadataResponse: """Deserializes the GetMetadataResponse from a dictionary.""" @@ -332,6 +431,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents: body['contents'] = self.contents + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListDirectoryResponse: """Deserializes the ListDirectoryResponse from a dictionary.""" @@ -350,6 +456,12 @@ def as_dict(self) -> dict: if self.files: body['files'] = [v.as_dict() for v in self.files] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.files: body['files'] = self.files + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListStatusResponse: """Deserializes the ListStatusResponse from a dictionary.""" @@ -367,6 +479,12 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the MkDirs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MkDirs: """Deserializes the MkDirs from a dictionary.""" @@ -381,6 +499,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the MkDirsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MkDirsResponse: """Deserializes the MkDirsResponse from a dictionary.""" @@ -402,6 +525,13 @@ def as_dict(self) -> dict: if self.source_path is not None: body['source_path'] = self.source_path return body + def as_shallow_dict(self) -> dict: + """Serializes the Move into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_path is not None: body['destination_path'] = self.destination_path + if self.source_path is not None: body['source_path'] = self.source_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Move: """Deserializes the Move from a dictionary.""" @@ -416,6 +546,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the MoveResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MoveResponse: """Deserializes the MoveResponse from a dictionary.""" @@ -441,6 +576,14 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the Put into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents is not None: body['contents'] = self.contents + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Put: """Deserializes the Put from a dictionary.""" @@ -457,6 +600,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PutResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" @@ -479,6 +627,13 @@ def as_dict(self) -> dict: if self.data is 
not None: body['data'] = self.data return body + def as_shallow_dict(self) -> dict: + """Serializes the ReadResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bytes_read is not None: body['bytes_read'] = self.bytes_read + if self.data is not None: body['data'] = self.data + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ReadResponse: """Deserializes the ReadResponse from a dictionary.""" @@ -493,6 +648,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UploadResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UploadResponse: """Deserializes the UploadResponse from a dictionary.""" @@ -765,9 +925,12 @@ class FilesAPI: /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for - working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD, - PUT, and DELETE to manage files and directories specified using their URI path. The path is always - absolute. + working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, + and DELETE to manage files and directories specified using their URI path. The path is always absolute. + + Some Files API client features are currently experimental. To enable them, set + `enable_experimental_files_api_client = True` in your configuration profile or use the environment + variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html""" @@ -833,8 +996,8 @@ def delete_directory(self, directory_path: str): def download(self, file_path: str) -> DownloadResponse: """Download a file. - Downloads a file of up to 5 GiB. 
The file contents are the response body. This is a standard HTTP file - download, not a JSON RPC. + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not + a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. :param file_path: str The absolute path of the file. diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index f1c56a1a9..b841bec8b 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -38,6 +38,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccessControlRequest: """Deserializes the AccessControlRequest from a dictionary.""" @@ -75,6 +85,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> AccessControlResponse: """Deserializes the AccessControlResponse from a dictionary.""" @@ -85,6 +106,58 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse: user_name=d.get('user_name', None)) +@dataclass +class Actor: + """represents an identity trying to access a resource - user or a service principal group can be a + principal of a permission set assignment but an actor is always a user or a service principal""" + + actor_id: Optional[int] = None + + def as_dict(self) -> dict: + """Serializes the Actor into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.actor_id is not None: body['actor_id'] = self.actor_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Actor into a shallow dictionary of its immediate attributes.""" + body = {} + if self.actor_id is not None: body['actor_id'] = self.actor_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Actor: + """Deserializes the Actor from a dictionary.""" + return cls(actor_id=d.get('actor_id', None)) + + +@dataclass +class CheckPolicyResponse: + consistency_token: ConsistencyToken + + is_permitted: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict() + if self.is_permitted is not None: body['is_permitted'] = self.is_permitted + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.consistency_token: body['consistency_token'] = self.consistency_token + if self.is_permitted is not None: body['is_permitted'] = self.is_permitted + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CheckPolicyResponse: + """Deserializes the CheckPolicyResponse from a dictionary.""" + return 
cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken), + is_permitted=d.get('is_permitted', None)) + + @dataclass class ComplexValue: display: Optional[str] = None @@ -107,6 +180,16 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ComplexValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display is not None: body['display'] = self.display + if self.primary is not None: body['primary'] = self.primary + if self.ref is not None: body['$ref'] = self.ref + if self.type is not None: body['type'] = self.type + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ComplexValue: """Deserializes the ComplexValue from a dictionary.""" @@ -117,6 +200,28 @@ def from_dict(cls, d: Dict[str, any]) -> ComplexValue: value=d.get('value', None)) +@dataclass +class ConsistencyToken: + value: str + + def as_dict(self) -> dict: + """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ConsistencyToken: + """Deserializes the ConsistencyToken from a dictionary.""" + return cls(value=d.get('value', None)) + + @dataclass class DeleteResponse: @@ -125,6 +230,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from 
a dictionary.""" @@ -139,6 +249,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteWorkspacePermissionAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse: """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary.""" @@ -155,6 +270,12 @@ def as_dict(self) -> dict: if self.roles: body['roles'] = [v.as_dict() for v in self.roles] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.roles: body['roles'] = self.roles + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetAssignableRolesForResourceResponse: """Deserializes the GetAssignableRolesForResourceResponse from a dictionary.""" @@ -172,6 +293,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPasswordPermissionLevelsResponse: """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary.""" @@ -189,6 +316,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> GetPermissionLevelsResponse: """Deserializes the GetPermissionLevelsResponse from a dictionary.""" @@ -216,6 +349,13 @@ def as_dict(self) -> dict: if self.role is not None: body['role'] = self.role return body + def as_shallow_dict(self) -> dict: + """Serializes the GrantRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principals: body['principals'] = self.principals + if self.role is not None: body['role'] = self.role + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GrantRule: """Deserializes the GrantRule from a dictionary.""" @@ -265,6 +405,20 @@ def as_dict(self) -> dict: if self.schemas: body['schemas'] = [v.value for v in self.schemas] return body + def as_shallow_dict(self) -> dict: + """Serializes the Group into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['displayName'] = self.display_name + if self.entitlements: body['entitlements'] = self.entitlements + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = self.groups + if self.id is not None: body['id'] = self.id + if self.members: body['members'] = self.members + if self.meta: body['meta'] = self.meta + if self.roles: body['roles'] = self.roles + if self.schemas: body['schemas'] = self.schemas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Group: """Deserializes the Group from a dictionary.""" @@ -311,6 +465,16 @@ def as_dict(self) -> dict: if self.total_results is not None: body['totalResults'] = self.total_results return body + def as_shallow_dict(self) -> dict: + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page + if self.resources: body['Resources'] = self.resources + if self.schemas: body['schemas'] = self.schemas + if self.start_index 
is not None: body['startIndex'] = self.start_index + if self.total_results is not None: body['totalResults'] = self.total_results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListGroupsResponse: """Deserializes the ListGroupsResponse from a dictionary.""" @@ -353,6 +517,16 @@ def as_dict(self) -> dict: if self.total_results is not None: body['totalResults'] = self.total_results return body + def as_shallow_dict(self) -> dict: + """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page + if self.resources: body['Resources'] = self.resources + if self.schemas: body['schemas'] = self.schemas + if self.start_index is not None: body['startIndex'] = self.start_index + if self.total_results is not None: body['totalResults'] = self.total_results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalResponse: """Deserializes the ListServicePrincipalResponse from a dictionary.""" @@ -396,6 +570,16 @@ def as_dict(self) -> dict: if self.total_results is not None: body['totalResults'] = self.total_results return body + def as_shallow_dict(self) -> dict: + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page + if self.resources: body['Resources'] = self.resources + if self.schemas: body['schemas'] = self.schemas + if self.start_index is not None: body['startIndex'] = self.start_index + if self.total_results is not None: body['totalResults'] = self.total_results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse: """Deserializes the ListUsersResponse from a dictionary.""" @@ -430,6 +614,16 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def 
as_shallow_dict(self) -> dict: + """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.from_workspace_group_name is not None: + body['from_workspace_group_name'] = self.from_workspace_group_name + if self.size is not None: body['size'] = self.size + if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest: """Deserializes the MigratePermissionsRequest from a dictionary.""" @@ -450,6 +644,12 @@ def as_dict(self) -> dict: if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated return body + def as_shallow_dict(self) -> dict: + """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse: """Deserializes the MigratePermissionsResponse from a dictionary.""" @@ -471,6 +671,13 @@ def as_dict(self) -> dict: if self.given_name is not None: body['givenName'] = self.given_name return body + def as_shallow_dict(self) -> dict: + """Serializes the Name into a shallow dictionary of its immediate attributes.""" + body = {} + if self.family_name is not None: body['familyName'] = self.family_name + if self.given_name is not None: body['givenName'] = self.given_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Name: """Deserializes the Name from a dictionary.""" @@ -494,6 +701,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ObjectPermissions into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ObjectPermissions: """Deserializes the ObjectPermissions from a dictionary.""" @@ -520,6 +735,14 @@ def as_dict(self) -> dict: if self.schemas: body['schemas'] = [v.value for v in self.schemas] return body + def as_shallow_dict(self) -> dict: + """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.operations: body['Operations'] = self.operations + if self.schemas: body['schemas'] = self.schemas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PartialUpdate: """Deserializes the PartialUpdate from a dictionary.""" @@ -552,6 +775,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlRequest: """Deserializes the PasswordAccessControlRequest from a dictionary.""" @@ -589,6 +822,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordAccessControlResponse 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlResponse: """Deserializes the PasswordAccessControlResponse from a dictionary.""" @@ -616,6 +860,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordPermission: """Deserializes the PasswordPermission from a dictionary.""" @@ -647,6 +899,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordPermissions: """Deserializes the PasswordPermissions from a 
dictionary.""" @@ -670,6 +930,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsDescription: """Deserializes the PasswordPermissionsDescription from a dictionary.""" @@ -688,6 +955,12 @@ def as_dict(self) -> dict: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] return body + def as_shallow_dict(self) -> dict: + """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsRequest: """Deserializes the PasswordPermissionsRequest from a dictionary.""" @@ -713,6 +986,14 @@ def as_dict(self) -> dict: if self.value: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Patch into a shallow dictionary of its immediate attributes.""" + body = {} + if self.op is not None: body['op'] = self.op + if self.path is not None: body['path'] = self.path + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Patch: """Deserializes the Patch from a dictionary.""" @@ -735,6 +1016,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PatchResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
PatchResponse: """Deserializes the PatchResponse from a dictionary.""" @@ -763,6 +1049,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Permission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Permission: """Deserializes the Permission from a dictionary.""" @@ -793,6 +1087,14 @@ def as_dict(self) -> dict: if self.principal: body['principal'] = self.principal.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error is not None: body['error'] = self.error + if self.permissions: body['permissions'] = self.permissions + if self.principal: body['principal'] = self.principal + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionAssignment: """Deserializes the PermissionAssignment from a dictionary.""" @@ -813,6 +1115,12 @@ def as_dict(self) -> dict: body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments] return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_assignments: body['permission_assignments'] = self.permission_assignments + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionAssignments: """Deserializes the PermissionAssignments from a dictionary.""" @@ -855,6 +1163,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: 
body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionOutput: """Deserializes the PermissionOutput from a dictionary.""" @@ -876,6 +1191,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionsDescription: """Deserializes the PermissionsDescription from a dictionary.""" @@ -904,6 +1226,14 @@ def as_dict(self) -> dict: if self.request_object_type is not None: body['request_object_type'] = self.request_object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the PermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.request_object_id is not None: body['request_object_id'] = self.request_object_id + if self.request_object_type is not None: body['request_object_type'] = self.request_object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest: """Deserializes the PermissionsRequest from a dictionary.""" @@ -942,6 +1272,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return 
body + def as_shallow_dict(self) -> dict: + """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput: """Deserializes the PrincipalOutput from a dictionary.""" @@ -952,6 +1293,49 @@ def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput: user_name=d.get('user_name', None)) + +class RequestAuthzIdentity(Enum): + """Defines the identity to be used for authZ of the request on the server side. See one pager + for more information: http://go/acl/service-identity""" + + REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY' + REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT' + + +@dataclass +class ResourceInfo: + id: str + """Id of the current resource.""" + + legacy_acl_path: Optional[str] = None + """The legacy acl path of the current resource.""" + + parent_resource_info: Optional[ResourceInfo] = None + """Parent resource info for the current resource. 
The parent may have another parent.""" + + def as_dict(self) -> dict: + """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path + if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path + if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ResourceInfo: + """Deserializes the ResourceInfo from a dictionary.""" + return cls(id=d.get('id', None), + legacy_acl_path=d.get('legacy_acl_path', None), + parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo)) + + @dataclass class ResourceMeta: resource_type: Optional[str] = None @@ -964,6 +1348,12 @@ def as_dict(self) -> dict: if self.resource_type is not None: body['resourceType'] = self.resource_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes.""" + body = {} + if self.resource_type is not None: body['resourceType'] = self.resource_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResourceMeta: """Deserializes the ResourceMeta from a dictionary.""" @@ -981,6 +1371,12 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the Role into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> Role: """Deserializes the Role from a dictionary.""" @@ -1005,6 +1401,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = self.grant_rules + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RuleSetResponse: """Deserializes the RuleSetResponse from a dictionary.""" @@ -1032,6 +1436,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = self.grant_rules + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RuleSetUpdateRequest: """Deserializes the RuleSetUpdateRequest from a dictionary.""" @@ -1084,6 +1496,20 @@ def as_dict(self) -> dict: if self.schemas: body['schemas'] = [v.value for v in self.schemas] return body + def as_shallow_dict(self) -> dict: + """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes.""" + body = {} + if self.active is not None: body['active'] = self.active + if self.application_id is not None: body['applicationId'] = self.application_id + if self.display_name is not None: body['displayName'] = self.display_name + if self.entitlements: body['entitlements'] = self.entitlements + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = self.groups + if self.id is not None: body['id'] = self.id + if self.roles: body['roles'] = 
self.roles + if self.schemas: body['schemas'] = self.schemas + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServicePrincipal: """Deserializes the ServicePrincipal from a dictionary.""" @@ -1111,6 +1537,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -1131,6 +1562,13 @@ def as_dict(self) -> dict: if self.rule_set: body['rule_set'] = self.rule_set.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.rule_set: body['rule_set'] = self.rule_set + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest: """Deserializes the UpdateRuleSetRequest from a dictionary.""" @@ -1160,6 +1598,14 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permissions: body['permissions'] = self.permissions + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceAssignments: """Deserializes the UpdateWorkspaceAssignments from a dictionary.""" @@ -1225,6 +1671,22 @@ def as_dict(self) -> dict: if self.user_name is not None: body['userName'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the User into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.active is not None: body['active'] = self.active + if self.display_name is not None: body['displayName'] = self.display_name + if self.emails: body['emails'] = self.emails + if self.entitlements: body['entitlements'] = self.entitlements + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = self.groups + if self.id is not None: body['id'] = self.id + if self.name: body['name'] = self.name + if self.roles: body['roles'] = self.roles + if self.schemas: body['schemas'] = self.schemas + if self.user_name is not None: body['userName'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> User: """Deserializes the User from a dictionary.""" @@ -1265,12 +1727,59 @@ def as_dict(self) -> dict: if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions] return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permissions: body['permissions'] = self.permissions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspacePermissions: """Deserializes the WorkspacePermissions from a dictionary.""" return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput)) +class AccessControlAPI: + """Rule based Access Control for Databricks Resources.""" + + def __init__(self, api_client): + self._api = api_client + + def check_policy(self, + actor: Actor, + permission: str, + resource: str, + consistency_token: ConsistencyToken, + authz_identity: RequestAuthzIdentity, + *, + resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse: + """Check access policy to a resource. 
+ + :param actor: :class:`Actor` + :param permission: str + :param resource: str + Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex: + (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default) + :param consistency_token: :class:`ConsistencyToken` + :param authz_identity: :class:`RequestAuthzIdentity` + :param resource_info: :class:`ResourceInfo` (optional) + + :returns: :class:`CheckPolicyResponse` + """ + + query = {} + if actor is not None: query['actor'] = actor.as_dict() + if authz_identity is not None: query['authz_identity'] = authz_identity.value + if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict() + if permission is not None: query['permission'] = permission + if resource is not None: query['resource'] = resource + if resource_info is not None: query['resource_info'] = resource_info.as_dict() + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', '/api/2.0/access-control/check-policy-v2', query=query, headers=headers) + return CheckPolicyResponse.from_dict(res) + + class AccountAccessControlAPI: """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is @@ -2643,7 +3152,8 @@ def set(self, access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions: """Set object permissions. - Sets permissions on an object. Objects can inherit permissions from their parent objects or root + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root object. :param request_object_type: str @@ -3205,7 +3715,8 @@ def set_permissions( access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions: """Set password permissions. 
- Sets permissions on all passwords. Passwords can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 56c4a838e..8220a0715 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -35,6 +35,11 @@ class BaseJob: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" + has_more: Optional[bool] = None + """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list + requests with `expand_tasks=true`.""" + job_id: Optional[int] = None """The canonical identifier for this job.""" @@ -49,16 +54,30 @@ def as_dict(self) -> dict: if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more if self.job_id is not None: body['job_id'] = self.job_id if self.settings: body['settings'] = self.settings.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the BaseJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body['effective_budget_policy_id'] = self.effective_budget_policy_id + 
if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.settings: body['settings'] = self.settings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BaseJob: """Deserializes the BaseJob from a dictionary.""" return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), + has_more=d.get('has_more', None), job_id=d.get('job_id', None), settings=_from_dict(d, 'settings', JobSettings)) @@ -92,6 +111,12 @@ class BaseRun: description: Optional[str] = None """Description of the run""" + effective_performance_target: Optional[PerformanceTarget] = None + """effective_performance_target is the actual performance target used by the run during execution. + effective_performance_target can differ from performance_target depending on if the job was + eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically + override the value for the run (ex. RunNow).""" + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" @@ -113,10 +138,16 @@ class BaseRun: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" + has_more: Optional[bool] = None + """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + :method:jobs/listruns requests with `expand_tasks=true`.""" + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. 
You must declare dependent libraries in - task settings.""" + task settings. If more than 100 job clusters are available, you can paginate through them using + :method:jobs/getrun.""" job_id: Optional[int] = None """The canonical identifier of the job that contains this run.""" @@ -187,7 +218,9 @@ class BaseRun: tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call - `JobsGetOutput` to retrieve the run resutls.""" + `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can + paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object + root to determine if more results are available.""" trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -198,7 +231,8 @@ class BaseRun: previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is - triggered by a table update.
* `CONTINUOUS_RESTART`: Indicates a run created by user to manually + restart a continuous job run.""" trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" @@ -212,9 +246,12 @@ def as_dict(self) -> dict: if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: + body['effective_performance_target'] = self.effective_performance_target.value if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.has_more is not None: body['has_more'] = self.has_more if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] @@ -240,6 +277,46 @@ def as_dict(self) -> dict: if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the BaseRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: + body['effective_performance_target'] = 
self.effective_performance_target + if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: + body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = self.repair_history + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type is not None: body['run_type'] = self.run_type + if self.schedule: body['schedule'] = self.schedule + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.tasks: body['tasks'] = self.tasks + if self.trigger is not None: body['trigger'] = self.trigger + if self.trigger_info: body['trigger_info'] = self.trigger_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BaseRun: """Deserializes the BaseRun from a dictionary.""" @@ -249,9 +326,11 @@ def 
from_dict(cls, d: Dict[str, any]) -> BaseRun: cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), + effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), + has_more=d.get('has_more', None), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), @@ -292,6 +371,13 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs + if self.job_id is not None: body['job_id'] = self.job_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelAllRuns: """Deserializes the CancelAllRuns from a dictionary.""" @@ -306,6 +392,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelAllRunsResponse: """Deserializes the CancelAllRunsResponse from a dictionary.""" @@ -323,6 +414,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelRun: """Deserializes the CancelRun from a dictionary.""" @@ -337,12 
+434,162 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelRunResponse: """Deserializes the CancelRunResponse from a dictionary.""" return cls() +class CleanRoomTaskRunLifeCycleState(Enum): + """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to + remove coupling with jobs API definition""" + + BLOCKED = 'BLOCKED' + INTERNAL_ERROR = 'INTERNAL_ERROR' + PENDING = 'PENDING' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED' + SKIPPED = 'SKIPPED' + TERMINATED = 'TERMINATED' + TERMINATING = 'TERMINATING' + WAITING_FOR_RETRY = 'WAITING_FOR_RETRY' + + +class CleanRoomTaskRunResultState(Enum): + """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid + cyclic dependency.""" + + CANCELED = 'CANCELED' + DISABLED = 'DISABLED' + EVICTED = 'EVICTED' + EXCLUDED = 'EXCLUDED' + FAILED = 'FAILED' + MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED' + RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED' + SUCCESS = 'SUCCESS' + SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' + TIMEDOUT = 'TIMEDOUT' + UPSTREAM_CANCELED = 'UPSTREAM_CANCELED' + UPSTREAM_EVICTED = 'UPSTREAM_EVICTED' + UPSTREAM_FAILED = 'UPSTREAM_FAILED' + + +@dataclass +class CleanRoomTaskRunState: + """Stores the run state of the clean rooms notebook task.""" + + life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None + """A value indicating the run's current lifecycle state. This field is always available in the + response.""" + + result_state: Optional[CleanRoomTaskRunResultState] = None + """A value indicating the run's result. 
This field is only available for terminal lifecycle states.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value + if self.result_state is not None: body['result_state'] = self.result_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state + if self.result_state is not None: body['result_state'] = self.result_state + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomTaskRunState: + """Deserializes the CleanRoomTaskRunState from a dictionary.""" + return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState), + result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState)) + + +@dataclass +class CleanRoomsNotebookTask: + clean_room_name: str + """The clean room that the notebook belongs to.""" + + notebook_name: str + """Name of the notebook being run.""" + + etag: Optional[str] = None + """Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the + latest version). 
It can be fetched by calling the :method:cleanroomassets/get API.""" + + notebook_base_parameters: Optional[Dict[str, str]] = None + """Base parameters to be used for the clean room notebook job.""" + + def as_dict(self) -> dict: + """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.etag is not None: body['etag'] = self.etag + if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.etag is not None: body['etag'] = self.etag + if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTask: + """Deserializes the CleanRoomsNotebookTask from a dictionary.""" + return cls(clean_room_name=d.get('clean_room_name', None), + etag=d.get('etag', None), + notebook_base_parameters=d.get('notebook_base_parameters', None), + notebook_name=d.get('notebook_name', None)) + + +@dataclass +class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: + clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None + """The run state of the clean rooms notebook task.""" + + notebook_output: Optional[NotebookOutput] = None + """The notebook output for the clean room run""" + + output_schema_info: Optional[OutputSchemaInfo] = None + """Information on how to access the output schema for the clean room run""" + + def as_dict(self) -> dict: + 
"""Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.clean_room_job_run_state: + body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict() + if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict() + if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state + if self.notebook_output: body['notebook_output'] = self.notebook_output + if self.output_schema_info: body['output_schema_info'] = self.output_schema_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: + """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary.""" + return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState), + notebook_output=_from_dict(d, 'notebook_output', NotebookOutput), + output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo)) + + @dataclass class ClusterInstance: cluster_id: Optional[str] = None @@ -369,6 +616,13 @@ def as_dict(self) -> dict: if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterInstance: """Deserializes the ClusterInstance 
from a dictionary.""" @@ -402,6 +656,15 @@ def as_dict(self) -> dict: if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterSpec: """Deserializes the ClusterSpec from a dictionary.""" @@ -446,6 +709,14 @@ def as_dict(self) -> dict: if self.right is not None: body['right'] = self.right return body + def as_shallow_dict(self) -> dict: + """Serializes the ConditionTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op + if self.right is not None: body['right'] = self.right + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ConditionTask: """Deserializes the ConditionTask from a dictionary.""" @@ -482,6 +753,12 @@ def as_dict(self) -> dict: if self.pause_status is not None: body['pause_status'] = self.pause_status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Continuous into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pause_status is not None: body['pause_status'] = self.pause_status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Continuous: """Deserializes the Continuous from a dictionary.""" @@ -545,7 +822,8 @@ class CreateJob: job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. 
Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - task settings.""" + task settings. If more than 100 job clusters are available, you can paginate through them using + :method:jobs/get.""" max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to @@ -567,15 +845,18 @@ class CreateJob: parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" + performance_target: Optional[PerformanceTarget] = None + """PerformanceTarget defines how performant or cost efficient the execution of run on serverless + should be.""" + queue: Optional[QueueSettings] = None """The queue settings of the job.""" run_as: Optional[JobRunAs] = None - """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs - as. If not specified, the job/pipeline runs as the user who created the job/pipeline. + """Write-only setting. Specifies the user or service principal that the job runs as. If not + specified, the job runs as the user who created the job. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, - an error is thrown.""" + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -587,7 +868,9 @@ class CreateJob: be added to the job.""" tasks: Optional[List[Task]] = None - """A list of task specifications to be executed by this job.""" + """A list of task specifications to be executed by this job. If more than 100 tasks are available, + you can paginate through them using :method:jobs/get. 
Use the `next_page_token` field at the + object root to determine if more results are available.""" timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" @@ -620,6 +903,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: body['performance_target'] = self.performance_target.value if self.queue: body['queue'] = self.queue.as_dict() if self.run_as: body['run_as'] = self.run_as.as_dict() if self.schedule: body['schedule'] = self.schedule.as_dict() @@ -630,6 +914,36 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environments: body['environments'] = self.environments + if self.format is not None: body['format'] = self.format + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if 
self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.parameters: body['parameters'] = self.parameters + if self.performance_target is not None: body['performance_target'] = self.performance_target + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as + if self.schedule: body['schedule'] = self.schedule + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateJob: """Deserializes the CreateJob from a dictionary.""" @@ -649,6 +963,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateJob: name=d.get('name', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), + performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), @@ -672,6 +987,12 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" @@ -704,6 +1025,15 @@ def as_dict(self) -> dict: if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the 
CronSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.quartz_cron_expression is not None: + body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" @@ -728,6 +1058,13 @@ def as_dict(self) -> dict: if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link return body + def as_shallow_dict(self) -> dict: + """Serializes the DbtOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers + if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DbtOutput: """Deserializes the DbtOutput from a dictionary.""" @@ -784,6 +1121,18 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DbtTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog is not None: body['catalog'] = self.catalog + if self.commands: body['commands'] = self.commands + if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory + if self.project_directory is not None: body['project_directory'] = self.project_directory + if self.schema is not None: body['schema'] = self.schema + if self.source is not None: body['source'] = self.source + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DbtTask: """Deserializes the DbtTask from a dictionary.""" @@ -807,6 +1156,12 @@ def as_dict(self) -> 
dict: if self.job_id is not None: body['job_id'] = self.job_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteJob: """Deserializes the DeleteJob from a dictionary.""" @@ -821,6 +1176,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -838,6 +1198,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRun: """Deserializes the DeleteRun from a dictionary.""" @@ -852,6 +1218,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse: """Deserializes the DeleteRunResponse from a dictionary.""" @@ -884,6 +1255,14 @@ def as_dict(self) -> dict: if self.previous_value is not None: body['previous_value'] = self.previous_value return body + def as_shallow_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.field is not None: body['field'] = self.field + if self.new_value is not None: 
body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary.""" @@ -907,6 +1286,13 @@ def as_dict(self) -> dict: if self.validate_only is not None: body['validate_only'] = self.validate_only return body + def as_shallow_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest: """Deserializes the EnforcePolicyComplianceRequest from a dictionary.""" @@ -938,6 +1324,14 @@ def as_dict(self) -> dict: if self.settings: body['settings'] = self.settings.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes + if self.settings: body['settings'] = self.settings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse: """Deserializes the EnforcePolicyComplianceResponse from a dictionary.""" @@ -964,6 +1358,12 @@ def as_dict(self) -> dict: if self.views: body['views'] = [v.as_dict() for v in self.views] return body + def as_shallow_dict(self) -> dict: + """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.views: body['views'] = self.views + return body + @classmethod def from_dict(cls, 
d: Dict[str, any]) -> ExportRunOutput: """Deserializes the ExportRunOutput from a dictionary.""" @@ -995,6 +1395,16 @@ def as_dict(self) -> dict: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body + def as_shallow_dict(self) -> dict: + """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.min_time_between_triggers_seconds is not None: + body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.url is not None: body['url'] = self.url + if self.wait_after_last_change_seconds is not None: + body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileArrivalTriggerConfiguration: """Deserializes the FileArrivalTriggerConfiguration from a dictionary.""" @@ -1019,6 +1429,13 @@ def as_dict(self) -> dict: if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ForEachStats into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error_message_stats: body['error_message_stats'] = self.error_message_stats + if self.task_run_stats: body['task_run_stats'] = self.task_run_stats + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ForEachStats: """Deserializes the ForEachStats from a dictionary.""" @@ -1046,6 +1463,14 @@ def as_dict(self) -> dict: if self.task: body['task'] = self.task.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ForEachTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.task: body['task'] = self.task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ForEachTask: """Deserializes the 
ForEachTask from a dictionary.""" @@ -1073,6 +1498,14 @@ def as_dict(self) -> dict: if self.termination_category is not None: body['termination_category'] = self.termination_category return body + def as_shallow_dict(self) -> dict: + """Serializes the ForEachTaskErrorMessageStats into a shallow dictionary of its immediate attributes.""" + body = {} + if self.count is not None: body['count'] = self.count + if self.error_message is not None: body['error_message'] = self.error_message + if self.termination_category is not None: body['termination_category'] = self.termination_category + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats: """Deserializes the ForEachTaskErrorMessageStats from a dictionary.""" @@ -1112,6 +1545,17 @@ def as_dict(self) -> dict: if self.total_iterations is not None: body['total_iterations'] = self.total_iterations return body + def as_shallow_dict(self) -> dict: + """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes.""" + body = {} + if self.active_iterations is not None: body['active_iterations'] = self.active_iterations + if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations + if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations + if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations + if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations + if self.total_iterations is not None: body['total_iterations'] = self.total_iterations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ForEachTaskTaskRunStats: """Deserializes the ForEachTaskTaskRunStats from a dictionary.""" @@ -1140,6 +1584,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse: """Deserializes the GetJobPermissionLevelsResponse from a dictionary.""" @@ -1166,6 +1616,13 @@ def as_dict(self) -> dict: if self.violations: body['violations'] = self.violations return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse: """Deserializes the GetPolicyComplianceResponse from a dictionary.""" @@ -1207,6 +1664,12 @@ def as_dict(self) -> dict: if self.used_commit is not None: body['used_commit'] = self.used_commit return body + def as_shallow_dict(self) -> dict: + """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes.""" + body = {} + if self.used_commit is not None: body['used_commit'] = self.used_commit + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GitSnapshot: """Deserializes the GitSnapshot from a dictionary.""" @@ -1261,6 +1724,18 @@ def as_dict(self) -> dict: if self.job_source: body['job_source'] = self.job_source.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GitSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.git_branch is not None: body['git_branch'] = self.git_branch + if self.git_commit is not None: body['git_commit'] = self.git_commit + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_snapshot: body['git_snapshot'] = self.git_snapshot + if 
self.git_tag is not None: body['git_tag'] = self.git_tag + if self.git_url is not None: body['git_url'] = self.git_url + if self.job_source: body['job_source'] = self.job_source + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GitSource: """Deserializes the GitSource from a dictionary.""" @@ -1290,9 +1765,17 @@ class Job: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" + has_more: Optional[bool] = None + """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list + requests with `expand_tasks=true`.""" + job_id: Optional[int] = None """The canonical identifier for this job.""" + next_page_token: Optional[str] = None + """A token that can be used to list the next page of sub-resources.""" + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. 
This value can be changed by setting the `run_as` field when creating or updating a @@ -1313,18 +1796,36 @@ def as_dict(self) -> dict: if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more if self.job_id is not None: body['job_id'] = self.job_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name if self.settings: body['settings'] = self.settings.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Job into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.settings: body['settings'] = self.settings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Job: """Deserializes the Job from a dictionary.""" return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), + has_more=d.get('has_more', None), job_id=d.get('job_id', None), + next_page_token=d.get('next_page_token', None), run_as_user_name=d.get('run_as_user_name', None), settings=_from_dict(d, 'settings', JobSettings)) @@ 
-1353,6 +1854,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobAccessControlRequest: """Deserializes the JobAccessControlRequest from a dictionary.""" @@ -1390,6 +1901,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobAccessControlResponse: """Deserializes the JobAccessControlResponse from a dictionary.""" @@ -1417,6 +1939,13 @@ def as_dict(self) -> dict: if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the JobCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.new_cluster: 
body['new_cluster'] = self.new_cluster + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobCluster: """Deserializes the JobCluster from a dictionary.""" @@ -1446,6 +1975,14 @@ def as_dict(self) -> dict: if self.violations: body['violations'] = self.violations return body + def as_shallow_dict(self) -> dict: + """Serializes the JobCompliance into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.job_id is not None: body['job_id'] = self.job_id + if self.violations: body['violations'] = self.violations + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobCompliance: """Deserializes the JobCompliance from a dictionary.""" @@ -1471,6 +2008,13 @@ def as_dict(self) -> dict: if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body + def as_shallow_dict(self) -> dict: + """Serializes the JobDeployment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.kind is not None: body['kind'] = self.kind + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobDeployment: """Deserializes the JobDeployment from a dictionary.""" @@ -1545,6 +2089,20 @@ def as_dict(self) -> dict: if self.on_success: body['on_success'] = [v for v in self.on_success] return body + def as_shallow_dict(self) -> dict: + """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes.""" + body = {} + if self.no_alert_for_skipped_runs is not None: + body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: + body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = 
self.on_start + if self.on_streaming_backlog_exceeded: + body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobEmailNotifications: """Deserializes the JobEmailNotifications from a dictionary.""" @@ -1573,6 +2131,13 @@ def as_dict(self) -> dict: if self.spec: body['spec'] = self.spec.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.spec: body['spec'] = self.spec + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobEnvironment: """Deserializes the JobEnvironment from a dictionary.""" @@ -1599,6 +2164,15 @@ def as_dict(self) -> dict: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs return body + def as_shallow_dict(self) -> dict: + """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.no_alert_for_canceled_runs is not None: + body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: + body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobNotificationSettings: """Deserializes the JobNotificationSettings from a dictionary.""" @@ -1617,8 +2191,16 @@ class JobParameter: value: Optional[str] = None """The value used in the run""" - def as_dict(self) -> dict: - """Serializes the JobParameter into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the JobParameter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default is not None: body['default'] = self.default + if self.name is not 
None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the JobParameter into a shallow dictionary of its immediate attributes.""" body = {} if self.default is not None: body['default'] = self.default if self.name is not None: body['name'] = self.name @@ -1646,6 +2228,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default is not None: body['default'] = self.default + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobParameterDefinition: """Deserializes the JobParameterDefinition from a dictionary.""" @@ -1669,6 +2258,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the JobPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobPermission: """Deserializes the JobPermission from a dictionary.""" @@ -1703,6 +2300,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the JobPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = 
self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobPermissions: """Deserializes the JobPermissions from a dictionary.""" @@ -1725,6 +2330,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobPermissionsDescription: """Deserializes the JobPermissionsDescription from a dictionary.""" @@ -1747,6 +2359,13 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id return body + def as_shallow_dict(self) -> dict: + """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.job_id is not None: body['job_id'] = self.job_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest: """Deserializes the JobPermissionsRequest from a dictionary.""" @@ -1756,11 +2375,10 @@ def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest: @dataclass class JobRunAs: - """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs - as. If not specified, the job/pipeline runs as the user who created the job/pipeline. + """Write-only setting. Specifies the user or service principal that the job runs as. If not + specified, the job runs as the user who created the job. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. 
If not, - an error is thrown.""" + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" service_principal_name: Optional[str] = None """Application ID of an active service principal. Setting this field requires the @@ -1778,6 +2396,14 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the JobRunAs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobRunAs: """Deserializes the JobRunAs from a dictionary.""" @@ -1839,7 +2465,8 @@ class JobSettings: job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - task settings.""" + task settings. If more than 100 job clusters are available, you can paginate through them using + :method:jobs/get.""" max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to @@ -1861,15 +2488,18 @@ class JobSettings: parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" + performance_target: Optional[PerformanceTarget] = None + """PerformanceTarget defines how performant or cost efficient the execution of run on serverless + should be.""" + queue: Optional[QueueSettings] = None """The queue settings of the job.""" run_as: Optional[JobRunAs] = None - """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs - as. If not specified, the job/pipeline runs as the user who created the job/pipeline. 
+ """Write-only setting. Specifies the user or service principal that the job runs as. If not + specified, the job runs as the user who created the job. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, - an error is thrown.""" + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -1881,7 +2511,9 @@ class JobSettings: be added to the job.""" tasks: Optional[List[Task]] = None - """A list of task specifications to be executed by this job.""" + """A list of task specifications to be executed by this job. If more than 100 tasks are available, + you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the + object root to determine if more results are available.""" timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. 
A value of `0` means no timeout.""" @@ -1912,6 +2544,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: body['performance_target'] = self.performance_target.value if self.queue: body['queue'] = self.queue.as_dict() if self.run_as: body['run_as'] = self.run_as.as_dict() if self.schedule: body['schedule'] = self.schedule.as_dict() @@ -1922,6 +2555,35 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the JobSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environments: body['environments'] = self.environments + if self.format is not None: body['format'] = self.format + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.parameters: body['parameters'] = self.parameters + if self.performance_target is not None: body['performance_target'] = 
self.performance_target + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as + if self.schedule: body['schedule'] = self.schedule + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobSettings: """Deserializes the JobSettings from a dictionary.""" @@ -1940,6 +2602,7 @@ def from_dict(cls, d: Dict[str, any]) -> JobSettings: name=d.get('name', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), + performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), @@ -1979,6 +2642,15 @@ def as_dict(self) -> dict: if self.job_config_path is not None: body['job_config_path'] = self.job_config_path return body + def as_shallow_dict(self) -> dict: + """Serializes the JobSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dirty_state is not None: body['dirty_state'] = self.dirty_state + if self.import_from_git_branch is not None: + body['import_from_git_branch'] = self.import_from_git_branch + if self.job_config_path is not None: body['job_config_path'] = self.job_config_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobSource: """Deserializes the JobSource from a dictionary.""" @@ -2006,11 +2678,11 @@ class JobsHealthMetric(Enum): * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. 
* `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric - is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag - across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An - estimate of the maximum consumer delay across all streams. This metric is in Private Preview. * + is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag + across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate + of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all - streams. This metric is in Private Preview.""" + streams. This metric is in Public Preview.""" RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS' STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES' @@ -2032,11 +2704,11 @@ class JobsHealthRule: * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric - is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag - across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An - estimate of the maximum consumer delay across all streams. This metric is in Private Preview. * + is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag + across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate + of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all - streams. This metric is in Private Preview.""" + streams. 
This metric is in Public Preview.""" op: JobsHealthOperator """Specifies the operator used to compare the health metric value with the specified threshold.""" @@ -2052,6 +2724,14 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metric is not None: body['metric'] = self.metric + if self.op is not None: body['op'] = self.op + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobsHealthRule: """Deserializes the JobsHealthRule from a dictionary.""" @@ -2072,6 +2752,12 @@ def as_dict(self) -> dict: if self.rules: body['rules'] = [v.as_dict() for v in self.rules] return body + def as_shallow_dict(self) -> dict: + """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes.""" + body = {} + if self.rules: body['rules'] = self.rules + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules: """Deserializes the JobsHealthRules from a dictionary.""" @@ -2099,6 +2785,14 @@ def as_dict(self) -> dict: if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.jobs: body['jobs'] = self.jobs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse: """Deserializes the ListJobComplianceForPolicyResponse from a dictionary.""" @@ -2132,6 +2826,15 @@ def as_dict(self) -> dict: if self.prev_page_token is not None: body['prev_page_token'] = 
self.prev_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.has_more is not None: body['has_more'] = self.has_more + if self.jobs: body['jobs'] = self.jobs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListJobsResponse: """Deserializes the ListJobsResponse from a dictionary.""" @@ -2167,6 +2870,15 @@ def as_dict(self) -> dict: if self.runs: body['runs'] = [v.as_dict() for v in self.runs] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.has_more is not None: body['has_more'] = self.has_more + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.runs: body['runs'] = self.runs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRunsResponse: """Deserializes the ListRunsResponse from a dictionary.""" @@ -2195,6 +2907,13 @@ def as_dict(self) -> dict: if self.truncated is not None: body['truncated'] = self.truncated return body + def as_shallow_dict(self) -> dict: + """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.result is not None: body['result'] = self.result + if self.truncated is not None: body['truncated'] = self.truncated + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NotebookOutput: """Deserializes the NotebookOutput from a dictionary.""" @@ -2247,6 +2966,15 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) 
-> dict: + """Serializes the NotebookTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_parameters: body['base_parameters'] = self.base_parameters + if self.notebook_path is not None: body['notebook_path'] = self.notebook_path + if self.source is not None: body['source'] = self.source + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NotebookTask: """Deserializes the NotebookTask from a dictionary.""" @@ -2256,12 +2984,57 @@ def from_dict(cls, d: Dict[str, any]) -> NotebookTask: warehouse_id=d.get('warehouse_id', None)) +@dataclass +class OutputSchemaInfo: + """Stores the catalog name, schema name, and the output schema expiration time for the clean room + run.""" + + catalog_name: Optional[str] = None + + expiration_time: Optional[int] = None + """The expiration time for the output schema as a Unix timestamp in milliseconds.""" + + schema_name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.schema_name is not None: body['schema_name'] = self.schema_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.schema_name is not None: body['schema_name'] = self.schema_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo: + """Deserializes the OutputSchemaInfo from a dictionary.""" + return cls(catalog_name=d.get('catalog_name', None), 
+ expiration_time=d.get('expiration_time', None), + schema_name=d.get('schema_name', None)) + + class PauseStatus(Enum): PAUSED = 'PAUSED' UNPAUSED = 'UNPAUSED' +class PerformanceTarget(Enum): + """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run + on serverless compute should be. The performance mode on the job or pipeline should map to a + performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).""" + + COST_OPTIMIZED = 'COST_OPTIMIZED' + PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED' + + @dataclass class PeriodicTriggerConfiguration: interval: int @@ -2277,6 +3050,13 @@ def as_dict(self) -> dict: if self.unit is not None: body['unit'] = self.unit.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.interval is not None: body['interval'] = self.interval + if self.unit is not None: body['unit'] = self.unit + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration: """Deserializes the PeriodicTriggerConfiguration from a dictionary.""" @@ -2302,6 +3082,12 @@ def as_dict(self) -> dict: if self.full_refresh is not None: body['full_refresh'] = self.full_refresh return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineParams into a shallow dictionary of its immediate attributes.""" + body = {} + if self.full_refresh is not None: body['full_refresh'] = self.full_refresh + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineParams: """Deserializes the PipelineParams from a dictionary.""" @@ -2323,6 +3109,13 @@ def as_dict(self) -> dict: if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineTask into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.full_refresh is not None: body['full_refresh'] = self.full_refresh + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineTask: """Deserializes the PipelineTask from a dictionary.""" @@ -2355,6 +3148,15 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = [v for v in self.parameters] return body + def as_shallow_dict(self) -> dict: + """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entry_point is not None: body['entry_point'] = self.entry_point + if self.named_parameters: body['named_parameters'] = self.named_parameters + if self.package_name is not None: body['package_name'] = self.package_name + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask: """Deserializes the PythonWheelTask from a dictionary.""" @@ -2384,6 +3186,13 @@ def as_dict(self) -> dict: if self.message is not None: body['message'] = self.message return body + def as_shallow_dict(self) -> dict: + """Serializes the QueueDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.code is not None: body['code'] = self.code + if self.message is not None: body['message'] = self.message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueueDetails: """Deserializes the QueueDetails from a dictionary.""" @@ -2413,6 +3222,12 @@ def as_dict(self) -> dict: if self.enabled is not None: body['enabled'] = self.enabled return body + def as_shallow_dict(self) -> dict: + """Serializes the QueueSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueueSettings: """Deserializes the QueueSettings from a dictionary.""" @@ -2454,6 +3269,18 @@ 
def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time is not None: body['end_time'] = self.end_time + if self.id is not None: body['id'] = self.id + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.task_run_ids: body['task_run_ids'] = self.task_run_ids + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem: """Deserializes the RepairHistoryItem from a dictionary.""" @@ -2489,8 +3316,9 @@ class RepairRun: be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs.""" + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used in the run. 
for example `"param": "overriding_val"`""" @@ -2589,6 +3417,26 @@ def as_dict(self) -> dict: if self.sql_params: body['sql_params'] = self.sql_params return body + def as_shallow_dict(self) -> dict: + """Serializes the RepairRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.jar_params: body['jar_params'] = self.jar_params + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.rerun_all_failed_tasks is not None: + body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks + if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks + if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks + if self.run_id is not None: body['run_id'] = self.run_id + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepairRun: """Deserializes the RepairRun from a dictionary.""" @@ -2622,6 +3470,12 @@ def as_dict(self) -> dict: if self.repair_id is not None: body['repair_id'] = self.repair_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.repair_id is not None: body['repair_id'] = self.repair_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepairRunResponse: """Deserializes the RepairRunResponse from a dictionary.""" @@ -2646,6 
+3500,13 @@ def as_dict(self) -> dict: if self.new_settings: body['new_settings'] = self.new_settings.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ResetJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + if self.new_settings: body['new_settings'] = self.new_settings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResetJob: """Deserializes the ResetJob from a dictionary.""" @@ -2660,6 +3521,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ResetResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResetResponse: """Deserializes the ResetResponse from a dictionary.""" @@ -2679,6 +3545,13 @@ def as_dict(self) -> dict: if self.right is not None: body['right'] = self.right return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.left is not None: body['left'] = self.left + if self.right is not None: body['right'] = self.right + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedConditionTaskValues: """Deserializes the ResolvedConditionTaskValues from a dictionary.""" @@ -2695,6 +3568,12 @@ def as_dict(self) -> dict: if self.commands: body['commands'] = [v for v in self.commands] return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.commands: body['commands'] = self.commands + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedDbtTaskValues: """Deserializes the ResolvedDbtTaskValues from a dictionary.""" @@ -2711,6 +3590,12 @@ def as_dict(self) -> dict: if self.base_parameters: 
body['base_parameters'] = self.base_parameters return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_parameters: body['base_parameters'] = self.base_parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedNotebookTaskValues: """Deserializes the ResolvedNotebookTaskValues from a dictionary.""" @@ -2727,6 +3612,12 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = self.parameters return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedParamPairValues: """Deserializes the ResolvedParamPairValues from a dictionary.""" @@ -2746,6 +3637,13 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = [v for v in self.parameters] return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.named_parameters: body['named_parameters'] = self.named_parameters + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedPythonWheelTaskValues: """Deserializes the ResolvedPythonWheelTaskValues from a dictionary.""" @@ -2765,6 +3663,13 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = self.parameters return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> ResolvedRunJobTaskValues: """Deserializes the ResolvedRunJobTaskValues from a dictionary.""" @@ -2781,6 +3686,12 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = [v for v in self.parameters] return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedStringParamsValues: """Deserializes the ResolvedStringParamsValues from a dictionary.""" @@ -2824,6 +3735,21 @@ def as_dict(self) -> dict: if self.sql_task: body['sql_task'] = self.sql_task.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition_task: body['condition_task'] = self.condition_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.simulation_task: body['simulation_task'] = self.simulation_task + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResolvedValues: """Deserializes the ResolvedValues from a dictionary.""" @@ -2870,6 +3796,12 @@ class Run: description: Optional[str] = None """Description of the run""" + effective_performance_target: Optional[PerformanceTarget] = None + """effective_performance_target is the actual performance target used by the run during execution. 
+ effective_performance_target can differ from performance_target depending on if the job was + eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically + override the value for the run (ex. RunNow).""" + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" @@ -2891,13 +3823,19 @@ class Run: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" + has_more: Optional[bool] = None + """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + :method:jobs/listruns requests with `expand_tasks=true`.""" + iterations: Optional[List[RunTask]] = None """Only populated by for-each iterations. The parent for-each task is located in tasks array.""" job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - task settings.""" + task settings. If more than 100 job clusters are available, you can paginate through them using + :method:jobs/getrun.""" job_id: Optional[int] = None """The canonical identifier of the job that contains this run.""" @@ -2974,7 +3912,9 @@ class Run: tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call - `JobsGetOutput` to retrieve the run resutls.""" + `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can + paginate through them using :method:jobs/getrun. 
Use the `next_page_token` field at the object + root to determine if more results are available.""" trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -2985,7 +3925,8 @@ class Run: previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is - triggered by a table update.""" + triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to manually + restart a continuous job run.""" trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" @@ -2999,9 +3940,12 @@ def as_dict(self) -> dict: if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: + body['effective_performance_target'] = self.effective_performance_target.value if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.has_more is not None: body['has_more'] = self.has_more if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations] if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id @@ -3030,6 +3974,48 @@ def as_dict(self) -> dict: if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Run into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attempt_number is not None: 
body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: + body['effective_performance_target'] = self.effective_performance_target + if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source + if self.has_more is not None: body['has_more'] = self.has_more + if self.iterations: body['iterations'] = self.iterations + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: + body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = self.repair_history + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type 
is not None: body['run_type'] = self.run_type + if self.schedule: body['schedule'] = self.schedule + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.tasks: body['tasks'] = self.tasks + if self.trigger is not None: body['trigger'] = self.trigger + if self.trigger_info: body['trigger_info'] = self.trigger_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Run: """Deserializes the Run from a dictionary.""" @@ -3039,9 +4025,11 @@ def from_dict(cls, d: Dict[str, any]) -> Run: cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), + effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), + has_more=d.get('has_more', None), iterations=_repeated_dict(d, 'iterations', RunTask), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), @@ -3103,6 +4091,15 @@ def as_dict(self) -> dict: if self.right is not None: body['right'] = self.right return body + def as_shallow_dict(self) -> dict: + """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op + if self.outcome is not None: body['outcome'] = self.outcome + if self.right is not None: body['right'] = self.right + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunConditionTask: """Deserializes the RunConditionTask from a dictionary.""" @@ -3137,6 +4134,15 @@ def as_dict(self) -> dict: if self.task: body['task'] = self.task.as_dict() return body 
+ def as_shallow_dict(self) -> dict: + """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.stats: body['stats'] = self.stats + if self.task: body['task'] = self.task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunForEachTask: """Deserializes the RunForEachTask from a dictionary.""" @@ -3175,6 +4181,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunJobOutput: """Deserializes the RunJobOutput from a dictionary.""" @@ -3197,8 +4209,9 @@ class RunJobTask: be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs.""" + Use [Task parameter variables] to set parameters containing information about job runs. 
+ + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used to trigger the job.""" @@ -3277,6 +4290,21 @@ def as_dict(self) -> dict: if self.sql_params: body['sql_params'] = self.sql_params return body + def as_shallow_dict(self) -> dict: + """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.jar_params: body['jar_params'] = self.jar_params + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunJobTask: """Deserializes the RunJobTask from a dictionary.""" @@ -3357,8 +4385,9 @@ class RunNow: be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs.""" + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used in the run. 
for example `"param": "overriding_val"`""" @@ -3380,6 +4409,15 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" + only: Optional[List[str]] = None + """A list of task keys to run inside of the job. If this field is not provided, all tasks in the + job will be run.""" + + performance_target: Optional[PerformanceTarget] = None + """PerformanceTarget defines how performant or cost efficient the execution of run on serverless + compute should be. For RunNow request, the run will execute with this settings instead of ones + defined in job.""" + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" @@ -3434,6 +4472,8 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = self.job_parameters if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.only: body['only'] = [v for v in self.only] + if self.performance_target is not None: body['performance_target'] = self.performance_target.value if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() if self.python_named_params: body['python_named_params'] = self.python_named_params if self.python_params: body['python_params'] = [v for v in self.python_params] @@ -3442,6 +4482,25 @@ def as_dict(self) -> dict: if self.sql_params: body['sql_params'] = self.sql_params return body + def as_shallow_dict(self) -> dict: + """Serializes the RunNow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.jar_params: body['jar_params'] = self.jar_params + if self.job_id is not None: body['job_id'] = self.job_id + if 
self.job_parameters: body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.only: body['only'] = self.only + if self.performance_target is not None: body['performance_target'] = self.performance_target + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.queue: body['queue'] = self.queue + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunNow: """Deserializes the RunNow from a dictionary.""" @@ -3451,6 +4510,8 @@ def from_dict(cls, d: Dict[str, any]) -> RunNow: job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), + only=d.get('only', None), + performance_target=_enum(d, 'performance_target', PerformanceTarget), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), @@ -3476,6 +4537,13 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunNowResponse: """Deserializes the RunNowResponse from a dictionary.""" @@ -3486,6 +4554,9 @@ def from_dict(cls, d: Dict[str, any]) -> RunNowResponse: class RunOutput: """Run output was retrieved successfully.""" + clean_rooms_notebook_output: 
Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None + """The output of a clean rooms notebook task, if available""" + dbt_output: Optional[DbtOutput] = None """The output of a dbt task, if available.""" @@ -3530,6 +4601,8 @@ class RunOutput: def as_dict(self) -> dict: """Serializes the RunOutput into a dictionary suitable for use as a JSON request body.""" body = {} + if self.clean_rooms_notebook_output: + body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict() if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict() if self.error is not None: body['error'] = self.error if self.error_trace is not None: body['error_trace'] = self.error_trace @@ -3542,10 +4615,29 @@ def as_dict(self) -> dict: if self.sql_output: body['sql_output'] = self.sql_output.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the RunOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_rooms_notebook_output: + body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output + if self.dbt_output: body['dbt_output'] = self.dbt_output + if self.error is not None: body['error'] = self.error + if self.error_trace is not None: body['error_trace'] = self.error_trace + if self.info is not None: body['info'] = self.info + if self.logs is not None: body['logs'] = self.logs + if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated + if self.metadata: body['metadata'] = self.metadata + if self.notebook_output: body['notebook_output'] = self.notebook_output + if self.run_job_output: body['run_job_output'] = self.run_job_output + if self.sql_output: body['sql_output'] = self.sql_output + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunOutput: """Deserializes the RunOutput from a dictionary.""" - return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput), + return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output', + 
CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput), + dbt_output=_from_dict(d, 'dbt_output', DbtOutput), error=d.get('error', None), error_trace=d.get('error_trace', None), info=d.get('info', None), @@ -3570,8 +4662,9 @@ class RunParameters: be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs.""" + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": @@ -3645,6 +4738,19 @@ def as_dict(self) -> dict: if self.sql_params: body['sql_params'] = self.sql_params return body + def as_shallow_dict(self) -> dict: + """Serializes the RunParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.jar_params: body['jar_params'] = self.jar_params + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunParameters: """Deserializes the RunParameters from a dictionary.""" @@ -3714,6 +4820,17 @@ def as_dict(self) -> dict: body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout return body + def 
as_shallow_dict(self) -> dict: + """Serializes the RunState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state + if self.queue_reason is not None: body['queue_reason'] = self.queue_reason + if self.result_state is not None: body['result_state'] = self.result_state + if self.state_message is not None: body['state_message'] = self.state_message + if self.user_cancelled_or_timedout is not None: + body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunState: """Deserializes the RunState from a dictionary.""" @@ -3746,6 +4863,14 @@ def as_dict(self) -> dict: if self.termination_details: body['termination_details'] = self.termination_details.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the RunStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.queue_details: body['queue_details'] = self.queue_details + if self.state is not None: body['state'] = self.state + if self.termination_details: body['termination_details'] = self.termination_details + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunStatus: """Deserializes the RunStatus from a dictionary.""" @@ -3771,6 +4896,11 @@ class RunTask: original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None + """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. + + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. 
The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the @@ -3782,13 +4912,13 @@ class RunTask: once the Jobs service has requested a cluster for the run.""" condition_task: Optional[RunConditionTask] = None - """If condition_task, specifies a condition with an outcome that can be used to control the - execution of other tasks. Does not require a cluster to execute and does not support retries or - notifications.""" + """The task evaluates a condition that can be used to control the execution of other tasks when the + `condition_task` field is present. The condition task does not require a cluster to execute and + does not support retries or notifications.""" dbt_task: Optional[DbtTask] = None - """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - the ability to use a serverless or a pro SQL warehouse.""" + """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in @@ -3798,6 +4928,16 @@ class RunTask: description: Optional[str] = None """An optional description for this task.""" + disabled: Optional[bool] = None + """Denotes whether or not the task was disabled by the user. Disabled tasks do not execute and are + immediately skipped as soon as they are unblocked.""" + + effective_performance_target: Optional[PerformanceTarget] = None + """effective_performance_target is the actual performance target used by the run during execution. + effective_performance_target can differ from performance_target depending on if the job was + eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if an override was + provided for the run (ex. 
RunNow).""" + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" @@ -3823,7 +4963,8 @@ class RunTask: responding. We suggest running jobs and tasks on new clusters for greater reliability""" for_each_task: Optional[RunForEachTask] = None - """If for_each_task, indicates that this task must execute the nested task within it.""" + """The task executes a nested task for every input provided when the `for_each_task` field is + present.""" git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. @@ -3845,18 +4986,18 @@ class RunTask: """If new_cluster, a description of a new cluster that is created for each run.""" notebook_task: Optional[NotebookTask] = None - """If notebook_task, indicates that this task must run a notebook. This field may not be specified - in conjunction with spark_jar_task.""" + """The task runs a notebook when the `notebook_task` field is present.""" notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task run.""" pipeline_task: Optional[PipelineTask] = None - """If pipeline_task, indicates that this task must execute a Pipeline.""" + """The task triggers a pipeline update when the `pipeline_task` field is present. 
Only pipelines + configured to use triggered mode are supported.""" python_wheel_task: Optional[PythonWheelTask] = None - """If python_wheel_task, indicates that this job must execute a PythonWheel.""" + """The task runs a Python wheel when the `python_wheel_task` field is present.""" queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" @@ -3876,7 +5017,7 @@ class RunTask: :method:jobs/create for a list of possible values.""" run_job_task: Optional[RunJobTask] = None - """If run_job_task, indicates that this task must execute another job.""" + """The task triggers another job when the `run_job_task` field is present.""" run_page_url: Optional[str] = None @@ -3888,14 +5029,14 @@ class RunTask: duration of a multitask job run is the value of the `run_duration` field.""" spark_jar_task: Optional[SparkJarTask] = None - """If spark_jar_task, indicates that this task must run a JAR.""" + """The task runs a JAR when the `spark_jar_task` field is present.""" spark_python_task: Optional[SparkPythonTask] = None - """If spark_python_task, indicates that this task must run a Python file.""" + """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - This task can run only on new clusters. + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. 
Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -3911,7 +5052,8 @@ class RunTask: The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None - """If sql_task, indicates that this job must execute a SQL task.""" + """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + the `sql_task` field is present.""" start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). @@ -3936,12 +5078,17 @@ def as_dict(self) -> dict: """Serializes the RunTask into a dictionary suitable for use as a JSON request body.""" body = {} if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.clean_rooms_notebook_task: + body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() if self.condition_task: body['condition_task'] = self.condition_task.as_dict() if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] if self.description is not None: body['description'] = self.description + if self.disabled is not None: body['disabled'] = self.disabled + if self.effective_performance_target is not None: + body['effective_performance_target'] = self.effective_performance_target.value if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() if self.end_time is not None: body['end_time'] = self.end_time if self.environment_key is not None: body['environment_key'] = self.environment_key @@ -3976,16 +5123,68 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return 
body + def as_shallow_dict(self) -> dict: + """Serializes the RunTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.condition_task: body['condition_task'] = self.condition_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.disabled is not None: body['disabled'] = self.disabled + if self.effective_performance_target is not None: + body['effective_performance_target'] = self.effective_performance_target + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.end_time is not None: body['end_time'] = self.end_time + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.git_source: body['git_source'] = self.git_source + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.python_wheel_task: body['python_wheel_task'] = 
self.python_wheel_task + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.resolved_values: body['resolved_values'] = self.resolved_values + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_if is not None: body['run_if'] = self.run_if + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunTask: """Deserializes the RunTask from a dictionary.""" return cls(attempt_number=d.get('attempt_number', None), + clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', + CleanRoomsNotebookTask), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), condition_task=_from_dict(d, 'condition_task', RunConditionTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), + disabled=d.get('disabled', None), + effective_performance_target=_enum(d, 
'effective_performance_target', PerformanceTarget), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), end_time=d.get('end_time', None), environment_key=d.get('environment_key', None), @@ -4065,12 +5264,25 @@ class SparkJarTask: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" + run_as_repl: Optional[bool] = None + """Deprecated. A value of `false` is no longer supported.""" + def as_dict(self) -> dict: """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body.""" body = {} if self.jar_uri is not None: body['jar_uri'] = self.jar_uri if self.main_class_name is not None: body['main_class_name'] = self.main_class_name if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.jar_uri is not None: body['jar_uri'] = self.jar_uri + if self.main_class_name is not None: body['main_class_name'] = self.main_class_name + if self.parameters: body['parameters'] = self.parameters + if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl return body @classmethod @@ -4078,7 +5290,8 @@ def from_dict(cls, d: Dict[str, any]) -> SparkJarTask: """Deserializes the SparkJarTask from a dictionary.""" return cls(jar_uri=d.get('jar_uri', None), main_class_name=d.get('main_class_name', None), - parameters=d.get('parameters', None)) + parameters=d.get('parameters', None), + run_as_repl=d.get('run_as_repl', None)) @dataclass @@ -4113,6 +5326,14 @@ def as_dict(self) -> dict: if self.source is not None: body['source'] = self.source.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.parameters: body['parameters'] = 
self.parameters + if self.python_file is not None: body['python_file'] = self.python_file + if self.source is not None: body['source'] = self.source + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparkPythonTask: """Deserializes the SparkPythonTask from a dictionary.""" @@ -4136,6 +5357,12 @@ def as_dict(self) -> dict: if self.parameters: body['parameters'] = [v for v in self.parameters] return body + def as_shallow_dict(self) -> dict: + """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.parameters: body['parameters'] = self.parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparkSubmitTask: """Deserializes the SparkSubmitTask from a dictionary.""" @@ -4173,6 +5400,16 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_state is not None: body['alert_state'] = self.alert_state + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = self.sql_statements + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlAlertOutput: """Deserializes the SqlAlertOutput from a dictionary.""" @@ -4209,6 +5446,13 @@ def as_dict(self) -> dict: if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.widgets: body['widgets'] = self.widgets + return body + @classmethod 
def from_dict(cls, d: Dict[str, any]) -> SqlDashboardOutput: """Deserializes the SqlDashboardOutput from a dictionary.""" @@ -4251,6 +5495,18 @@ def as_dict(self) -> dict: if self.widget_title is not None: body['widget_title'] = self.widget_title return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time is not None: body['end_time'] = self.end_time + if self.error: body['error'] = self.error + if self.output_link is not None: body['output_link'] = self.output_link + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status + if self.widget_id is not None: body['widget_id'] = self.widget_id + if self.widget_title is not None: body['widget_title'] = self.widget_title + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlDashboardWidgetOutput: """Deserializes the SqlDashboardWidgetOutput from a dictionary.""" @@ -4291,6 +5547,14 @@ def as_dict(self) -> dict: if self.query_output: body['query_output'] = self.query_output.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_output: body['alert_output'] = self.alert_output + if self.dashboard_output: body['dashboard_output'] = self.dashboard_output + if self.query_output: body['query_output'] = self.query_output + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlOutput: """Deserializes the SqlOutput from a dictionary.""" @@ -4310,6 +5574,12 @@ def as_dict(self) -> dict: if self.message is not None: body['message'] = self.message return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + return body + @classmethod 
def from_dict(cls, d: Dict[str, any]) -> SqlOutputError: """Deserializes the SqlOutputError from a dictionary.""" @@ -4342,6 +5612,16 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = self.sql_statements + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlQueryOutput: """Deserializes the SqlQueryOutput from a dictionary.""" @@ -4363,6 +5643,12 @@ def as_dict(self) -> dict: if self.lookup_key is not None: body['lookup_key'] = self.lookup_key return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.lookup_key is not None: body['lookup_key'] = self.lookup_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlStatementOutput: """Deserializes the SqlStatementOutput from a dictionary.""" @@ -4403,6 +5689,17 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert: body['alert'] = self.alert + if self.dashboard: body['dashboard'] = self.dashboard + if self.file: body['file'] = self.file + if self.parameters: body['parameters'] = self.parameters + if self.query: body['query'] = self.query + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlTask: """Deserializes the SqlTask from a dictionary.""" @@ -4433,6 +5730,14 @@ def as_dict(self) -> dict: if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = self.subscriptions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlTaskAlert: """Deserializes the SqlTaskAlert from a dictionary.""" @@ -4464,6 +5769,15 @@ def as_dict(self) -> dict: if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = self.subscriptions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlTaskDashboard: """Deserializes the SqlTaskDashboard from a dictionary.""" @@ -4495,6 +5809,13 @@ def as_dict(self) -> dict: if self.source is not None: body['source'] = self.source.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + if self.source is not None: body['source'] = self.source + return body + @classmethod def from_dict(cls, d: Dict[str, any]) 
-> SqlTaskFile: """Deserializes the SqlTaskFile from a dictionary.""" @@ -4512,6 +5833,12 @@ def as_dict(self) -> dict: if self.query_id is not None: body['query_id'] = self.query_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes.""" + body = {} + if self.query_id is not None: body['query_id'] = self.query_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlTaskQuery: """Deserializes the SqlTaskQuery from a dictionary.""" @@ -4536,6 +5863,13 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription: """Deserializes the SqlTaskSubscription from a dictionary.""" @@ -4626,6 +5960,25 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SubmitRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environments: body['environments'] = self.environments + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.notification_settings: 
body['notification_settings'] = self.notification_settings + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as + if self.run_name is not None: body['run_name'] = self.run_name + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SubmitRun: """Deserializes the SubmitRun from a dictionary.""" @@ -4658,6 +6011,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SubmitRunResponse: """Deserializes the SubmitRunResponse from a dictionary.""" @@ -4671,14 +6030,19 @@ class SubmitTask: field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None + """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. + + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + condition_task: Optional[ConditionTask] = None - """If condition_task, specifies a condition with an outcome that can be used to control the - execution of other tasks. Does not require a cluster to execute and does not support retries or - notifications.""" + """The task evaluates a condition that can be used to control the execution of other tasks when the + `condition_task` field is present. 
The condition task does not require a cluster to execute and + does not support retries or notifications.""" dbt_task: Optional[DbtTask] = None - """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - the ability to use a serverless or a pro SQL warehouse.""" + """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in @@ -4702,7 +6066,8 @@ class SubmitTask: responding. We suggest running jobs and tasks on new clusters for greater reliability""" for_each_task: Optional[ForEachTask] = None - """If for_each_task, indicates that this task must execute the nested task within it.""" + """The task executes a nested task for every input provided when the `for_each_task` field is + present.""" health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" @@ -4715,18 +6080,18 @@ class SubmitTask: """If new_cluster, a description of a new cluster that is created for each run.""" notebook_task: Optional[NotebookTask] = None - """If notebook_task, indicates that this task must run a notebook. This field may not be specified - in conjunction with spark_jar_task.""" + """The task runs a notebook when the `notebook_task` field is present.""" notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task run.""" pipeline_task: Optional[PipelineTask] = None - """If pipeline_task, indicates that this task must execute a Pipeline.""" + """The task triggers a pipeline update when the `pipeline_task` field is present. 
Only pipelines + configured to use triggered mode are supported.""" python_wheel_task: Optional[PythonWheelTask] = None - """If python_wheel_task, indicates that this job must execute a PythonWheel.""" + """The task runs a Python wheel when the `python_wheel_task` field is present.""" run_if: Optional[RunIf] = None """An optional value indicating the condition that determines whether the task should be run once @@ -4734,17 +6099,17 @@ :method:jobs/create for a list of possible values.""" run_job_task: Optional[RunJobTask] = None - """If run_job_task, indicates that this task must execute another job.""" + """The task triggers another job when the `run_job_task` field is present.""" spark_jar_task: Optional[SparkJarTask] = None - """If spark_jar_task, indicates that this task must run a JAR.""" + """The task runs a JAR when the `spark_jar_task` field is present.""" spark_python_task: Optional[SparkPythonTask] = None - """If spark_python_task, indicates that this task must run a Python file.""" + """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - This task can run only on new clusters. + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. 
Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -4760,7 +6125,8 @@ class SubmitTask: The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None - """If sql_task, indicates that this job must execute a SQL task.""" + """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + the `sql_task` field is present.""" timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" @@ -4773,6 +6139,8 @@ class SubmitTask: def as_dict(self) -> dict: """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body.""" body = {} + if self.clean_rooms_notebook_task: + body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() if self.condition_task: body['condition_task'] = self.condition_task.as_dict() if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] @@ -4799,10 +6167,42 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SubmitTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.condition_task: body['condition_task'] = self.condition_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: 
body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.health: body['health'] = self.health + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.run_if is not None: body['run_if'] = self.run_if + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SubmitTask: """Deserializes the SubmitTask from a dictionary.""" - return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask), + return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', + CleanRoomsNotebookTask), + condition_task=_from_dict(d, 'condition_task', ConditionTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), @@ -4857,6 +6257,17 @@ def as_dict(self) -> dict: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body + def as_shallow_dict(self) -> dict: + """Serializes the 
TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition is not None: body['condition'] = self.condition + if self.min_time_between_triggers_seconds is not None: + body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.table_names: body['table_names'] = self.table_names + if self.wait_after_last_change_seconds is not None: + body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableUpdateTriggerConfiguration: """Deserializes the TableUpdateTriggerConfiguration from a dictionary.""" @@ -4873,14 +6284,19 @@ class Task: field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None + """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. + + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + condition_task: Optional[ConditionTask] = None - """If condition_task, specifies a condition with an outcome that can be used to control the - execution of other tasks. Does not require a cluster to execute and does not support retries or - notifications.""" + """The task evaluates a condition that can be used to control the execution of other tasks when the + `condition_task` field is present. The condition task does not require a cluster to execute and + does not support retries or notifications.""" dbt_task: Optional[DbtTask] = None - """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - the ability to use a serverless or a pro SQL warehouse.""" + """The task runs one or more dbt commands when the `dbt_task` field is present. 
The dbt task + requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in @@ -4908,7 +6324,8 @@ class Task: responding. We suggest running jobs and tasks on new clusters for greater reliability""" for_each_task: Optional[ForEachTask] = None - """If for_each_task, indicates that this task must execute the nested task within it.""" + """The task executes a nested task for every input provided when the `for_each_task` field is + present.""" health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" @@ -4935,18 +6352,18 @@ class Task: """If new_cluster, a description of a new cluster that is created for each run.""" notebook_task: Optional[NotebookTask] = None - """If notebook_task, indicates that this task must run a notebook. This field may not be specified - in conjunction with spark_jar_task.""" + """The task runs a notebook when the `notebook_task` field is present.""" notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.""" pipeline_task: Optional[PipelineTask] = None - """If pipeline_task, indicates that this task must execute a Pipeline.""" + """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + configured to use triggered mode are supported.""" python_wheel_task: Optional[PythonWheelTask] = None - """If python_wheel_task, indicates that this job must execute a PythonWheel.""" + """The task runs a Python wheel when the `python_wheel_task` field is present.""" retry_on_timeout: Optional[bool] = None """An optional policy to specify whether to retry a job when it times out. 
The default behavior is @@ -4962,17 +6379,17 @@ class Task: least one dependency failed * `ALL_FAILED`: ALl dependencies have failed""" run_job_task: Optional[RunJobTask] = None - """If run_job_task, indicates that this task must execute another job.""" + """The task triggers another job when the `run_job_task` field is present.""" spark_jar_task: Optional[SparkJarTask] = None - """If spark_jar_task, indicates that this task must run a JAR.""" + """The task runs a JAR when the `spark_jar_task` field is present.""" spark_python_task: Optional[SparkPythonTask] = None - """If spark_python_task, indicates that this task must run a Python file.""" + """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - This task can run only on new clusters. + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -4988,7 +6405,8 @@ class Task: The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None - """If sql_task, indicates that this job must execute a SQL task.""" + """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + the `sql_task` field is present.""" timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. 
A value of `0` means no timeout.""" @@ -5000,6 +6418,8 @@ class Task: def as_dict(self) -> dict: """Serializes the Task into a dictionary suitable for use as a JSON request body.""" body = {} + if self.clean_rooms_notebook_task: + body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() if self.condition_task: body['condition_task'] = self.condition_task.as_dict() if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] @@ -5033,10 +6453,49 @@ def as_dict(self) -> dict: if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Task into a shallow dictionary of its immediate attributes.""" + body = {} + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.condition_task: body['condition_task'] = self.condition_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.disable_auto_optimization is not None: + body['disable_auto_optimization'] = self.disable_auto_optimization + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.health: body['health'] = self.health + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.max_retries is not None: body['max_retries'] = self.max_retries + if self.min_retry_interval_millis is not None: + body['min_retry_interval_millis'] = 
self.min_retry_interval_millis + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout + if self.run_if is not None: body['run_if'] = self.run_if + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Task: """Deserializes the Task from a dictionary.""" - return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask), + return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', + CleanRoomsNotebookTask), + condition_task=_from_dict(d, 'condition_task', ConditionTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), @@ -5083,6 +6542,13 @@ def as_dict(self) -> dict: if self.task_key is not None: body['task_key'] = self.task_key return body + def as_shallow_dict(self) -> dict: + """Serializes the TaskDependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.outcome is not None: body['outcome'] = self.outcome + if 
self.task_key is not None: body['task_key'] = self.task_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TaskDependency: """Deserializes the TaskDependency from a dictionary.""" @@ -5140,6 +6606,20 @@ def as_dict(self) -> dict: if self.on_success: body['on_success'] = [v for v in self.on_success] return body + def as_shallow_dict(self) -> dict: + """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes.""" + body = {} + if self.no_alert_for_skipped_runs is not None: + body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: + body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = self.on_start + if self.on_streaming_backlog_exceeded: + body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TaskEmailNotifications: """Deserializes the TaskEmailNotifications from a dictionary.""" @@ -5177,6 +6657,16 @@ def as_dict(self) -> dict: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs return body + def as_shallow_dict(self) -> dict: + """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt + if self.no_alert_for_canceled_runs is not None: + body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: + body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings: """Deserializes the TaskNotificationSettings from a dictionary.""" @@ -5222,6 
+6712,7 @@ class TerminationCodeCode(Enum): [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" + BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED' CANCELED = 'CANCELED' CLOUD_FAILURE = 'CLOUD_FAILURE' CLUSTER_ERROR = 'CLUSTER_ERROR' @@ -5306,6 +6797,14 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.code is not None: body['code'] = self.code + if self.message is not None: body['message'] = self.message + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TerminationDetails: """Deserializes the TerminationDetails from a dictionary.""" @@ -5342,6 +6841,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TriggerInfo: """Deserializes the TriggerInfo from a dictionary.""" @@ -5374,6 +6879,16 @@ def as_dict(self) -> dict: if self.table_update: body['table_update'] = self.table_update.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_arrival: body['file_arrival'] = self.file_arrival + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.periodic: body['periodic'] = self.periodic + if self.table: body['table'] = self.table + if self.table_update: body['table_update'] = self.table_update + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
TriggerSettings: """Deserializes the TriggerSettings from a dictionary.""" @@ -5393,7 +6908,8 @@ class TriggerType(Enum): previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is - triggered by a table update.""" + triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to manually + restart a continuous job run.""" FILE_ARRIVAL = 'FILE_ARRIVAL' ONE_TIME = 'ONE_TIME' @@ -5434,6 +6950,14 @@ def as_dict(self) -> dict: if self.new_settings: body['new_settings'] = self.new_settings.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateJob into a shallow dictionary of its immediate attributes.""" + body = {} + if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove + if self.job_id is not None: body['job_id'] = self.job_id + if self.new_settings: body['new_settings'] = self.new_settings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateJob: """Deserializes the UpdateJob from a dictionary.""" @@ -5450,6 +6974,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -5476,6 +7005,14 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ViewItem into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type 
+ return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ViewItem: """Deserializes the ViewItem from a dictionary.""" @@ -5508,6 +7045,12 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the Webhook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Webhook: """Deserializes the Webhook from a dictionary.""" @@ -5555,6 +7098,18 @@ def as_dict(self) -> dict: if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success] return body + def as_shallow_dict(self) -> dict: + """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes.""" + body = {} + if self.on_duration_warning_threshold_exceeded: + body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = self.on_start + if self.on_streaming_backlog_exceeded: + body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WebhookNotifications: """Deserializes the WebhookNotifications from a dictionary.""" @@ -5682,6 +7237,7 @@ def create(self, name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, + performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, schedule: Optional[CronSchedule] = None, @@ -5737,6 +7293,7 @@ def create(self, :param job_clusters: List[:class:`JobCluster`] (optional) A list of job cluster specifications that can be shared and reused by tasks of this job. 
Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. + If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. :param max_concurrent_runs: int (optional) An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful for example if you @@ -5753,14 +7310,16 @@ def create(self, `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param performance_target: :class:`PerformanceTarget` (optional) + PerformanceTarget defines how performant or cost efficient the execution of run on serverless should + be. :param queue: :class:`QueueSettings` (optional) The queue settings of the job. :param run_as: :class:`JobRunAs` (optional) - Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If - not specified, the job/pipeline runs as the user who created the job/pipeline. + Write-only setting. Specifies the user or service principal that the job runs as. If not specified, + the job runs as the user who created the job. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an - error is thrown. + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -5769,7 +7328,9 @@ def create(self, clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job. :param tasks: List[:class:`Task`] (optional) - A list of task specifications to be executed by this job. 
+ A list of task specifications to be executed by this job. If more than 100 tasks are available, you + can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root + to determine if more results are available. :param timeout_seconds: int (optional) An optional timeout applied to each run of this job. A value of `0` means no timeout. :param trigger: :class:`TriggerSettings` (optional) @@ -5799,6 +7360,7 @@ def create(self, if name is not None: body['name'] = name if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict() if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters] + if performance_target is not None: body['performance_target'] = performance_target.value if queue is not None: body['queue'] = queue.as_dict() if run_as is not None: body['run_as'] = run_as.as_dict() if schedule is not None: body['schedule'] = schedule.as_dict() @@ -5865,19 +7427,28 @@ def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers) return ExportRunOutput.from_dict(res) - def get(self, job_id: int) -> Job: + def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: """Get a single job. Retrieves the details for a single job. + In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when + either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its + value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will + be empty on later pages. + :param job_id: int The canonical identifier of the job to retrieve information about. This field is required. + :param page_token: str (optional) + Use `next_page_token` returned from the previous GetJob to request the next page of the job's + sub-resources. 
:returns: :class:`Job` """ query = {} if job_id is not None: query['job_id'] = job_id + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers) @@ -5923,7 +7494,12 @@ def get_run(self, page_token: Optional[str] = None) -> Run: """Get a single job run. - Retrieve the metadata of a run. + Retrieves the metadata of a run. + + In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when + either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its + value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will + be empty on later pages. :param run_id: int The canonical identifier of the run for which to retrieve the metadata. This field is required. @@ -5932,8 +7508,8 @@ def get_run(self, :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. :param page_token: str (optional) - To list the next page or the previous page of job tasks, set this field to the value of the - `next_page_token` or `prev_page_token` returned in the GetJob response. + Use `next_page_token` returned from the previous GetRun to request the next page of the run's + sub-resources. :returns: :class:`Run` """ @@ -5985,7 +7561,8 @@ def list(self, Retrieves a list of jobs. :param expand_tasks: bool (optional) - Whether to include task and cluster details in the response. + Whether to include task and cluster details in the response. Note that in API 2.2, only the first + 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters. :param limit: int (optional) The number of jobs to return. This value must be greater than 0 and less or equal to 100. The default value is 20. 
@@ -6042,7 +7619,8 @@ def list_runs(self, If completed_only is `true`, only completed runs are included in the results; otherwise, lists both active and completed runs. This field cannot be `true` when active_only is `true`. :param expand_tasks: bool (optional) - Whether to include task and cluster details in the response. + Whether to include task and cluster details in the response. Note that in API 2.2, only the first + 100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters. :param job_id: int (optional) The job for which to list runs. If omitted, the Jobs service lists runs from all jobs. :param limit: int (optional) @@ -6121,8 +7699,9 @@ def repair_run(self, in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs. + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` :param latest_repair_id: int (optional) @@ -6279,6 +7858,8 @@ def run_now(self, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str, str]] = None, notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, + performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str, str]] = None, python_params: Optional[List[str]] = None, @@ -6314,8 +7895,9 @@ def run_now(self, in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. 
- Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing - information about job runs. + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` :param notebook_params: Dict[str,str] (optional) @@ -6334,6 +7916,13 @@ def run_now(self, [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html + :param only: List[str] (optional) + A list of task keys to run inside of the job. If this field is not provided, all tasks in the job + will be run. + :param performance_target: :class:`PerformanceTarget` (optional) + PerformanceTarget defines how performant or cost efficient the execution of run on serverless + compute should be. For RunNow request, the run will execute with this settings instead of ones + defined in job. 
:param pipeline_params: :class:`PipelineParams` (optional) Controls whether the pipeline should perform a full refresh :param python_named_params: Dict[str,str] (optional) @@ -6385,6 +7974,8 @@ def run_now(self, if job_id is not None: body['job_id'] = job_id if job_parameters is not None: body['job_parameters'] = job_parameters if notebook_params is not None: body['notebook_params'] = notebook_params + if only is not None: body['only'] = [v for v in only] + if performance_target is not None: body['performance_target'] = performance_target.value if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() if python_named_params is not None: body['python_named_params'] = python_named_params if python_params is not None: body['python_params'] = [v for v in python_params] @@ -6406,6 +7997,8 @@ def run_now_and_wait(self, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str, str]] = None, notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, + performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str, str]] = None, python_params: Optional[List[str]] = None, @@ -6419,6 +8012,8 @@ def run_now_and_wait(self, job_id=job_id, job_parameters=job_parameters, notebook_params=notebook_params, + only=only, + performance_target=performance_target, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, @@ -6433,7 +8028,8 @@ def set_permissions( access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions: """Set job permissions. - Sets permissions on a job. Jobs can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. 
:param job_id: str The job for which to get or manage permissions. diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 1a2dedf31..239cd2eaf 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -27,6 +27,13 @@ def as_dict(self) -> dict: if self.listing_id is not None: body['listing_id'] = self.listing_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.listing_id is not None: body['listing_id'] = self.listing_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingRequest: """Deserializes the AddExchangeForListingRequest from a dictionary.""" @@ -43,6 +50,12 @@ def as_dict(self) -> dict: if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingResponse: """Deserializes the AddExchangeForListingResponse from a dictionary.""" @@ -56,6 +69,7 @@ class AssetType(Enum): ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA' ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL' ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK' + ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION' @dataclass @@ -68,6 +82,12 @@ def as_dict(self) -> dict: if self.listings: body['listings'] = [v.as_dict() for v in self.listings] return body + def as_shallow_dict(self) -> dict: + """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listings: 
body['listings'] = self.listings + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BatchGetListingsResponse: """Deserializes the BatchGetListingsResponse from a dictionary.""" @@ -84,6 +104,12 @@ def as_dict(self) -> dict: if self.providers: body['providers'] = [v.as_dict() for v in self.providers] return body + def as_shallow_dict(self) -> dict: + """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.providers: body['providers'] = self.providers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BatchGetProvidersResponse: """Deserializes the BatchGetProvidersResponse from a dictionary.""" @@ -126,6 +152,12 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes.""" + body = {} + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ConsumerTerms: """Deserializes the ConsumerTerms from a dictionary.""" @@ -153,6 +185,15 @@ def as_dict(self) -> dict: if self.last_name is not None: body['last_name'] = self.last_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ContactInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.company is not None: body['company'] = self.company + if self.email is not None: body['email'] = self.email + if self.first_name is not None: body['first_name'] = self.first_name + if self.last_name is not None: body['last_name'] = self.last_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ContactInfo: """Deserializes the ContactInfo from a dictionary.""" @@ -178,6 +219,12 @@ def as_dict(self) -> dict: if self.filter: body['filter'] = self.filter.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes 
the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filter: body['filter'] = self.filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterRequest: """Deserializes the CreateExchangeFilterRequest from a dictionary.""" @@ -194,6 +241,12 @@ def as_dict(self) -> dict: if self.filter_id is not None: body['filter_id'] = self.filter_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filter_id is not None: body['filter_id'] = self.filter_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterResponse: """Deserializes the CreateExchangeFilterResponse from a dictionary.""" @@ -210,6 +263,12 @@ def as_dict(self) -> dict: if self.exchange: body['exchange'] = self.exchange.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange: body['exchange'] = self.exchange + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExchangeRequest: """Deserializes the CreateExchangeRequest from a dictionary.""" @@ -226,6 +285,12 @@ def as_dict(self) -> dict: if self.exchange_id is not None: body['exchange_id'] = self.exchange_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExchangeResponse: """Deserializes the CreateExchangeResponse from a dictionary.""" @@ -252,6 +317,15 @@ def as_dict(self) -> dict: if self.mime_type is not None: body['mime_type'] = self.mime_type return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.file_parent: body['file_parent'] = self.file_parent + if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type + if self.mime_type is not None: body['mime_type'] = self.mime_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateFileRequest: """Deserializes the CreateFileRequest from a dictionary.""" @@ -275,6 +349,13 @@ def as_dict(self) -> dict: if self.upload_url is not None: body['upload_url'] = self.upload_url return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_info: body['file_info'] = self.file_info + if self.upload_url is not None: body['upload_url'] = self.upload_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateFileResponse: """Deserializes the CreateFileResponse from a dictionary.""" @@ -308,6 +389,17 @@ def as_dict(self) -> dict: if self.share_name is not None: body['share_name'] = self.share_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.repo_detail: body['repo_detail'] = self.repo_detail + if self.share_name is not None: body['share_name'] = self.share_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateInstallationRequest: """Deserializes the 
CreateInstallationRequest from a dictionary.""" @@ -329,6 +421,12 @@ def as_dict(self) -> dict: if self.listing: body['listing'] = self.listing.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listing: body['listing'] = self.listing + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateListingRequest: """Deserializes the CreateListingRequest from a dictionary.""" @@ -345,6 +443,12 @@ def as_dict(self) -> dict: if self.listing_id is not None: body['listing_id'] = self.listing_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listing_id is not None: body['listing_id'] = self.listing_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateListingResponse: """Deserializes the CreateListingResponse from a dictionary.""" @@ -388,6 +492,20 @@ def as_dict(self) -> dict: if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms + if self.comment is not None: body['comment'] = self.comment + if self.company is not None: body['company'] = self.company + if self.first_name is not None: body['first_name'] = self.first_name + if self.intended_use is not None: body['intended_use'] = self.intended_use + if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse + if self.last_name is not None: body['last_name'] = self.last_name + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = 
self.recipient_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequest: """Deserializes the CreatePersonalizationRequest from a dictionary.""" @@ -412,6 +530,12 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequestResponse: """Deserializes the CreatePersonalizationRequestResponse from a dictionary.""" @@ -428,6 +552,12 @@ def as_dict(self) -> dict: if self.provider: body['provider'] = self.provider.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.provider: body['provider'] = self.provider + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateProviderRequest: """Deserializes the CreateProviderRequest from a dictionary.""" @@ -444,6 +574,12 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateProviderResponse: """Deserializes the CreateProviderResponse from a dictionary.""" @@ -476,6 +612,13 @@ def as_dict(self) -> dict: if self.unit is not None: body['unit'] = self.unit.value return body + def as_shallow_dict(self) -> dict: + """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.interval is not None: body['interval'] = self.interval + if self.unit is not None: 
body['unit'] = self.unit + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DataRefreshInfo: """Deserializes the DataRefreshInfo from a dictionary.""" @@ -490,6 +633,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeFilterResponse: """Deserializes the DeleteExchangeFilterResponse from a dictionary.""" @@ -504,6 +652,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteExchangeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeResponse: """Deserializes the DeleteExchangeResponse from a dictionary.""" @@ -518,6 +671,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteFileResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteFileResponse: """Deserializes the DeleteFileResponse from a dictionary.""" @@ -532,6 +690,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteInstallationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteInstallationResponse: """Deserializes the DeleteInstallationResponse from a dictionary.""" @@ -546,6 +709,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
DeleteListingResponse: """Deserializes the DeleteListingResponse from a dictionary.""" @@ -560,6 +728,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteProviderResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteProviderResponse: """Deserializes the DeleteProviderResponse from a dictionary.""" @@ -606,6 +779,20 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the Exchange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.filters: body['filters'] = self.filters + if self.id is not None: body['id'] = self.id + if self.linked_listings: body['linked_listings'] = self.linked_listings + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Exchange: """Deserializes the Exchange from a dictionary.""" @@ -654,6 +841,20 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.filter_type is not None: body['filter_type'] = self.filter_type + if 
self.filter_value is not None: body['filter_value'] = self.filter_value + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExchangeFilter: """Deserializes the ExchangeFilter from a dictionary.""" @@ -701,6 +902,18 @@ def as_dict(self) -> dict: if self.listing_name is not None: body['listing_name'] = self.listing_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.exchange_name is not None: body['exchange_name'] = self.exchange_name + if self.id is not None: body['id'] = self.id + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExchangeListing: """Deserializes the ExchangeListing from a dictionary.""" @@ -753,6 +966,21 @@ def as_dict(self) -> dict: if self.updated_at is not None: body['updated_at'] = self.updated_at return body + def as_shallow_dict(self) -> dict: + """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.display_name is not None: body['display_name'] = self.display_name + if self.download_link is not None: body['download_link'] = self.download_link + if self.file_parent: body['file_parent'] = self.file_parent + if self.id is not None: body['id'] = self.id + if 
self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type + if self.mime_type is not None: body['mime_type'] = self.mime_type + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" @@ -782,6 +1010,13 @@ def as_dict(self) -> dict: if self.parent_id is not None: body['parent_id'] = self.parent_id return body + def as_shallow_dict(self) -> dict: + """Serializes the FileParent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type + if self.parent_id is not None: body['parent_id'] = self.parent_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileParent: """Deserializes the FileParent from a dictionary.""" @@ -819,6 +1054,12 @@ def as_dict(self) -> dict: if self.exchange: body['exchange'] = self.exchange.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange: body['exchange'] = self.exchange + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetExchangeResponse: """Deserializes the GetExchangeResponse from a dictionary.""" @@ -835,6 +1076,12 @@ def as_dict(self) -> dict: if self.file_info: body['file_info'] = self.file_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_info: body['file_info'] = self.file_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetFileResponse: """Deserializes the GetFileResponse from a 
dictionary.""" @@ -852,6 +1099,12 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionProviderAnalyticsDashboardResponse: """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary.""" @@ -872,6 +1125,13 @@ def as_dict(self) -> dict: body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetListingContentMetadataResponse: """Deserializes the GetListingContentMetadataResponse from a dictionary.""" @@ -889,6 +1149,12 @@ def as_dict(self) -> dict: if self.listing: body['listing'] = self.listing.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listing: body['listing'] = self.listing + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetListingResponse: """Deserializes the GetListingResponse from a dictionary.""" @@ -908,6 +1174,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes.""" + body = 
{} + if self.listings: body['listings'] = self.listings + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetListingsResponse: """Deserializes the GetListingsResponse from a dictionary.""" @@ -926,6 +1199,12 @@ def as_dict(self) -> dict: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.personalization_requests: body['personalization_requests'] = self.personalization_requests + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPersonalizationRequestResponse: """Deserializes the GetPersonalizationRequestResponse from a dictionary.""" @@ -943,6 +1222,12 @@ def as_dict(self) -> dict: if self.provider: body['provider'] = self.provider.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.provider: body['provider'] = self.provider + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetProviderResponse: """Deserializes the GetProviderResponse from a dictionary.""" @@ -959,6 +1244,12 @@ def as_dict(self) -> dict: if self.installation: body['installation'] = self.installation.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Installation into a shallow dictionary of its immediate attributes.""" + body = {} + if self.installation: body['installation'] = self.installation + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Installation: """Deserializes the Installation from a dictionary.""" @@ -1011,6 +1302,24 @@ def as_dict(self) -> dict: if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens] return body + def as_shallow_dict(self) 
-> dict: + """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.error_message is not None: body['error_message'] = self.error_message + if self.id is not None: body['id'] = self.id + if self.installed_on is not None: body['installed_on'] = self.installed_on + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.repo_name is not None: body['repo_name'] = self.repo_name + if self.repo_path is not None: body['repo_path'] = self.repo_path + if self.share_name is not None: body['share_name'] = self.share_name + if self.status is not None: body['status'] = self.status + if self.token_detail: body['token_detail'] = self.token_detail + if self.tokens: body['tokens'] = self.tokens + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InstallationDetail: """Deserializes the InstallationDetail from a dictionary.""" @@ -1048,6 +1357,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.installations: body['installations'] = self.installations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAllInstallationsResponse: """Deserializes the ListAllInstallationsResponse from a dictionary.""" @@ -1069,6 +1385,13 @@ def as_dict(self) -> dict: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests] return body + def as_shallow_dict(self) -> dict: + """Serializes the 
ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.personalization_requests: body['personalization_requests'] = self.personalization_requests + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAllPersonalizationRequestsResponse: """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary.""" @@ -1090,6 +1413,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filters: body['filters'] = self.filters + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListExchangeFiltersResponse: """Deserializes the ListExchangeFiltersResponse from a dictionary.""" @@ -1110,6 +1440,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange_listing: body['exchange_listing'] = self.exchange_listing + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListExchangesForListingResponse: """Deserializes the ListExchangesForListingResponse from a dictionary.""" @@ -1130,6 +1467,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListExchangesResponse into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.exchanges: body['exchanges'] = self.exchanges + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListExchangesResponse: """Deserializes the ListExchangesResponse from a dictionary.""" @@ -1150,6 +1494,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_infos: body['file_infos'] = self.file_infos + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListFilesResponse: """Deserializes the ListFilesResponse from a dictionary.""" @@ -1170,6 +1521,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.fulfillments: body['fulfillments'] = self.fulfillments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListFulfillmentsResponse: """Deserializes the ListFulfillmentsResponse from a dictionary.""" @@ -1190,6 +1548,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.installations: body['installations'] = self.installations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body 
+ @classmethod def from_dict(cls, d: Dict[str, any]) -> ListInstallationsResponse: """Deserializes the ListInstallationsResponse from a dictionary.""" @@ -1210,6 +1575,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange_listings: body['exchange_listings'] = self.exchange_listings + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListListingsForExchangeResponse: """Deserializes the ListListingsForExchangeResponse from a dictionary.""" @@ -1230,6 +1602,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listings: body['listings'] = self.listings + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListListingsResponse: """Deserializes the ListListingsResponse from a dictionary.""" @@ -1254,6 +1633,14 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProviderAnalyticsDashboardResponse: """Deserializes the 
ListProviderAnalyticsDashboardResponse from a dictionary.""" @@ -1275,6 +1662,13 @@ def as_dict(self) -> dict: if self.providers: body['providers'] = [v.as_dict() for v in self.providers] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.providers: body['providers'] = self.providers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse: """Deserializes the ListProvidersResponse from a dictionary.""" @@ -1299,6 +1693,14 @@ def as_dict(self) -> dict: if self.summary: body['summary'] = self.summary.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Listing into a shallow dictionary of its immediate attributes.""" + body = {} + if self.detail: body['detail'] = self.detail + if self.id is not None: body['id'] = self.id + if self.summary: body['summary'] = self.summary + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Listing: """Deserializes the Listing from a dictionary.""" @@ -1391,6 +1793,31 @@ def as_dict(self) -> dict: if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ListingDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets: body['assets'] = self.assets + if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end + if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start + if self.collection_granularity: body['collection_granularity'] = self.collection_granularity + if self.cost is not None: body['cost'] = self.cost + if self.data_source is not None: body['data_source'] = self.data_source + if self.description is not None: body['description'] = 
self.description + if self.documentation_link is not None: body['documentation_link'] = self.documentation_link + if self.embedded_notebook_file_infos: + body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos + if self.file_ids: body['file_ids'] = self.file_ids + if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage + if self.license is not None: body['license'] = self.license + if self.pricing_model is not None: body['pricing_model'] = self.pricing_model + if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link + if self.size is not None: body['size'] = self.size + if self.support_link is not None: body['support_link'] = self.support_link + if self.tags: body['tags'] = self.tags + if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service + if self.update_frequency: body['update_frequency'] = self.update_frequency + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingDetail: """Deserializes the ListingDetail from a dictionary.""" @@ -1437,6 +1864,16 @@ def as_dict(self) -> dict: if self.share_info: body['share_info'] = self.share_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.repo_info: body['repo_info'] = self.repo_info + if self.share_info: body['share_info'] = self.share_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment: """Deserializes the ListingFulfillment from a dictionary.""" @@ -1457,6 +1894,12 @@ def as_dict(self) -> dict: if self.visibility is not None: body['visibility'] = 
self.visibility.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ListingSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.visibility is not None: body['visibility'] = self.visibility + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingSetting: """Deserializes the ListingSetting from a dictionary.""" @@ -1547,6 +1990,30 @@ def as_dict(self) -> dict: if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ListingSummary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.categories: body['categories'] = self.categories + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.created_by_id is not None: body['created_by_id'] = self.created_by_id + if self.exchange_ids: body['exchange_ids'] = self.exchange_ids + if self.git_repo: body['git_repo'] = self.git_repo + if self.listing_type is not None: body['listingType'] = self.listing_type + if self.name is not None: body['name'] = self.name + if self.provider_id is not None: body['provider_id'] = self.provider_id + if self.provider_region: body['provider_region'] = self.provider_region + if self.published_at is not None: body['published_at'] = self.published_at + if self.published_by is not None: body['published_by'] = self.published_by + if self.setting: body['setting'] = self.setting + if self.share: body['share'] = self.share + if self.status is not None: body['status'] = self.status + if self.subtitle is not None: body['subtitle'] = self.subtitle + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> ListingSummary: """Deserializes the ListingSummary from a dictionary.""" @@ -1586,6 +2053,13 @@ def as_dict(self) -> dict: if self.tag_values: body['tag_values'] = [v for v in self.tag_values] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListingTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.tag_name is not None: body['tag_name'] = self.tag_name + if self.tag_values: body['tag_values'] = self.tag_values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingTag: """Deserializes the ListingTag from a dictionary.""" @@ -1666,6 +2140,27 @@ def as_dict(self) -> dict: if self.updated_at is not None: body['updated_at'] = self.updated_at return body + def as_shallow_dict(self) -> dict: + """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.consumer_region: body['consumer_region'] = self.consumer_region + if self.contact_info: body['contact_info'] = self.contact_info + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.intended_use is not None: body['intended_use'] = self.intended_use + if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.provider_id is not None: body['provider_id'] = self.provider_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.share: body['share'] = self.share + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.updated_at is not None: 
body['updated_at'] = self.updated_at + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PersonalizationRequest: """Deserializes the PersonalizationRequest from a dictionary.""" @@ -1705,6 +2200,12 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard: """Deserializes the ProviderAnalyticsDashboard from a dictionary.""" @@ -1765,6 +2266,28 @@ def as_dict(self) -> dict: if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link return body + def as_shallow_dict(self) -> dict: + """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.business_contact_email is not None: + body['business_contact_email'] = self.business_contact_email + if self.company_website_link is not None: body['company_website_link'] = self.company_website_link + if self.dark_mode_icon_file_id is not None: + body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id + if self.dark_mode_icon_file_path is not None: + body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path + if self.description is not None: body['description'] = self.description + if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id + if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path + if self.id is not None: body['id'] = self.id + if self.is_featured is not None: body['is_featured'] = self.is_featured + if self.name is not None: body['name'] = self.name + if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link + if self.published_by is not None: body['published_by'] = self.published_by + if 
self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email + if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProviderInfo: """Deserializes the ProviderInfo from a dictionary.""" @@ -1797,6 +2320,13 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the RegionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cloud is not None: body['cloud'] = self.cloud + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegionInfo: """Deserializes the RegionInfo from a dictionary.""" @@ -1811,6 +2341,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RemoveExchangeForListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RemoveExchangeForListingResponse: """Deserializes the RemoveExchangeForListingResponse from a dictionary.""" @@ -1828,6 +2363,12 @@ def as_dict(self) -> dict: if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoInfo: """Deserializes the RepoInfo from a dictionary.""" @@ -1850,6 +2391,13 @@ def as_dict(self) -> dict: if self.repo_path is not None: body['repo_path'] = self.repo_path return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes.""" + 
body = {} + if self.repo_name is not None: body['repo_name'] = self.repo_name + if self.repo_path is not None: body['repo_path'] = self.repo_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoInstallation: """Deserializes the RepoInstallation from a dictionary.""" @@ -1869,6 +2417,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listings: body['listings'] = self.listings + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchListingsResponse: """Deserializes the SearchListingsResponse from a dictionary.""" @@ -1889,6 +2444,13 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ShareInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ShareInfo: """Deserializes the ShareInfo from a dictionary.""" @@ -1910,6 +2472,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_object_type is not None: body['data_object_type'] = self.data_object_type + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SharedDataObject: """Deserializes the SharedDataObject from a dictionary.""" @@ -1938,6 +2507,16 @@ def as_dict(self) -> dict: 
body['shareCredentialsVersion'] = self.share_credentials_version return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bearer_token is not None: body['bearerToken'] = self.bearer_token + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.expiration_time is not None: body['expirationTime'] = self.expiration_time + if self.share_credentials_version is not None: + body['shareCredentialsVersion'] = self.share_credentials_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenDetail: """Deserializes the TokenDetail from a dictionary.""" @@ -1983,6 +2562,18 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activation_url is not None: body['activation_url'] = self.activation_url + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.id is not None: body['id'] = self.id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenInfo: """Deserializes the TokenInfo from a dictionary.""" @@ -2008,6 +2599,13 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filter: body['filter'] = self.filter + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, 
d: Dict[str, any]) -> UpdateExchangeFilterRequest: """Deserializes the UpdateExchangeFilterRequest from a dictionary.""" @@ -2024,6 +2622,12 @@ def as_dict(self) -> dict: if self.filter: body['filter'] = self.filter.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filter: body['filter'] = self.filter + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterResponse: """Deserializes the UpdateExchangeFilterResponse from a dictionary.""" @@ -2043,6 +2647,13 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange: body['exchange'] = self.exchange + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeRequest: """Deserializes the UpdateExchangeRequest from a dictionary.""" @@ -2059,6 +2670,12 @@ def as_dict(self) -> dict: if self.exchange: body['exchange'] = self.exchange.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exchange: body['exchange'] = self.exchange + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeResponse: """Deserializes the UpdateExchangeResponse from a dictionary.""" @@ -2084,6 +2701,15 @@ def as_dict(self) -> dict: if self.rotate_token is not None: body['rotate_token'] = self.rotate_token return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.installation: body['installation'] = self.installation + if self.installation_id is not 
None: body['installation_id'] = self.installation_id + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.rotate_token is not None: body['rotate_token'] = self.rotate_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationRequest: """Deserializes the UpdateInstallationRequest from a dictionary.""" @@ -2103,6 +2729,12 @@ def as_dict(self) -> dict: if self.installation: body['installation'] = self.installation.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.installation: body['installation'] = self.installation + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationResponse: """Deserializes the UpdateInstallationResponse from a dictionary.""" @@ -2122,6 +2754,13 @@ def as_dict(self) -> dict: if self.listing: body['listing'] = self.listing.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.listing: body['listing'] = self.listing + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateListingRequest: """Deserializes the UpdateListingRequest from a dictionary.""" @@ -2138,6 +2777,12 @@ def as_dict(self) -> dict: if self.listing: body['listing'] = self.listing.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listing: body['listing'] = self.listing + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateListingResponse: """Deserializes the UpdateListingResponse from a dictionary.""" @@ -2166,6 +2811,16 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value 
return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.reason is not None: body['reason'] = self.reason + if self.request_id is not None: body['request_id'] = self.request_id + if self.share: body['share'] = self.share + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestRequest: """Deserializes the UpdatePersonalizationRequestRequest from a dictionary.""" @@ -2186,6 +2841,12 @@ def as_dict(self) -> dict: if self.request: body['request'] = self.request.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.request: body['request'] = self.request + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestResponse: """Deserializes the UpdatePersonalizationRequestResponse from a dictionary.""" @@ -2208,6 +2869,13 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardRequest: """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary.""" @@ -2232,6 +2900,14 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateProviderAnalyticsDashboardResponse 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardResponse: """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary.""" @@ -2253,6 +2929,13 @@ def as_dict(self) -> dict: if self.provider: body['provider'] = self.provider.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.provider: body['provider'] = self.provider + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateProviderRequest: """Deserializes the UpdateProviderRequest from a dictionary.""" @@ -2269,6 +2952,12 @@ def as_dict(self) -> dict: if self.provider: body['provider'] = self.provider.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.provider: body['provider'] = self.provider + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateProviderResponse: """Deserializes the UpdateProviderResponse from a dictionary.""" diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index b2cec8126..e551c72ca 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -90,6 +90,21 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Activity into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activity_type is not None: body['activity_type'] = self.activity_type + if self.comment is 
not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.from_stage is not None: body['from_stage'] = self.from_stage + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.system_comment is not None: body['system_comment'] = self.system_comment + if self.to_stage is not None: body['to_stage'] = self.to_stage + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Activity: """Deserializes the Activity from a dictionary.""" @@ -177,6 +192,17 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.archive_existing_versions is not None: + body['archive_existing_versions'] = self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequest: """Deserializes the ApproveTransitionRequest from a dictionary.""" @@ -198,6 +224,12 @@ def as_dict(self) -> dict: if self.activity: body['activity'] = self.activity.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activity: body['activity'] = self.activity + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequestResponse: """Deserializes the ApproveTransitionRequestResponse from a 
dictionary.""" @@ -248,6 +280,18 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CommentObject into a shallow dictionary of its immediate attributes.""" + body = {} + if self.available_actions: body['available_actions'] = self.available_actions + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CommentObject: """Deserializes the CommentObject from a dictionary.""" @@ -278,6 +322,14 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateComment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateComment: """Deserializes the CreateComment from a dictionary.""" @@ -295,6 +347,12 @@ def as_dict(self) -> dict: if self.comment: body['comment'] = self.comment.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment: body['comment'] = self.comment + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCommentResponse: """Deserializes the CreateCommentResponse from a dictionary.""" @@ -324,6 +382,14 @@ def as_dict(self) -> dict: if self.tags: body['tags'] 
= [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_location is not None: body['artifact_location'] = self.artifact_location + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExperiment: """Deserializes the CreateExperiment from a dictionary.""" @@ -343,6 +409,12 @@ def as_dict(self) -> dict: if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateExperimentResponse: """Deserializes the CreateExperimentResponse from a dictionary.""" @@ -368,6 +440,14 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateModelRequest: """Deserializes the CreateModelRequest from a dictionary.""" @@ -386,6 +466,12 @@ def as_dict(self) -> dict: if self.registered_model: body['registered_model'] = self.registered_model.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.registered_model: body['registered_model'] = 
self.registered_model + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateModelResponse: """Deserializes the CreateModelResponse from a dictionary.""" @@ -425,6 +511,17 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionRequest: """Deserializes the CreateModelVersionRequest from a dictionary.""" @@ -447,6 +544,12 @@ def as_dict(self) -> dict: if self.model_version: body['model_version'] = self.model_version.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_version: body['model_version'] = self.model_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionResponse: """Deserializes the CreateModelVersionResponse from a dictionary.""" @@ -515,6 +618,17 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = self.events + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec + if self.job_spec: body['job_spec'] = 
self.job_spec + if self.model_name is not None: body['model_name'] = self.model_name + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRegistryWebhook: """Deserializes the CreateRegistryWebhook from a dictionary.""" @@ -550,6 +664,15 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.start_time is not None: body['start_time'] = self.start_time + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRun: """Deserializes the CreateRun from a dictionary.""" @@ -570,6 +693,12 @@ def as_dict(self) -> dict: if self.run: body['run'] = self.run.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run: body['run'] = self.run + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRunResponse: """Deserializes the CreateRunResponse from a dictionary.""" @@ -607,6 +736,15 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequest: """Deserializes 
the CreateTransitionRequest from a dictionary.""" @@ -627,6 +765,12 @@ def as_dict(self) -> dict: if self.request: body['request'] = self.request.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.request: body['request'] = self.request + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequestResponse: """Deserializes the CreateTransitionRequestResponse from a dictionary.""" @@ -643,6 +787,12 @@ def as_dict(self) -> dict: if self.webhook: body['webhook'] = self.webhook.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateWebhookResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.webhook: body['webhook'] = self.webhook + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateWebhookResponse: """Deserializes the CreateWebhookResponse from a dictionary.""" @@ -684,6 +834,17 @@ def as_dict(self) -> dict: if self.source_type is not None: body['source_type'] = self.source_type return body + def as_shallow_dict(self) -> dict: + """Serializes the Dataset into a shallow dictionary of its immediate attributes.""" + body = {} + if self.digest is not None: body['digest'] = self.digest + if self.name is not None: body['name'] = self.name + if self.profile is not None: body['profile'] = self.profile + if self.schema is not None: body['schema'] = self.schema + if self.source is not None: body['source'] = self.source + if self.source_type is not None: body['source_type'] = self.source_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Dataset: """Deserializes the Dataset from a dictionary.""" @@ -710,6 +871,13 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the DatasetInput into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.dataset: body['dataset'] = self.dataset + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DatasetInput: """Deserializes the DatasetInput from a dictionary.""" @@ -724,6 +892,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCommentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteCommentResponse: """Deserializes the DeleteCommentResponse from a dictionary.""" @@ -741,6 +914,12 @@ def as_dict(self) -> dict: if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteExperiment: """Deserializes the DeleteExperiment from a dictionary.""" @@ -755,6 +934,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteExperimentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteExperimentResponse: """Deserializes the DeleteExperimentResponse from a dictionary.""" @@ -769,6 +953,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteModelResponse: """Deserializes the DeleteModelResponse from a dictionary.""" @@ -783,6 +972,11 @@ def as_dict(self) -> dict: body = {} return body + 
def as_shallow_dict(self) -> dict: + """Serializes the DeleteModelTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteModelTagResponse: """Deserializes the DeleteModelTagResponse from a dictionary.""" @@ -797,6 +991,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionResponse: """Deserializes the DeleteModelVersionResponse from a dictionary.""" @@ -811,6 +1010,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteModelVersionTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionTagResponse: """Deserializes the DeleteModelVersionTagResponse from a dictionary.""" @@ -828,6 +1032,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRun: """Deserializes the DeleteRun from a dictionary.""" @@ -842,6 +1052,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse: """Deserializes the DeleteRunResponse from a dictionary.""" @@ -869,6 +1084,14 @@ def as_dict(self) -> dict: if self.max_timestamp_millis is not 
None: body['max_timestamp_millis'] = self.max_timestamp_millis return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRuns: """Deserializes the DeleteRuns from a dictionary.""" @@ -888,6 +1111,12 @@ def as_dict(self) -> dict: if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRunsResponse: """Deserializes the DeleteRunsResponse from a dictionary.""" @@ -909,6 +1138,13 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteTag: """Deserializes the DeleteTag from a dictionary.""" @@ -923,6 +1159,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteTagResponse: """Deserializes the DeleteTagResponse from a dictionary.""" @@ -937,6 +1178,11 @@ def 
as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteTransitionRequestResponse: """Deserializes the DeleteTransitionRequestResponse from a dictionary.""" @@ -959,6 +1205,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteWebhookResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteWebhookResponse: """Deserializes the DeleteWebhookResponse from a dictionary.""" @@ -1001,6 +1252,18 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the Experiment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_location is not None: body['artifact_location'] = self.artifact_location + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.last_update_time is not None: body['last_update_time'] = self.last_update_time + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Experiment: """Deserializes the Experiment from a dictionary.""" @@ -1037,6 +1300,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: 
body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlRequest: """Deserializes the ExperimentAccessControlRequest from a dictionary.""" @@ -1074,6 +1347,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlResponse: """Deserializes the ExperimentAccessControlResponse from a dictionary.""" @@ -1101,6 +1385,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentPermission: 
"""Deserializes the ExperimentPermission from a dictionary.""" @@ -1134,6 +1426,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissions: """Deserializes the ExperimentPermissions from a dictionary.""" @@ -1157,6 +1457,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsDescription: """Deserializes the ExperimentPermissionsDescription from a dictionary.""" @@ -1179,6 +1486,13 @@ def as_dict(self) -> dict: if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsRequest: """Deserializes the ExperimentPermissionsRequest 
from a dictionary.""" @@ -1202,6 +1516,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExperimentTag: """Deserializes the ExperimentTag from a dictionary.""" @@ -1227,6 +1548,14 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" @@ -1244,6 +1573,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetExperimentPermissionLevelsResponse: """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary.""" @@ -1261,6 +1596,12 @@ def as_dict(self) -> dict: if self.experiment: body['experiment'] = self.experiment.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment: 
body['experiment'] = self.experiment + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetExperimentResponse: """Deserializes the GetExperimentResponse from a dictionary.""" @@ -1282,6 +1623,13 @@ def as_dict(self) -> dict: if self.stages: body['stages'] = [v for v in self.stages] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.stages: body['stages'] = self.stages + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsRequest: """Deserializes the GetLatestVersionsRequest from a dictionary.""" @@ -1300,6 +1648,12 @@ def as_dict(self) -> dict: if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_versions: body['model_versions'] = self.model_versions + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsResponse: """Deserializes the GetLatestVersionsResponse from a dictionary.""" @@ -1321,6 +1675,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metrics: body['metrics'] = self.metrics + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetMetricHistoryResponse: """Deserializes the GetMetricHistoryResponse from a dictionary.""" @@ -1339,6 +1700,13 @@ def as_dict(self) -> dict: body['registered_model_databricks'] = self.registered_model_databricks.as_dict() 
return body + def as_shallow_dict(self) -> dict: + """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.registered_model_databricks: + body['registered_model_databricks'] = self.registered_model_databricks + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetModelResponse: """Deserializes the GetModelResponse from a dictionary.""" @@ -1356,6 +1724,12 @@ def as_dict(self) -> dict: if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri return body + def as_shallow_dict(self) -> dict: + """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetModelVersionDownloadUriResponse: """Deserializes the GetModelVersionDownloadUriResponse from a dictionary.""" @@ -1372,6 +1746,12 @@ def as_dict(self) -> dict: if self.model_version: body['model_version'] = self.model_version.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_version: body['model_version'] = self.model_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetModelVersionResponse: """Deserializes the GetModelVersionResponse from a dictionary.""" @@ -1389,6 +1769,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
GetRegisteredModelPermissionLevelsResponse: """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary.""" @@ -1407,6 +1793,12 @@ def as_dict(self) -> dict: if self.run: body['run'] = self.run.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run: body['run'] = self.run + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRunResponse: """Deserializes the GetRunResponse from a dictionary.""" @@ -1444,6 +1836,16 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authorization is not None: body['authorization'] = self.authorization + if self.enable_ssl_verification is not None: + body['enable_ssl_verification'] = self.enable_ssl_verification + if self.secret is not None: body['secret'] = self.secret + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpec: """Deserializes the HttpUrlSpec from a dictionary.""" @@ -1473,6 +1875,14 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enable_ssl_verification is not None: + body['enable_ssl_verification'] = self.enable_ssl_verification + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpecWithoutSecret: """Deserializes the HttpUrlSpecWithoutSecret from a dictionary.""" @@ -1494,6 +1904,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the InputTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> InputTag: """Deserializes the InputTag from a dictionary.""" @@ -1520,6 +1937,14 @@ def as_dict(self) -> dict: if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body + def as_shallow_dict(self) -> dict: + """Serializes the JobSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_token is not None: body['access_token'] = self.access_token + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobSpec: """Deserializes the JobSpec from a dictionary.""" @@ -1545,6 +1970,13 @@ def as_dict(self) -> dict: if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body + def as_shallow_dict(self) -> dict: + """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> JobSpecWithoutSecret: """Deserializes the JobSpecWithoutSecret from a dictionary.""" @@ -1570,6 +2002,14 @@ def as_dict(self) -> dict: if self.root_uri is not None: body['root_uri'] = self.root_uri return body + def as_shallow_dict(self) -> dict: + """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.files: body['files'] = self.files + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.root_uri is not None: body['root_uri'] = 
self.root_uri + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListArtifactsResponse: """Deserializes the ListArtifactsResponse from a dictionary.""" @@ -1594,6 +2034,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiments: body['experiments'] = self.experiments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListExperimentsResponse: """Deserializes the ListExperimentsResponse from a dictionary.""" @@ -1615,6 +2062,13 @@ def as_dict(self) -> dict: if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListModelsResponse: """Deserializes the ListModelsResponse from a dictionary.""" @@ -1637,6 +2091,13 @@ def as_dict(self) -> dict: if self.webhooks: body['webhooks'] = [v.as_dict() for v in self.webhooks] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.webhooks: body['webhooks'] = self.webhooks + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRegistryWebhooks: """Deserializes the ListRegistryWebhooks from a dictionary.""" @@ -1655,6 
+2116,12 @@ def as_dict(self) -> dict: if self.requests: body['requests'] = [v.as_dict() for v in self.requests] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.requests: body['requests'] = self.requests + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListTransitionRequestsResponse: """Deserializes the ListTransitionRequestsResponse from a dictionary.""" @@ -1687,6 +2154,15 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the LogBatch into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metrics: body['metrics'] = self.metrics + if self.params: body['params'] = self.params + if self.run_id is not None: body['run_id'] = self.run_id + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogBatch: """Deserializes the LogBatch from a dictionary.""" @@ -1704,6 +2180,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the LogBatchResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogBatchResponse: """Deserializes the LogBatchResponse from a dictionary.""" @@ -1725,6 +2206,13 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the LogInputs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.datasets: body['datasets'] = self.datasets + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogInputs: """Deserializes the LogInputs from a dictionary.""" @@ -1739,6 +2227,11 @@ def 
as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the LogInputsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogInputsResponse: """Deserializes the LogInputsResponse from a dictionary.""" @@ -1777,6 +2270,17 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the LogMetric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.step is not None: body['step'] = self.step + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogMetric: """Deserializes the LogMetric from a dictionary.""" @@ -1796,6 +2300,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogMetricResponse: """Deserializes the LogMetricResponse from a dictionary.""" @@ -1817,6 +2326,13 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the LogModel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_json is not None: body['model_json'] = self.model_json + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogModel: """Deserializes the LogModel from a dictionary.""" @@ -1831,6 +2347,11 @@ def as_dict(self) 
-> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogModelResponse: """Deserializes the LogModelResponse from a dictionary.""" @@ -1861,6 +2382,15 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the LogParam into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogParam: """Deserializes the LogParam from a dictionary.""" @@ -1878,6 +2408,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the LogParamResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LogParamResponse: """Deserializes the LogParamResponse from a dictionary.""" @@ -1907,6 +2442,15 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Metric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.step is not None: body['step'] = self.step + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Metric: """Deserializes the Metric from a dictionary.""" @@ -1953,6 +2497,19 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = 
self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Model into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = self.latest_versions + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Model: """Deserializes the Model from a dictionary.""" @@ -2010,6 +2567,21 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = self.latest_versions + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelDatabricks: """Deserializes the ModelDatabricks from a dictionary.""" @@ -2039,6 +2611,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the ModelTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelTag: """Deserializes the ModelTag from a dictionary.""" @@ -2106,6 +2685,25 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelVersion into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersion: """Deserializes the ModelVersion from a dictionary.""" @@ -2205,6 +2803,26 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = 
self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersionDatabricks: """Deserializes the ModelVersionDatabricks from a dictionary.""" @@ -2247,6 +2865,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelVersionTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersionTag: """Deserializes the ModelVersionTag from a dictionary.""" @@ -2261,8 +2886,15 @@ class Param: value: Optional[str] = None """Value associated with this param.""" - def as_dict(self) -> dict: - """Serializes the Param into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the Param into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: body['key'] = self.key + if 
self.value is not None: body['value'] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Param into a shallow dictionary of its immediate attributes.""" body = {} if self.key is not None: body['key'] = self.key if self.value is not None: body['value'] = self.value @@ -2309,6 +2941,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlRequest: """Deserializes the RegisteredModelAccessControlRequest from a dictionary.""" @@ -2346,6 +2988,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlResponse: """Deserializes the RegisteredModelAccessControlResponse from a dictionary.""" @@ 
-2373,6 +3026,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermission: """Deserializes the RegisteredModelPermission from a dictionary.""" @@ -2408,6 +3069,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissions: """Deserializes the RegisteredModelPermissions from a dictionary.""" @@ -2431,6 +3100,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsDescription: 
"""Deserializes the RegisteredModelPermissionsDescription from a dictionary.""" @@ -2453,6 +3129,13 @@ def as_dict(self) -> dict: if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsRequest: """Deserializes the RegisteredModelPermissionsRequest from a dictionary.""" @@ -2536,6 +3219,21 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = self.events + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.model_name is not None: body['model_name'] = self.model_name + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RegistryWebhook: """Deserializes the RegistryWebhook from a dictionary.""" @@ -2611,6 +3309,15 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the 
RejectTransitionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequest: """Deserializes the RejectTransitionRequest from a dictionary.""" @@ -2631,6 +3338,12 @@ def as_dict(self) -> dict: if self.activity: body['activity'] = self.activity.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activity: body['activity'] = self.activity + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequestResponse: """Deserializes the RejectTransitionRequestResponse from a dictionary.""" @@ -2652,6 +3365,13 @@ def as_dict(self) -> dict: if self.new_name is not None: body['new_name'] = self.new_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RenameModelRequest: """Deserializes the RenameModelRequest from a dictionary.""" @@ -2668,6 +3388,12 @@ def as_dict(self) -> dict: if self.registered_model: body['registered_model'] = self.registered_model.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.registered_model: body['registered_model'] = self.registered_model + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
RenameModelResponse: """Deserializes the RenameModelResponse from a dictionary.""" @@ -2685,6 +3411,12 @@ def as_dict(self) -> dict: if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreExperiment: """Deserializes the RestoreExperiment from a dictionary.""" @@ -2699,6 +3431,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreExperimentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreExperimentResponse: """Deserializes the RestoreExperimentResponse from a dictionary.""" @@ -2716,6 +3453,12 @@ def as_dict(self) -> dict: if self.run_id is not None: body['run_id'] = self.run_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: body['run_id'] = self.run_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreRun: """Deserializes the RestoreRun from a dictionary.""" @@ -2730,6 +3473,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreRunResponse: """Deserializes the RestoreRunResponse from a dictionary.""" @@ -2757,6 +3505,14 @@ def as_dict(self) -> dict: if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis return body + def as_shallow_dict(self) -> 
dict: + """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreRuns: """Deserializes the RestoreRuns from a dictionary.""" @@ -2776,6 +3532,12 @@ def as_dict(self) -> dict: if self.runs_restored is not None: body['runs_restored'] = self.runs_restored return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.runs_restored is not None: body['runs_restored'] = self.runs_restored + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreRunsResponse: """Deserializes the RestoreRunsResponse from a dictionary.""" @@ -2801,6 +3563,14 @@ def as_dict(self) -> dict: if self.inputs: body['inputs'] = self.inputs.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Run into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: body['data'] = self.data + if self.info: body['info'] = self.info + if self.inputs: body['inputs'] = self.inputs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Run: """Deserializes the Run from a dictionary.""" @@ -2828,6 +3598,14 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the RunData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metrics: body['metrics'] = self.metrics + if self.params: body['params'] = self.params + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunData: 
"""Deserializes the RunData from a dictionary.""" @@ -2883,6 +3661,20 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RunInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.end_time is not None: body['end_time'] = self.end_time + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunInfo: """Deserializes the RunInfo from a dictionary.""" @@ -2918,6 +3710,12 @@ def as_dict(self) -> dict: if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs] return body + def as_shallow_dict(self) -> dict: + """Serializes the RunInputs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunInputs: """Deserializes the RunInputs from a dictionary.""" @@ -2939,6 +3737,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RunTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RunTag: 
"""Deserializes the RunTag from a dictionary.""" @@ -2975,6 +3780,16 @@ def as_dict(self) -> dict: if self.view_type is not None: body['view_type'] = self.view_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes.""" + body = {} + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = self.order_by + if self.page_token is not None: body['page_token'] = self.page_token + if self.view_type is not None: body['view_type'] = self.view_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchExperiments: """Deserializes the SearchExperiments from a dictionary.""" @@ -3001,6 +3816,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiments: body['experiments'] = self.experiments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchExperimentsResponse: """Deserializes the SearchExperimentsResponse from a dictionary.""" @@ -3032,6 +3854,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_versions: body['model_versions'] = self.model_versions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchModelVersionsResponse: """Deserializes the 
SearchModelVersionsResponse from a dictionary.""" @@ -3054,6 +3883,13 @@ def as_dict(self) -> dict: if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchModelsResponse: """Deserializes the SearchModelsResponse from a dictionary.""" @@ -3105,6 +3941,17 @@ def as_dict(self) -> dict: if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchRuns into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_ids: body['experiment_ids'] = self.experiment_ids + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = self.order_by + if self.page_token is not None: body['page_token'] = self.page_token + if self.run_view_type is not None: body['run_view_type'] = self.run_view_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchRuns: """Deserializes the SearchRuns from a dictionary.""" @@ -3131,6 +3978,13 @@ def as_dict(self) -> dict: if self.runs: body['runs'] = [v.as_dict() for v in self.runs] return body + def as_shallow_dict(self) -> dict: + """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = self.runs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SearchRunsResponse: 
"""Deserializes the SearchRunsResponse from a dictionary.""" @@ -3166,6 +4020,14 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetExperimentTag: """Deserializes the SetExperimentTag from a dictionary.""" @@ -3182,6 +4044,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetExperimentTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetExperimentTagResponse: """Deserializes the SetExperimentTagResponse from a dictionary.""" @@ -3210,6 +4077,14 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetModelTagRequest: """Deserializes the SetModelTagRequest from a dictionary.""" @@ -3224,6 +4099,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetModelTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetModelTagResponse: """Deserializes the SetModelTagResponse from a dictionary.""" @@ -3256,6 
+4136,15 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagRequest: """Deserializes the SetModelVersionTagRequest from a dictionary.""" @@ -3273,6 +4162,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetModelVersionTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagResponse: """Deserializes the SetModelVersionTagResponse from a dictionary.""" @@ -3305,6 +4199,15 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SetTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetTag: """Deserializes the SetTag from a dictionary.""" @@ -3322,6 +4225,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetTagResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetTagResponse: """Deserializes the SetTagResponse from a 
dictionary.""" @@ -3375,6 +4283,13 @@ def as_dict(self) -> dict: if self.status_code is not None: body['status_code'] = self.status_code return body + def as_shallow_dict(self) -> dict: + """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.body is not None: body['body'] = self.body + if self.status_code is not None: body['status_code'] = self.status_code + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhook: """Deserializes the TestRegistryWebhook from a dictionary.""" @@ -3397,6 +4312,13 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.event is not None: body['event'] = self.event + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookRequest: """Deserializes the TestRegistryWebhookRequest from a dictionary.""" @@ -3414,6 +4336,12 @@ def as_dict(self) -> dict: if self.webhook: body['webhook'] = self.webhook.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.webhook: body['webhook'] = self.webhook + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookResponse: """Deserializes the TestRegistryWebhookResponse from a dictionary.""" @@ -3456,6 +4384,17 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes.""" + body = {} + if self.archive_existing_versions is not None: + body['archive_existing_versions'] = 
self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TransitionModelVersionStageDatabricks: """Deserializes the TransitionModelVersionStageDatabricks from a dictionary.""" @@ -3503,6 +4442,16 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.available_actions: body['available_actions'] = self.available_actions + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.to_stage is not None: body['to_stage'] = self.to_stage + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TransitionRequest: """Deserializes the TransitionRequest from a dictionary.""" @@ -3523,6 +4472,12 @@ def as_dict(self) -> dict: if self.model_version: body['model_version'] = self.model_version.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_version: body['model_version'] = self.model_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TransitionStageResponse: """Deserializes the TransitionStageResponse from a dictionary.""" @@ -3544,6 +4499,13 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateComment into a shallow dictionary of its immediate attributes.""" + body = {} 
+ if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateComment: """Deserializes the UpdateComment from a dictionary.""" @@ -3561,6 +4523,12 @@ def as_dict(self) -> dict: if self.comment: body['comment'] = self.comment.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment: body['comment'] = self.comment + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateCommentResponse: """Deserializes the UpdateCommentResponse from a dictionary.""" @@ -3582,6 +4550,13 @@ def as_dict(self) -> dict: if self.new_name is not None: body['new_name'] = self.new_name return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.new_name is not None: body['new_name'] = self.new_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExperiment: """Deserializes the UpdateExperiment from a dictionary.""" @@ -3596,6 +4571,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateExperimentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateExperimentResponse: """Deserializes the UpdateExperimentResponse from a dictionary.""" @@ -3617,6 +4597,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = 
self.description + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateModelRequest: """Deserializes the UpdateModelRequest from a dictionary.""" @@ -3631,6 +4618,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateModelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateModelResponse: """Deserializes the UpdateModelResponse from a dictionary.""" @@ -3656,6 +4648,14 @@ def as_dict(self) -> dict: if self.version is not None: body['version'] = self.version return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.version is not None: body['version'] = self.version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest: """Deserializes the UpdateModelVersionRequest from a dictionary.""" @@ -3672,6 +4672,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionResponse: """Deserializes the UpdateModelVersionResponse from a dictionary.""" @@ -3740,6 +4745,17 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = 
self.events + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRegistryWebhook: """Deserializes the UpdateRegistryWebhook from a dictionary.""" @@ -3775,6 +4791,15 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRun into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time is not None: body['end_time'] = self.end_time + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRun: """Deserializes the UpdateRun from a dictionary.""" @@ -3795,6 +4820,12 @@ def as_dict(self) -> dict: if self.run_info: body['run_info'] = self.run_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_info: body['run_info'] = self.run_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRunResponse: """Deserializes the UpdateRunResponse from a dictionary.""" @@ -3819,6 +4850,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWebhookResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateWebhookResponse: """Deserializes the UpdateWebhookResponse from a dictionary.""" @@ -4596,7 +5632,8 @@ def set_permissions( ) -> ExperimentPermissions: """Set experiment permissions. 
- Sets permissions on an experiment. Experiments can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param experiment_id: str The experiment for which to get or manage permissions. @@ -5571,8 +6608,8 @@ def set_permissions( ) -> RegisteredModelPermissions: """Set registered model permissions. - Sets permissions on a registered model. Registered models can inherit permissions from their root - object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param registered_model_id: str The registered model for which to get or manage permissions. diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 0c439ae7e..37d464af6 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -31,6 +31,10 @@ class CreateCustomAppIntegration: token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" + user_authorized_scopes: Optional[List[str]] = None + """Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. 
Must be a subset of scopes.""" + def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} @@ -39,6 +43,19 @@ def as_dict(self) -> dict: if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] if self.scopes: body['scopes'] = [v for v in self.scopes] if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: + body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.confidential is not None: body['confidential'] = self.confidential + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod @@ -48,7 +65,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration: name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), - token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), + user_authorized_scopes=d.get('user_authorized_scopes', None)) @dataclass @@ -71,6 +89,14 @@ def as_dict(self) -> dict: if self.integration_id is not None: body['integration_id'] = self.integration_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.client_id is not None: body['client_id'] = self.client_id + if self.client_secret is 
not None: body['client_secret'] = self.client_secret + if self.integration_id is not None: body['integration_id'] = self.integration_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput: """Deserializes the CreateCustomAppIntegrationOutput from a dictionary.""" @@ -94,6 +120,13 @@ def as_dict(self) -> dict: if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.app_id is not None: body['app_id'] = self.app_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration: """Deserializes the CreatePublishedAppIntegration from a dictionary.""" @@ -112,6 +145,12 @@ def as_dict(self) -> dict: if self.integration_id is not None: body['integration_id'] = self.integration_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.integration_id is not None: body['integration_id'] = self.integration_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegrationOutput: """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary.""" @@ -149,6 +188,17 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.id is not None: body['id'] = self.id + if self.secret is not None: body['secret'] = self.secret + if 
self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse: """Deserializes the CreateServicePrincipalSecretResponse from a dictionary.""" @@ -161,32 +211,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse: @dataclass -class DataPlaneInfo: - authorization_details: Optional[str] = None - """Authorization details as a string.""" - - endpoint_url: Optional[str] = None - """The URL of the endpoint for this operation in the dataplane.""" +class DeleteCustomAppIntegrationOutput: def as_dict(self) -> dict: - """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization_details is not None: body['authorization_details'] = self.authorization_details - if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo: - """Deserializes the DataPlaneInfo from a dictionary.""" - return cls(authorization_details=d.get('authorization_details', None), - endpoint_url=d.get('endpoint_url', None)) - - -@dataclass -class DeleteCustomAppIntegrationOutput: - - def as_dict(self) -> dict: - """Serializes the DeleteCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} return body @@ -204,6 +237,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeletePublishedAppIntegrationOutput into a 
shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeletePublishedAppIntegrationOutput: """Deserializes the DeletePublishedAppIntegrationOutput from a dictionary.""" @@ -218,12 +256,76 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() +@dataclass +class FederationPolicy: + create_time: Optional[str] = None + """Creation time of the federation policy.""" + + description: Optional[str] = None + """Description of the federation policy.""" + + name: Optional[str] = None + """Resource name for the federation policy. Example values include + `accounts//federationPolicies/my-federation-policy` for Account Federation Policies, + and + `accounts//servicePrincipals//federationPolicies/my-federation-policy` + for Service Principal Federation Policies. Typically an output parameter, which does not need to + be specified in create or update requests. 
If specified in a request, must match the value in + the request URL.""" + + oidc_policy: Optional[OidcFederationPolicy] = None + """Specifies the policy to use for validating OIDC claims in your federated tokens.""" + + uid: Optional[str] = None + """Unique, immutable id of the federation policy.""" + + update_time: Optional[str] = None + """Last update time of the federation policy.""" + + def as_dict(self) -> dict: + """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict() + if self.uid is not None: body['uid'] = self.uid + if self.update_time is not None: body['update_time'] = self.update_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy + if self.uid is not None: body['uid'] = self.uid + if self.update_time is not None: body['update_time'] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> FederationPolicy: + """Deserializes the FederationPolicy from a dictionary.""" + return cls(create_time=d.get('create_time', None), + description=d.get('description', None), + name=d.get('name', None), + oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy), + uid=d.get('uid', None), + update_time=d.get('update_time', None)) + + @dataclass class GetCustomAppIntegrationOutput: client_id: Optional[str] = None @@ -252,6 +354,10 @@ class 
GetCustomAppIntegrationOutput: token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" + user_authorized_scopes: Optional[List[str]] = None + """Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. Must be a subset of scopes.""" + def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} @@ -265,6 +371,24 @@ def as_dict(self) -> dict: if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] if self.scopes: body['scopes'] = [v for v in self.scopes] if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: + body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.client_id is not None: body['client_id'] = self.client_id + if self.confidential is not None: body['confidential'] = self.confidential + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.creator_username is not None: body['creator_username'] = self.creator_username + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod @@ -279,7 +403,8 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput: 
name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), - token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), + user_authorized_scopes=d.get('user_authorized_scopes', None)) @dataclass @@ -296,6 +421,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput: """Deserializes the GetCustomAppIntegrationsOutput from a dictionary.""" @@ -332,6 +464,17 @@ def as_dict(self) -> dict: if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.app_id is not None: body['app_id'] = self.app_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput: """Deserializes the GetPublishedAppIntegrationOutput from a dictionary.""" @@ -357,6 +500,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body 
+ def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput: """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary.""" @@ -380,6 +530,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput: """Deserializes the GetPublishedAppsOutput from a dictionary.""" @@ -387,21 +544,118 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput: next_page_token=d.get('next_page_token', None)) +@dataclass +class ListFederationPoliciesResponse: + next_page_token: Optional[str] = None + + policies: Optional[List[FederationPolicy]] = None + + def as_dict(self) -> dict: + """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = self.policies + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, any]) -> ListFederationPoliciesResponse: + """Deserializes the ListFederationPoliciesResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + policies=_repeated_dict(d, 'policies', FederationPolicy)) + + @dataclass class ListServicePrincipalSecretsResponse: + next_page_token: Optional[str] = None + """A token, which can be sent as `page_token` to retrieve the next page.""" + secrets: Optional[List[SecretInfo]] = None """List of the secrets""" def as_dict(self) -> dict: """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.secrets: body['secrets'] = self.secrets + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse: """Deserializes the ListServicePrincipalSecretsResponse from a dictionary.""" - return cls(secrets=_repeated_dict(d, 'secrets', SecretInfo)) + return cls(next_page_token=d.get('next_page_token', None), + secrets=_repeated_dict(d, 'secrets', SecretInfo)) + + +@dataclass +class OidcFederationPolicy: + """Specifies the policy to use for validating OIDC claims in your federated tokens.""" + + audiences: Optional[List[str]] = None + """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience + identifier is intended to represent the recipient of the token. Can be any non-empty string + value. As long as the audience in the token matches at least one audience in the policy, the + token is considered a match. 
If audiences is unspecified, defaults to your Databricks account + id.""" + + issuer: Optional[str] = None + """The required token issuer, as specified in the 'iss' claim of federated tokens.""" + + jwks_json: Optional[str] = None + """The public keys used to validate the signature of federated tokens, in JWKS format. If + unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s + well known endpoint. Databricks strongly recommends relying on your issuer’s well known + endpoint for discovering public keys.""" + + subject: Optional[str] = None + """The required token subject, as specified in the subject claim of federated tokens. Must be + specified for service principal federation policies. Must not be specified for account + federation policies.""" + + subject_claim: Optional[str] = None + """The claim that contains the subject of the token. If unspecified, the default value is 'sub'.""" + + def as_dict(self) -> dict: + """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.audiences: body['audiences'] = [v for v in self.audiences] + if self.issuer is not None: body['issuer'] = self.issuer + if self.jwks_json is not None: body['jwks_json'] = self.jwks_json + if self.subject is not None: body['subject'] = self.subject + if self.subject_claim is not None: body['subject_claim'] = self.subject_claim + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.audiences: body['audiences'] = self.audiences + if self.issuer is not None: body['issuer'] = self.issuer + if self.jwks_json is not None: body['jwks_json'] = self.jwks_json + if self.subject is not None: body['subject'] = self.subject + if self.subject_claim is not None: body['subject_claim'] = self.subject_claim + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 
OidcFederationPolicy: + """Deserializes the OidcFederationPolicy from a dictionary.""" + return cls(audiences=d.get('audiences', None), + issuer=d.get('issuer', None), + jwks_json=d.get('jwks_json', None), + subject=d.get('subject', None), + subject_claim=d.get('subject_claim', None)) @dataclass @@ -441,6 +695,19 @@ def as_dict(self) -> dict: if self.scopes: body['scopes'] = [v for v in self.scopes] return body + def as_shallow_dict(self) -> dict: + """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.app_id is not None: body['app_id'] = self.app_id + if self.client_id is not None: body['client_id'] = self.client_id + if self.description is not None: body['description'] = self.description + if self.is_confidential_client is not None: + body['is_confidential_client'] = self.is_confidential_client + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PublishedAppOutput: """Deserializes the PublishedAppOutput from a dictionary.""" @@ -480,6 +747,16 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the SecretInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.id is not None: body['id'] = self.id + if self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SecretInfo: """Deserializes the SecretInfo from a dictionary.""" @@ -507,6 +784,15 @@ def as_dict(self) -> dict: 
body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_token_ttl_in_minutes is not None: + body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes + if self.refresh_token_ttl_in_minutes is not None: + body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy: """Deserializes the TokenAccessPolicy from a dictionary.""" @@ -521,15 +807,36 @@ class UpdateCustomAppIntegration: redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls to be updated in the custom OAuth app integration""" + scopes: Optional[List[str]] = None + """List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs + this will fully replace the existing values instead of appending""" + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy to be updated in the custom OAuth app integration""" + user_authorized_scopes: Optional[List[str]] = None + """Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. 
Must be a subset of scopes.""" + def as_dict(self) -> dict: """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} if self.integration_id is not None: body['integration_id'] = self.integration_id if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: + body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod @@ -537,7 +844,9 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration: """Deserializes the UpdateCustomAppIntegration from a dictionary.""" return cls(integration_id=d.get('integration_id', None), redirect_urls=d.get('redirect_urls', None), - token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + scopes=d.get('scopes', None), + token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), + user_authorized_scopes=d.get('user_authorized_scopes', None)) @dataclass @@ -548,6 +857,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
UpdateCustomAppIntegrationOutput: """Deserializes the UpdateCustomAppIntegrationOutput from a dictionary.""" @@ -568,6 +882,13 @@ def as_dict(self) -> dict: if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegration: """Deserializes the UpdatePublishedAppIntegration from a dictionary.""" @@ -583,12 +904,179 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput: """Deserializes the UpdatePublishedAppIntegrationOutput from a dictionary.""" return cls() +class AccountFederationPolicyAPI: + """These APIs manage account federation policies. + + Account federation policies allow users and service principals in your Databricks account to securely + access Databricks APIs using tokens from your trusted identity providers (IdPs). + + With token federation, your users and service principals can exchange tokens from your IdP for Databricks + OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage + Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. + Databricks token federation is typically used in combination with [SCIM], so users in your IdP are + synchronized into your Databricks account. 
+ + Token federation is configured in your Databricks account using an account federation policy. An account + federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * + how to determine which Databricks user, or subject, a token is issued for + + To configure a federation policy, you provide the following: * The required token __issuer__, as specified + in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed + token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to + represent the recipient of the token. As long as the audience in the token matches at least one audience + in the policy, the token is considered a match. If unspecified, the default value is your Databricks + account id. * The __subject claim__, which indicates which token claim contains the Databricks username of + the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the + public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), + Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks + strongly recommends relying on your issuer’s well known endpoint for discovering public keys. + + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] + subject_claim: "sub" ``` + + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user + `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": + "username@mycompany.com" } ``` + + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if + your users do not already have the ability to generate tokens that are compatible with your federation + policy. 
+ + You do not need to configure an OAuth application in Databricks to use token federation. + + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + policy: Optional[FederationPolicy] = None, + policy_id: Optional[str] = None) -> FederationPolicy: + """Create account federation policy. + + :param policy: :class:`FederationPolicy` (optional) + :param policy_id: str (optional) + The identifier for the federation policy. The identifier must contain only lowercase alphanumeric + characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. + + :returns: :class:`FederationPolicy` + """ + body = policy.as_dict() + query = {} + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', + query=query, + body=body, + headers=headers) + return FederationPolicy.from_dict(res) + + def delete(self, policy_id: str): + """Delete account federation policy. + + :param policy_id: str + The identifier for the federation policy. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', + f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}', + headers=headers) + + def get(self, policy_id: str) -> FederationPolicy: + """Get account federation policy. + + :param policy_id: str + The identifier for the federation policy. 
+ + :returns: :class:`FederationPolicy` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}', + headers=headers) + return FederationPolicy.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + """List account federation policies. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`FederationPolicy` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', + query=query, + headers=headers) + if 'policies' in json: + for v in json['policies']: + yield FederationPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + policy_id: str, + *, + policy: Optional[FederationPolicy] = None, + update_mask: Optional[str] = None) -> FederationPolicy: + """Update account federation policy. + + :param policy_id: str + The identifier for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'description,oidc_policy.audiences'. 
+ + :returns: :class:`FederationPolicy` + """ + body = policy.as_dict() + query = {} + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}', + query=query, + body=body, + headers=headers) + return FederationPolicy.from_dict(res) + + class CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" @@ -602,7 +1090,8 @@ def create(self, name: Optional[str] = None, redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, - token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput: + token_access_policy: Optional[TokenAccessPolicy] = None, + user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput: """Create Custom OAuth App Integration. Create Custom OAuth App Integration. @@ -620,6 +1109,9 @@ def create(self, profile, email. :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy + :param user_authorized_scopes: List[str] (optional) + Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. Must be a subset of scopes. 
:returns: :class:`CreateCustomAppIntegrationOutput` """ @@ -629,6 +1121,8 @@ def create(self, if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] if scopes is not None: body['scopes'] = [v for v in scopes] if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + if user_authorized_scopes is not None: + body['user_authorized_scopes'] = [v for v in user_authorized_scopes] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', @@ -661,6 +1155,7 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str + The OAuth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` """ @@ -711,7 +1206,9 @@ def update(self, integration_id: str, *, redirect_urls: Optional[List[str]] = None, - token_access_policy: Optional[TokenAccessPolicy] = None): + scopes: Optional[List[str]] = None, + token_access_policy: Optional[TokenAccessPolicy] = None, + user_authorized_scopes: Optional[List[str]] = None): """Updates Custom OAuth App Integration. Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration @@ -720,14 +1217,23 @@ def update(self, :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration + :param scopes: List[str] (optional) + List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs + this will fully replace the existing values instead of appending :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the custom OAuth app integration + :param user_authorized_scopes: List[str] (optional) + Scopes that will need to be consented by end user to mint the access token. 
If the user does not + authorize the access token will not be minted. Must be a subset of scopes. """ body = {} if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] + if scopes is not None: body['scopes'] = [v for v in scopes] if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + if user_authorized_scopes is not None: + body['user_authorized_scopes'] = [v for v in user_authorized_scopes] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } self._api.do( @@ -905,6 +1411,186 @@ def update(self, integration_id: str, *, token_access_policy: Optional[TokenAcce headers=headers) +class ServicePrincipalFederationPolicyAPI: + """These APIs manage service principal federation policies. + + Service principal federation, also known as Workload Identity Federation, allows your automated workloads + running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. + With Workload Identity Federation, your application (or workload) authenticates to Databricks as a + Databricks service principal, using tokens provided by the workload runtime. + + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from + automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever + possible. Workload Identity Federation is supported by many popular services, including Github Actions, + Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. + + Workload identity federation is configured in your Databricks account using a service principal federation + policy. 
A service principal federation policy specifies: * which IdP, or issuer, the service principal is + allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the + Databricks service principal + + To configure a federation policy, you provide the following: * The required token __issuer__, as specified + in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the + workload identity provider. * The required token __subject__, as specified in the “sub” claim of + workload identity tokens. The subject uniquely identifies the workload in the workload runtime + environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity + tokens. The audience is intended to represent the recipient of the token. As long as the audience in the + token matches at least one audience in the policy, the token is considered a match. If unspecified, the + default value is your Databricks account id. * Optionally, the public keys used to validate the signature + of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically + fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on + the issuer’s well known endpoint for discovering public keys. + + An example service principal federation policy, for a Github Actions workload, is: ``` issuer: + "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: + "repo:my-github-org/my-repo:environment:prod" ``` + + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` + { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": + "repo:my-github-org/my-repo:environment:prod" } ``` + + You may also need to configure the workload runtime to generate tokens for your workloads. 
+ + You do not need to configure an OAuth application in Databricks to use token federation.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + service_principal_id: int, + *, + policy: Optional[FederationPolicy] = None, + policy_id: Optional[str] = None) -> FederationPolicy: + """Create service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param policy_id: str (optional) + The identifier for the federation policy. The identifier must contain only lowercase alphanumeric + characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. + + :returns: :class:`FederationPolicy` + """ + body = policy.as_dict() + query = {} + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do( + 'POST', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', + query=query, + body=body, + headers=headers) + return FederationPolicy.from_dict(res) + + def delete(self, service_principal_id: int, policy_id: str): + """Delete service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}', + headers=headers) + + def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy: + """Get service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. 
+ + :returns: :class:`FederationPolicy` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}', + headers=headers) + return FederationPolicy.from_dict(res) + + def list(self, + service_principal_id: int, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + """List service principal federation policies. + + :param service_principal_id: int + The service principal id for the federation policy. + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`FederationPolicy` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', + query=query, + headers=headers) + if 'policies' in json: + for v in json['policies']: + yield FederationPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + service_principal_id: int, + policy_id: str, + *, + policy: Optional[FederationPolicy] = None, + update_mask: Optional[str] = None) -> FederationPolicy: + """Update service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). 
The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'description,oidc_policy.audiences'. + + :returns: :class:`FederationPolicy` + """ + body = policy.as_dict() + query = {} + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do( + 'PATCH', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}', + query=query, + body=body, + headers=headers) + return FederationPolicy.from_dict(res) + + class ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets. @@ -960,7 +1646,7 @@ def delete(self, service_principal_id: int, secret_id: str): f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}', headers=headers) - def list(self, service_principal_id: int) -> Iterator[SecretInfo]: + def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]: """List service principal secrets. List all secrets associated with the given service principal. This operation only returns information @@ -968,15 +1654,30 @@ def list(self, service_principal_id: int) -> Iterator[SecretInfo]: :param service_principal_id: int The service principal ID. + :param page_token: str (optional) + An opaque page token which was the `next_page_token` in the response of the previous request to list + the secrets for this service principal. Provide this token to retrieve the next page of secret + entries. When providing a `page_token`, all other parameters provided to the request must match the + previous request. 
To list all of the secrets for a service principal, it is necessary to continue + requesting pages of entries until the response contains no `next_page_token`. Note that the number + of entries returned must not be used to determine when the listing is complete. :returns: Iterator over :class:`SecretInfo` """ + query = {} + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do( - 'GET', - f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', - headers=headers) - parsed = ListServicePrincipalSecretsResponse.from_dict(json).secrets - return parsed if parsed is not None else [] + while True: + json = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', + query=query, + headers=headers) + if 'secrets' in json: + for v in json['secrets']: + yield SecretInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 9c12f8788..db5d698d6 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -11,7 +11,7 @@ from typing import Callable, Dict, Iterator, List, Optional from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger('databricks.sdk') @@ -61,7 +61,7 @@ class CreatePipeline: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this 
pipeline.""" @@ -82,6 +82,17 @@ class CreatePipeline: photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + + run_as: Optional[RunAs] = None + """Write-only setting, available only in Create/Update calls. Specifies the user or service + principal that the pipeline runs as. If not specified, the pipeline runs as the user who created + the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error + is thrown.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -122,6 +133,8 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() + if self.run_as: body['run_as'] = self.run_as.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -129,6 +142,37 @@ def as_dict(self) -> dict: if self.trigger: body['trigger'] = self.trigger.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.channel is not None: body['channel'] = self.channel + if self.clusters: body['clusters'] = self.clusters + if 
self.configuration: body['configuration'] = self.configuration + if self.continuous is not None: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.development is not None: body['development'] = self.development + if self.dry_run is not None: body['dry_run'] = self.dry_run + if self.edition is not None: body['edition'] = self.edition + if self.filters: body['filters'] = self.filters + if self.gateway_definition: body['gateway_definition'] = self.gateway_definition + if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition + if self.libraries: body['libraries'] = self.libraries + if self.name is not None: body['name'] = self.name + if self.notifications: body['notifications'] = self.notifications + if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window + if self.run_as: body['run_as'] = self.run_as + if self.schema is not None: body['schema'] = self.schema + if self.serverless is not None: body['serverless'] = self.serverless + if self.storage is not None: body['storage'] = self.storage + if self.target is not None: body['target'] = self.target + if self.trigger: body['trigger'] = self.trigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: """Deserializes the CreatePipeline from a dictionary.""" @@ -151,6 +195,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), + run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -173,6 +219,13 @@ def as_dict(self) -> dict: if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id return body + def 
as_shallow_dict(self) -> dict: + """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.effective_settings: body['effective_settings'] = self.effective_settings + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePipelineResponse: """Deserializes the CreatePipelineResponse from a dictionary.""" @@ -193,6 +246,13 @@ def as_dict(self) -> dict: if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CronTrigger into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CronTrigger: """Deserializes the CronTrigger from a dictionary.""" @@ -215,12 +275,32 @@ def as_dict(self) -> dict: if self.seq_no is not None: body['seq_no'] = self.seq_no return body + def as_shallow_dict(self) -> dict: + """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance is not None: body['instance'] = self.instance + if self.seq_no is not None: body['seq_no'] = self.seq_no + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DataPlaneId: """Deserializes the DataPlaneId from a dictionary.""" return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None)) +class DayOfWeek(Enum): + """Days of week in which the restart is allowed to happen (within a five-hour window starting at + start_hour). 
If not specified all days of the week will be used.""" + + FRIDAY = 'FRIDAY' + MONDAY = 'MONDAY' + SATURDAY = 'SATURDAY' + SUNDAY = 'SUNDAY' + THURSDAY = 'THURSDAY' + TUESDAY = 'TUESDAY' + WEDNESDAY = 'WEDNESDAY' + + @dataclass class DeletePipelineResponse: @@ -229,6 +309,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeletePipelineResponse: """Deserializes the DeletePipelineResponse from a dictionary.""" @@ -285,7 +370,7 @@ class EditPipeline: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this pipeline.""" @@ -309,6 +394,17 @@ class EditPipeline: pipeline_id: Optional[str] = None """Unique identifier for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + + run_as: Optional[RunAs] = None + """Write-only setting, available only in Create/Update calls. Specifies the user or service + principal that the pipeline runs as. If not specified, the pipeline runs as the user who created + the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error + is thrown.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. 
The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -351,6 +447,8 @@ def as_dict(self) -> dict: if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() + if self.run_as: body['run_as'] = self.run_as.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -358,6 +456,39 @@ def as_dict(self) -> dict: if self.trigger: body['trigger'] = self.trigger.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EditPipeline into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.channel is not None: body['channel'] = self.channel + if self.clusters: body['clusters'] = self.clusters + if self.configuration: body['configuration'] = self.configuration + if self.continuous is not None: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.development is not None: body['development'] = self.development + if self.edition is not None: body['edition'] = self.edition + if self.expected_last_modified is not None: + body['expected_last_modified'] = self.expected_last_modified + if self.filters: body['filters'] = self.filters + if self.gateway_definition: body['gateway_definition'] = self.gateway_definition + if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = 
self.ingestion_definition + if self.libraries: body['libraries'] = self.libraries + if self.name is not None: body['name'] = self.name + if self.notifications: body['notifications'] = self.notifications + if self.photon is not None: body['photon'] = self.photon + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.restart_window: body['restart_window'] = self.restart_window + if self.run_as: body['run_as'] = self.run_as + if self.schema is not None: body['schema'] = self.schema + if self.serverless is not None: body['serverless'] = self.serverless + if self.storage is not None: body['storage'] = self.storage + if self.target is not None: body['target'] = self.target + if self.trigger: body['trigger'] = self.trigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditPipeline: """Deserializes the EditPipeline from a dictionary.""" @@ -381,6 +512,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline: notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), pipeline_id=d.get('pipeline_id', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), + run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -396,6 +529,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditPipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditPipelineResponse: """Deserializes the EditPipelineResponse from a dictionary.""" @@ -417,6 +555,13 @@ def as_dict(self) -> dict: if self.fatal is not None: body['fatal'] = self.fatal return body + def as_shallow_dict(self) -> dict: + """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exceptions: body['exceptions'] = self.exceptions + if 
self.fatal is not None: body['fatal'] = self.fatal + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ErrorDetail: """Deserializes the ErrorDetail from a dictionary.""" @@ -444,6 +589,12 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the FileLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FileLibrary: """Deserializes the FileLibrary from a dictionary.""" @@ -465,6 +616,13 @@ def as_dict(self) -> dict: if self.include: body['include'] = [v for v in self.include] return body + def as_shallow_dict(self) -> dict: + """Serializes the Filters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.exclude: body['exclude'] = self.exclude + if self.include: body['include'] = self.include + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Filters: """Deserializes the Filters from a dictionary.""" @@ -482,6 +640,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPipelinePermissionLevelsResponse: """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary.""" @@ -544,6 +708,24 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cause is not None: 
body['cause'] = self.cause + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.health is not None: body['health'] = self.health + if self.last_modified is not None: body['last_modified'] = self.last_modified + if self.latest_updates: body['latest_updates'] = self.latest_updates + if self.name is not None: body['name'] = self.name + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.spec: body['spec'] = self.spec + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse: """Deserializes the GetPipelineResponse from a dictionary.""" @@ -579,6 +761,12 @@ def as_dict(self) -> dict: if self.update: body['update'] = self.update.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.update: body['update'] = self.update + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse: """Deserializes the GetUpdateResponse from a dictionary.""" @@ -588,13 +776,13 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse: @dataclass class IngestionConfig: report: Optional[ReportSpec] = None - """Select tables from a specific source report.""" + """Select a specific source report.""" schema: Optional[SchemaSpec] = None - """Select tables from a specific source schema.""" + """Select all tables from a specific source schema.""" table: Optional[TableSpec] = None - """Select tables from a specific source table.""" + """Select a specific source table.""" def as_dict(self) -> dict: 
"""Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body.""" @@ -604,6 +792,14 @@ def as_dict(self) -> dict: if self.table: body['table'] = self.table.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.report: body['report'] = self.report + if self.schema: body['schema'] = self.schema + if self.table: body['table'] = self.table + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> IngestionConfig: """Deserializes the IngestionConfig from a dictionary.""" @@ -615,7 +811,11 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionConfig: @dataclass class IngestionGatewayPipelineDefinition: connection_id: Optional[str] = None - """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the + """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this + gateway pipeline uses to communicate with the source.""" + + connection_name: Optional[str] = None + """Immutable. 
The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" gateway_storage_catalog: Optional[str] = None @@ -633,6 +833,19 @@ def as_dict(self) -> dict: """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body.""" body = {} if self.connection_id is not None: body['connection_id'] = self.connection_id + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.gateway_storage_catalog is not None: + body['gateway_storage_catalog'] = self.gateway_storage_catalog + if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name + if self.gateway_storage_schema is not None: + body['gateway_storage_schema'] = self.gateway_storage_schema + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_id is not None: body['connection_id'] = self.connection_id + if self.connection_name is not None: body['connection_name'] = self.connection_name if self.gateway_storage_catalog is not None: body['gateway_storage_catalog'] = self.gateway_storage_catalog if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name @@ -644,6 +857,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition: """Deserializes the IngestionGatewayPipelineDefinition from a dictionary.""" return cls(connection_id=d.get('connection_id', None), + connection_name=d.get('connection_name', None), gateway_storage_catalog=d.get('gateway_storage_catalog', None), gateway_storage_name=d.get('gateway_storage_name', None), gateway_storage_schema=d.get('gateway_storage_schema', None)) @@ -652,12 +866,12 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition: @dataclass class IngestionPipelineDefinition: 
connection_name: Optional[str] = None - """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - source. Specify either ingestion_gateway_id or connection_name.""" + """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with + the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" ingestion_gateway_id: Optional[str] = None - """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - with the source. Specify either ingestion_gateway_id or connection_name.""" + """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate + with the source database. This is used with connectors to databases like SQL Server.""" objects: Optional[List[IngestionConfig]] = None """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" @@ -675,6 +889,15 @@ def as_dict(self) -> dict: if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id + if self.objects: body['objects'] = self.objects + if self.table_configuration: body['table_configuration'] = self.table_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" @@ -703,6 +926,14 @@ def as_dict(self) -> dict: if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListPipelineEventsResponse into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.events: body['events'] = self.events + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListPipelineEventsResponse: """Deserializes the ListPipelineEventsResponse from a dictionary.""" @@ -726,6 +957,13 @@ def as_dict(self) -> dict: if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.statuses: body['statuses'] = self.statuses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListPipelinesResponse: """Deserializes the ListPipelinesResponse from a dictionary.""" @@ -752,6 +990,14 @@ def as_dict(self) -> dict: if self.updates: body['updates'] = [v.as_dict() for v in self.updates] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.updates: body['updates'] = self.updates + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse: """Deserializes the ListUpdatesResponse from a dictionary.""" @@ -768,6 +1014,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ManualTrigger into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ManualTrigger: """Deserializes the 
ManualTrigger from a dictionary.""" @@ -793,6 +1044,12 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NotebookLibrary: """Deserializes the NotebookLibrary from a dictionary.""" @@ -819,6 +1076,13 @@ def as_dict(self) -> dict: if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients] return body + def as_shallow_dict(self) -> dict: + """Serializes the Notifications into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alerts: body['alerts'] = self.alerts + if self.email_recipients: body['email_recipients'] = self.email_recipients + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Notifications: """Deserializes the Notifications from a dictionary.""" @@ -901,6 +1165,28 @@ def as_dict(self) -> dict: if self.update_id is not None: body['update_id'] = self.update_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Origin into a shallow dictionary of its immediate attributes.""" + body = {} + if self.batch_id is not None: body['batch_id'] = self.batch_id + if self.cloud is not None: body['cloud'] = self.cloud + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.flow_id is not None: body['flow_id'] = self.flow_id + if self.flow_name is not None: body['flow_name'] = self.flow_name + if self.host is not None: body['host'] = self.host + if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id + if self.materialization_name is not None: body['materialization_name'] = self.materialization_name + if self.org_id is not None: body['org_id'] = self.org_id 
+ if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name + if self.region is not None: body['region'] = self.region + if self.request_id is not None: body['request_id'] = self.request_id + if self.table_id is not None: body['table_id'] = self.table_id + if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id + if self.update_id is not None: body['update_id'] = self.update_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Origin: """Deserializes the Origin from a dictionary.""" @@ -947,6 +1233,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlRequest: """Deserializes the PipelineAccessControlRequest from a dictionary.""" @@ -984,6 +1280,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + 
body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlResponse: """Deserializes the PipelineAccessControlResponse from a dictionary.""" @@ -1123,6 +1430,33 @@ def as_dict(self) -> dict: if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_policy_default_values is not None: + body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.label is not None: body['label'] = self.label + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: 
body['spark_env_vars'] = self.spark_env_vars + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineCluster: """Deserializes the PipelineCluster from a dictionary.""" @@ -1171,6 +1505,14 @@ def as_dict(self) -> dict: if self.mode is not None: body['mode'] = self.mode.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes.""" + body = {} + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers + if self.mode is not None: body['mode'] = self.mode + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineClusterAutoscale: """Deserializes the PipelineClusterAutoscale from a dictionary.""" @@ -1204,6 +1546,13 @@ def as_dict(self) -> dict: if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.kind is not None: body['kind'] = self.kind + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineDeployment: """Deserializes the PipelineDeployment from a dictionary.""" @@ -1254,6 +1603,20 @@ def as_dict(self) -> dict: if self.timestamp is not None: body['timestamp'] = self.timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error: body['error'] = self.error + if self.event_type is not None: body['event_type'] = self.event_type + if self.id is not None: body['id'] = self.id + if self.level is not None: body['level'] = self.level + if 
self.maturity_level is not None: body['maturity_level'] = self.maturity_level + if self.message is not None: body['message'] = self.message + if self.origin: body['origin'] = self.origin + if self.sequence: body['sequence'] = self.sequence + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineEvent: """Deserializes the PipelineEvent from a dictionary.""" @@ -1295,6 +1658,16 @@ def as_dict(self) -> dict: if self.whl is not None: body['whl'] = self.whl return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file: body['file'] = self.file + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven + if self.notebook: body['notebook'] = self.notebook + if self.whl is not None: body['whl'] = self.whl + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary: """Deserializes the PipelineLibrary from a dictionary.""" @@ -1322,6 +1695,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelinePermission: """Deserializes the PipelinePermission from a dictionary.""" @@ -1356,6 +1737,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelinePermissions 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelinePermissions: """Deserializes the PipelinePermissions from a dictionary.""" @@ -1379,6 +1768,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsDescription: """Deserializes the PipelinePermissionsDescription from a dictionary.""" @@ -1401,6 +1797,13 @@ def as_dict(self) -> dict: if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest: """Deserializes the PipelinePermissionsRequest from a dictionary.""" @@ -1444,7 +1847,7 @@ class PipelineSpec: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway 
pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this pipeline.""" @@ -1465,6 +1868,9 @@ class PipelineSpec: photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -1503,6 +1909,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -1510,6 +1917,34 @@ def as_dict(self) -> dict: if self.trigger: body['trigger'] = self.trigger.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.channel is not None: body['channel'] = self.channel + if self.clusters: body['clusters'] = self.clusters + if self.configuration: body['configuration'] = self.configuration + if self.continuous is not None: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.development is not None: body['development'] = self.development + if self.edition is not None: body['edition'] = self.edition + if self.filters: body['filters'] = self.filters + if self.gateway_definition: body['gateway_definition'] 
= self.gateway_definition + if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition + if self.libraries: body['libraries'] = self.libraries + if self.name is not None: body['name'] = self.name + if self.notifications: body['notifications'] = self.notifications + if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window + if self.schema is not None: body['schema'] = self.schema + if self.serverless is not None: body['serverless'] = self.serverless + if self.storage is not None: body['storage'] = self.storage + if self.target is not None: body['target'] = self.target + if self.trigger: body['trigger'] = self.trigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: """Deserializes the PipelineSpec from a dictionary.""" @@ -1530,6 +1965,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -1591,6 +2027,19 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.health is not None: body['health'] = self.health + if self.latest_updates: body['latest_updates'] = self.latest_updates + if self.name is not None: body['name'] = self.name + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.run_as_user_name is not None: 
body['run_as_user_name'] = self.run_as_user_name + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo: """Deserializes the PipelineStateInfo from a dictionary.""" @@ -1624,6 +2073,13 @@ def as_dict(self) -> dict: if self.manual: body['manual'] = self.manual.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cron: body['cron'] = self.cron + if self.manual: body['manual'] = self.manual + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger: """Deserializes the PipelineTrigger from a dictionary.""" @@ -1658,6 +2114,16 @@ def as_dict(self) -> dict: if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ReportSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog + if self.destination_schema is not None: body['destination_schema'] = self.destination_schema + if self.destination_table is not None: body['destination_table'] = self.destination_table + if self.source_url is not None: body['source_url'] = self.source_url + if self.table_configuration: body['table_configuration'] = self.table_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ReportSpec: """Deserializes the ReportSpec from a dictionary.""" @@ -1668,6 +2134,84 @@ def from_dict(cls, d: Dict[str, any]) -> ReportSpec: table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) +@dataclass +class RestartWindow: + start_hour: int + """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. 
+ Continuous pipeline restart is triggered only within a five-hour window starting at this hour.""" + + days_of_week: Optional[List[DayOfWeek]] = None + """Days of week in which the restart is allowed to happen (within a five-hour window starting at + start_hour). If not specified all days of the week will be used.""" + + time_zone_id: Optional[str] = None + """Time zone id of restart window. See + https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html + for details. If not specified, UTC will be used.""" + + def as_dict(self) -> dict: + """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week] + if self.start_hour is not None: body['start_hour'] = self.start_hour + if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestartWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.days_of_week: body['days_of_week'] = self.days_of_week + if self.start_hour is not None: body['start_hour'] = self.start_hour + if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> RestartWindow: + """Deserializes the RestartWindow from a dictionary.""" + return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek), + start_hour=d.get('start_hour', None), + time_zone_id=d.get('time_zone_id', None)) + + +@dataclass +class RunAs: + """Write-only setting, available only in Create/Update calls. Specifies the user or service + principal that the pipeline runs as. If not specified, the pipeline runs as the user who created + the pipeline. + + Only `user_name` or `service_principal_name` can be specified. 
If both are specified, an error + is thrown.""" + + service_principal_name: Optional[str] = None + """Application ID of an active service principal. Setting this field requires the + `servicePrincipal/user` role.""" + + user_name: Optional[str] = None + """The email of an active workspace user. Users can only set this field to their own email.""" + + def as_dict(self) -> dict: + """Serializes the RunAs into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RunAs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> RunAs: + """Deserializes the RunAs from a dictionary.""" + return cls(service_principal_name=d.get('service_principal_name', None), + user_name=d.get('user_name', None)) + + @dataclass class SchemaSpec: destination_catalog: Optional[str] = None @@ -1699,6 +2243,16 @@ def as_dict(self) -> dict: if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog + if self.destination_schema is not None: body['destination_schema'] = self.destination_schema + if self.source_catalog is not None: body['source_catalog'] = self.source_catalog + if self.source_schema is not None: body['source_schema'] = self.source_schema + if self.table_configuration: 
body['table_configuration'] = self.table_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SchemaSpec: """Deserializes the SchemaSpec from a dictionary.""" @@ -1724,6 +2278,13 @@ def as_dict(self) -> dict: if self.data_plane_id: body['data_plane_id'] = self.data_plane_id.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Sequencing into a shallow dictionary of its immediate attributes.""" + body = {} + if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no + if self.data_plane_id: body['data_plane_id'] = self.data_plane_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Sequencing: """Deserializes the Sequencing from a dictionary.""" @@ -1750,6 +2311,14 @@ def as_dict(self) -> dict: if self.stack: body['stack'] = [v.as_dict() for v in self.stack] return body + def as_shallow_dict(self) -> dict: + """Serializes the SerializedException into a shallow dictionary of its immediate attributes.""" + body = {} + if self.class_name is not None: body['class_name'] = self.class_name + if self.message is not None: body['message'] = self.message + if self.stack: body['stack'] = self.stack + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SerializedException: """Deserializes the SerializedException from a dictionary.""" @@ -1781,6 +2350,15 @@ def as_dict(self) -> dict: if self.method_name is not None: body['method_name'] = self.method_name return body + def as_shallow_dict(self) -> dict: + """Serializes the StackFrame into a shallow dictionary of its immediate attributes.""" + body = {} + if self.declaring_class is not None: body['declaring_class'] = self.declaring_class + if self.file_name is not None: body['file_name'] = self.file_name + if self.line_number is not None: body['line_number'] = self.line_number + if self.method_name is not None: body['method_name'] = self.method_name + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> StackFrame: """Deserializes the StackFrame from a dictionary.""" @@ -1825,6 +2403,17 @@ def as_dict(self) -> dict: if self.validate_only is not None: body['validate_only'] = self.validate_only return body + def as_shallow_dict(self) -> dict: + """Serializes the StartUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cause is not None: body['cause'] = self.cause + if self.full_refresh is not None: body['full_refresh'] = self.full_refresh + if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.refresh_selection: body['refresh_selection'] = self.refresh_selection + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StartUpdate: """Deserializes the StartUpdate from a dictionary.""" @@ -1856,6 +2445,12 @@ def as_dict(self) -> dict: if self.update_id is not None: body['update_id'] = self.update_id return body + def as_shallow_dict(self) -> dict: + """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.update_id is not None: body['update_id'] = self.update_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StartUpdateResponse: """Deserializes the StartUpdateResponse from a dictionary.""" @@ -1870,6 +2465,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the StopPipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StopPipelineResponse: """Deserializes the StopPipelineResponse from a dictionary.""" @@ -1913,6 +2513,18 @@ def as_dict(self) -> dict: if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() return body + def 
as_shallow_dict(self) -> dict: + """Serializes the TableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog + if self.destination_schema is not None: body['destination_schema'] = self.destination_schema + if self.destination_table is not None: body['destination_table'] = self.destination_table + if self.source_catalog is not None: body['source_catalog'] = self.source_catalog + if self.source_schema is not None: body['source_schema'] = self.source_schema + if self.source_table is not None: body['source_table'] = self.source_table + if self.table_configuration: body['table_configuration'] = self.table_configuration + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableSpec: """Deserializes the TableSpec from a dictionary.""" @@ -1951,6 +2563,16 @@ def as_dict(self) -> dict: if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by] return body + def as_shallow_dict(self) -> dict: + """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.primary_keys: body['primary_keys'] = self.primary_keys + if self.salesforce_include_formula_fields is not None: + body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields + if self.scd_type is not None: body['scd_type'] = self.scd_type + if self.sequence_by: body['sequence_by'] = self.sequence_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig: """Deserializes the TableSpecificConfig from a dictionary.""" @@ -2025,6 +2647,22 @@ def as_dict(self) -> dict: if self.validate_only is not None: body['validate_only'] = self.validate_only return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cause is not None: body['cause'] = self.cause + if self.cluster_id 
is not None: body['cluster_id'] = self.cluster_id + if self.config: body['config'] = self.config + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.full_refresh is not None: body['full_refresh'] = self.full_refresh + if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.refresh_selection: body['refresh_selection'] = self.refresh_selection + if self.state is not None: body['state'] = self.state + if self.update_id is not None: body['update_id'] = self.update_id + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateInfo: """Deserializes the UpdateInfo from a dictionary.""" @@ -2084,6 +2722,14 @@ def as_dict(self) -> dict: if self.update_id is not None: body['update_id'] = self.update_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.state is not None: body['state'] = self.state + if self.update_id is not None: body['update_id'] = self.update_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateStateInfo: """Deserializes the UpdateStateInfo from a dictionary.""" @@ -2122,13 +2768,13 @@ class PipelinesAPI: def __init__(self, api_client): self._api = api_client - def wait_get_pipeline_idle( + def wait_get_pipeline_running( self, pipeline_id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse: deadline = time.time() + timeout.total_seconds() - target_states = (PipelineState.IDLE, ) + target_states = (PipelineState.RUNNING, ) failure_states = (PipelineState.FAILED, ) status_message = 'polling...' 
attempt = 1 @@ -2141,7 +2787,7 @@ def wait_get_pipeline_idle( if callback: callback(poll) if status in failure_states: - msg = f'failed to reach IDLE, got {status}: {status_message}' + msg = f'failed to reach RUNNING, got {status}: {status_message}' raise OperationFailed(msg) prefix = f"pipeline_id={pipeline_id}" sleep = attempt @@ -2153,13 +2799,13 @@ def wait_get_pipeline_idle( attempt += 1 raise TimeoutError(f'timed out after {timeout}: {status_message}') - def wait_get_pipeline_running( + def wait_get_pipeline_idle( self, pipeline_id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse: deadline = time.time() + timeout.total_seconds() - target_states = (PipelineState.RUNNING, ) + target_states = (PipelineState.IDLE, ) failure_states = (PipelineState.FAILED, ) status_message = 'polling...' attempt = 1 @@ -2172,7 +2818,7 @@ def wait_get_pipeline_running( if callback: callback(poll) if status in failure_states: - msg = f'failed to reach RUNNING, got {status}: {status_message}' + msg = f'failed to reach IDLE, got {status}: {status_message}' raise OperationFailed(msg) prefix = f"pipeline_id={pipeline_id}" sleep = attempt @@ -2205,6 +2851,8 @@ def create(self, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, + run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, @@ -2241,7 +2889,7 @@ def create(self, :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. 
:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -2255,6 +2903,14 @@ def create(self, List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. + :param run_as: :class:`RunAs` (optional) + Write-only setting, available only in Create/Update calls. Specifies the user or service principal + that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is + thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. @@ -2290,6 +2946,8 @@ def create(self, if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] if photon is not None: body['photon'] = photon + if restart_window is not None: body['restart_window'] = restart_window.as_dict() + if run_as is not None: body['run_as'] = run_as.as_dict() if schema is not None: body['schema'] = schema if serverless is not None: body['serverless'] = serverless if storage is not None: body['storage'] = storage @@ -2518,7 +3176,8 @@ def set_permissions( access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions: """Set pipeline permissions. - Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param pipeline_id: str The pipeline for which to get or manage permissions. 
@@ -2622,6 +3281,8 @@ def update(self, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, + run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, @@ -2661,7 +3322,7 @@ def update(self, :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -2675,6 +3336,14 @@ def update(self, List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. + :param run_as: :class:`RunAs` (optional) + Write-only setting, available only in Create/Update calls. Specifies the user or service principal + that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is + thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. 
@@ -2710,6 +3379,8 @@ def update(self, if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] if photon is not None: body['photon'] = photon + if restart_window is not None: body['restart_window'] = restart_window.as_dict() + if run_as is not None: body['run_as'] = run_as.as_dict() if schema is not None: body['schema'] = schema if serverless is not None: body['serverless'] = serverless if storage is not None: body['storage'] = storage diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 1dc6f3b8d..c54120ad8 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -28,6 +28,12 @@ def as_dict(self) -> dict: if self.sts_role: body['sts_role'] = self.sts_role.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sts_role: body['sts_role'] = self.sts_role + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" @@ -60,6 +66,16 @@ def as_dict(self) -> dict: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body + def as_shallow_dict(self) -> dict: + """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.key_region is not None: body['key_region'] = self.key_region + if self.reuse_key_for_cluster_volumes is not None: + body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AwsKeyInfo: """Deserializes the AwsKeyInfo from a dictionary.""" @@ -84,6 +100,13 @@ def as_dict(self) -> dict: if self.subscription_id 
is not None: body['subscription_id'] = self.subscription_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.resource_group is not None: body['resource_group'] = self.resource_group + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AzureWorkspaceInfo: """Deserializes the AzureWorkspaceInfo from a dictionary.""" @@ -104,6 +127,12 @@ def as_dict(self) -> dict: if self.gcp: body['gcp'] = self.gcp.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gcp: body['gcp'] = self.gcp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CloudResourceContainer: """Deserializes the CloudResourceContainer from a dictionary.""" @@ -133,6 +162,15 @@ def as_dict(self) -> dict: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.reuse_key_for_cluster_volumes is not None: + body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateAwsKeyInfo: """Deserializes the CreateAwsKeyInfo from a dictionary.""" @@ -151,6 +189,12 @@ def as_dict(self) -> dict: if self.sts_role: body['sts_role'] = self.sts_role.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sts_role: body['sts_role'] = 
self.sts_role + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCredentialAwsCredentials: """Deserializes the CreateCredentialAwsCredentials from a dictionary.""" @@ -171,6 +215,13 @@ def as_dict(self) -> dict: if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest: """Deserializes the CreateCredentialRequest from a dictionary.""" @@ -189,6 +240,12 @@ def as_dict(self) -> dict: if self.role_arn is not None: body['role_arn'] = self.role_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes.""" + body = {} + if self.role_arn is not None: body['role_arn'] = self.role_arn + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCredentialStsRole: """Deserializes the CreateCredentialStsRole from a dictionary.""" @@ -212,6 +269,14 @@ def as_dict(self) -> dict: if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info + if self.use_cases: body['use_cases'] = self.use_cases + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCustomerManagedKeyRequest: """Deserializes the CreateCustomerManagedKeyRequest from a dictionary.""" @@ -231,6 +296,12 @@ def 
as_dict(self) -> dict: if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateGcpKeyInfo: """Deserializes the CreateGcpKeyInfo from a dictionary.""" @@ -275,6 +346,17 @@ def as_dict(self) -> dict: if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = self.security_group_ids + if self.subnet_ids: body['subnet_ids'] = self.subnet_ids + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateNetworkRequest: """Deserializes the CreateNetworkRequest from a dictionary.""" @@ -302,6 +384,14 @@ def as_dict(self) -> dict: body['storage_configuration_name'] = self.storage_configuration_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info + if self.storage_configuration_name is not None: + body['storage_configuration_name'] = self.storage_configuration_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateStorageConfigurationRequest: """Deserializes the CreateStorageConfigurationRequest from a dictionary.""" @@ -332,6 +422,15 @@ 
def as_dict(self) -> dict: if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info + if self.region is not None: body['region'] = self.region + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVpcEndpointRequest: """Deserializes the CreateVpcEndpointRequest from a dictionary.""" @@ -412,6 +511,9 @@ class CreateWorkspaceRequest: gke_config: Optional[GkeConfig] = None """The configurations for the GKE cluster of a Databricks workspace.""" + is_no_public_ip_enabled: Optional[bool] = None + """Whether no public IP is enabled for the workspace.""" + location: Optional[str] = None """The Google Cloud region of the workspace data plane in your Google account. 
For example, `us-east4`.""" @@ -460,6 +562,8 @@ def as_dict(self) -> dict: if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict() if self.gke_config: body['gke_config'] = self.gke_config.as_dict() + if self.is_no_public_ip_enabled is not None: + body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled if self.location is not None: body['location'] = self.location if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id @@ -474,6 +578,34 @@ def as_dict(self) -> dict: if self.workspace_name is not None: body['workspace_name'] = self.workspace_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.gcp_managed_network_config: + body['gcp_managed_network_config'] = self.gcp_managed_network_config + if self.gke_config: body['gke_config'] = self.gke_config + if self.is_no_public_ip_enabled is not None: + body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: + body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier + 
if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: + body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest: """Deserializes the CreateWorkspaceRequest from a dictionary.""" @@ -486,6 +618,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest: gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), + is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), @@ -523,6 +656,16 @@ def as_dict(self) -> dict: if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body + def as_shallow_dict(self) -> dict: + """Serializes the Credential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Credential: """Deserializes the Credential from a dictionary.""" @@ -550,6 +693,12 @@ def as_dict(self) -> dict: if self.project_id is not None: body['project_id'] = self.project_id return body + def 
as_shallow_dict(self) -> dict: + """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes.""" + body = {} + if self.project_id is not None: body['project_id'] = self.project_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CustomerFacingGcpCloudResourceContainer: """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary.""" @@ -586,6 +735,18 @@ def as_dict(self) -> dict: if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] return body + def as_shallow_dict(self) -> dict: + """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.customer_managed_key_id is not None: + body['customer_managed_key_id'] = self.customer_managed_key_id + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info + if self.use_cases: body['use_cases'] = self.use_cases + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CustomerManagedKey: """Deserializes the CustomerManagedKey from a dictionary.""" @@ -605,6 +766,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -632,6 +798,45 @@ class ErrorType(Enum): VPC = 'vpc' +@dataclass +class ExternalCustomerInfo: + authoritative_user_email: Optional[str] = None + """Email of the authoritative user.""" + + authoritative_user_full_name: Optional[str] = None + """The authoritative user full name.""" + + customer_name: Optional[str] = None + """The legal 
entity name for the external workspace""" + + def as_dict(self) -> dict: + """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.authoritative_user_email is not None: + body['authoritative_user_email'] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: + body['authoritative_user_full_name'] = self.authoritative_user_full_name + if self.customer_name is not None: body['customer_name'] = self.customer_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authoritative_user_email is not None: + body['authoritative_user_email'] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: + body['authoritative_user_full_name'] = self.authoritative_user_full_name + if self.customer_name is not None: body['customer_name'] = self.customer_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo: + """Deserializes the ExternalCustomerInfo from a dictionary.""" + return cls(authoritative_user_email=d.get('authoritative_user_email', None), + authoritative_user_full_name=d.get('authoritative_user_full_name', None), + customer_name=d.get('customer_name', None)) + + @dataclass class GcpKeyInfo: kms_key_id: str @@ -643,6 +848,12 @@ def as_dict(self) -> dict: if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcpKeyInfo: """Deserializes the GcpKeyInfo from a dictionary.""" @@ -692,6 +903,16 @@ def as_dict(self) -> dict: if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr 
return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gke_cluster_pod_ip_range is not None: + body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range + if self.gke_cluster_service_ip_range is not None: + body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range + if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcpManagedNetworkConfig: """Deserializes the GcpManagedNetworkConfig from a dictionary.""" @@ -737,6 +958,17 @@ def as_dict(self) -> dict: if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.network_project_id is not None: body['network_project_id'] = self.network_project_id + if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name + if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name + if self.subnet_id is not None: body['subnet_id'] = self.subnet_id + if self.subnet_region is not None: body['subnet_region'] = self.subnet_region + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcpNetworkInfo: """Deserializes the GcpNetworkInfo from a dictionary.""" @@ -777,6 +1009,16 @@ def as_dict(self) -> dict: if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id return body + def as_shallow_dict(self) -> dict: + """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region + if self.project_id is not None: body['project_id'] = 
self.project_id + if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id + if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name + if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GcpVpcEndpointInfo: """Deserializes the GcpVpcEndpointInfo from a dictionary.""" @@ -813,6 +1055,13 @@ def as_dict(self) -> dict: if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range return body + def as_shallow_dict(self) -> dict: + """Serializes the GkeConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type + if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GkeConfig: """Deserializes the GkeConfig from a dictionary.""" @@ -905,6 +1154,24 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Network into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.error_messages: body['error_messages'] = self.error_messages + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info + if self.network_id is not None: body['network_id'] = self.network_id + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = self.security_group_ids + if self.subnet_ids: body['subnet_ids'] = self.subnet_ids + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints + if self.vpc_id is not 
None: body['vpc_id'] = self.vpc_id + if self.vpc_status is not None: body['vpc_status'] = self.vpc_status + if self.warning_messages: body['warning_messages'] = self.warning_messages + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Network: """Deserializes the Network from a dictionary.""" @@ -939,6 +1206,13 @@ def as_dict(self) -> dict: if self.error_type is not None: body['error_type'] = self.error_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error_message is not None: body['error_message'] = self.error_message + if self.error_type is not None: body['error_type'] = self.error_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NetworkHealth: """Deserializes the NetworkHealth from a dictionary.""" @@ -966,6 +1240,13 @@ def as_dict(self) -> dict: if self.rest_api: body['rest_api'] = [v for v in self.rest_api] return body + def as_shallow_dict(self) -> dict: + """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay + if self.rest_api: body['rest_api'] = self.rest_api + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NetworkVpcEndpoints: """Deserializes the NetworkVpcEndpoints from a dictionary.""" @@ -987,6 +1268,13 @@ def as_dict(self) -> dict: if self.warning_type is not None: body['warning_type'] = self.warning_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the NetworkWarning into a shallow dictionary of its immediate attributes.""" + body = {} + if self.warning_message is not None: body['warning_message'] = self.warning_message + if self.warning_type is not None: body['warning_type'] = self.warning_type + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> NetworkWarning: """Deserializes the NetworkWarning from a dictionary.""" @@ -1064,6 +1352,20 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level + if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: + body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PrivateAccessSettings: """Deserializes the PrivateAccessSettings from a dictionary.""" @@ -1084,6 +1386,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse: """Deserializes the ReplaceResponse from a dictionary.""" @@ -1103,6 +1410,12 @@ def as_dict(self) -> dict: if self.bucket_name is not None: body['bucket_name'] = self.bucket_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
RootBucketInfo: """Deserializes the RootBucketInfo from a dictionary.""" @@ -1138,6 +1451,18 @@ def as_dict(self) -> dict: body['storage_configuration_name'] = self.storage_configuration_name return body + def as_shallow_dict(self) -> dict: + """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_configuration_name is not None: + body['storage_configuration_name'] = self.storage_configuration_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StorageConfiguration: """Deserializes the StorageConfiguration from a dictionary.""" @@ -1164,6 +1489,13 @@ def as_dict(self) -> dict: if self.role_arn is not None: body['role_arn'] = self.role_arn return body + def as_shallow_dict(self) -> dict: + """Serializes the StsRole into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StsRole: """Deserializes the StsRole from a dictionary.""" @@ -1178,6 +1510,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -1210,6 +1547,10 @@ class UpdateWorkspaceRequest: customer-managed VPC. 
For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID.""" + private_access_settings_id: Optional[str] = None + """The ID of the workspace's private access settings configuration object. This parameter is + available only for updating failed workspaces.""" + storage_configuration_id: Optional[str] = None """The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces.""" @@ -1232,6 +1573,28 @@ def as_dict(self) -> dict: if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id if self.network_id is not None: body['network_id'] = self.network_id + if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: + body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.managed_services_customer_managed_key_id is not None: + body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.network_id is not None: body['network_id'] = self.network_id + if 
self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: @@ -1249,6 +1612,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest: None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), network_id=d.get('network_id', None), + private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None)) @@ -1307,6 +1671,19 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level + if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: + body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpsertPrivateAccessSettingsRequest: """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary.""" @@ -1378,6 +1755,22 @@ def as_dict(self) -> dict: if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body + def 
as_shallow_dict(self) -> dict: + """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id + if self.aws_endpoint_service_id is not None: + body['aws_endpoint_service_id'] = self.aws_endpoint_service_id + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info + if self.region is not None: body['region'] = self.region + if self.state is not None: body['state'] = self.state + if self.use_case is not None: body['use_case'] = self.use_case + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> VpcEndpoint: """Deserializes the VpcEndpoint from a dictionary.""" @@ -1443,6 +1836,10 @@ class Workspace: This value must be unique across all non-deleted deployments across all AWS regions.""" + external_customer_info: Optional[ExternalCustomerInfo] = None + """If this workspace is for an external customer, then external_customer_info is populated. If this + workspace is not for an external customer, then external_customer_info is empty.""" + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP @@ -1466,6 +1863,9 @@ class Workspace: gke_config: Optional[GkeConfig] = None """The configurations for the GKE cluster of a Databricks workspace.""" + is_no_public_ip_enabled: Optional[bool] = None + """Whether no public IP is enabled for the workspace.""" + location: Optional[str] = None """The Google Cloud region of the workspace data plane in your Google account (for example, `us-east4`).""" @@ -1524,9 +1924,12 @@ def as_dict(self) -> dict: if self.credentials_id is not None: body['credentials_id'] = self.credentials_id if self.custom_tags: body['custom_tags'] = self.custom_tags if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict() if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict() if self.gke_config: body['gke_config'] = self.gke_config.as_dict() + if self.is_no_public_ip_enabled is not None: + body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled if self.location is not None: body['location'] = self.location if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id @@ -1545,6 +1948,42 @@ def as_dict(self) -> dict: body['workspace_status_message'] = self.workspace_status_message return body + def as_shallow_dict(self) -> dict: + """Serializes the Workspace into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: 
body['cloud_resource_container'] = self.cloud_resource_container + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.external_customer_info: body['external_customer_info'] = self.external_customer_info + if self.gcp_managed_network_config: + body['gcp_managed_network_config'] = self.gcp_managed_network_config + if self.gke_config: body['gke_config'] = self.gke_config + if self.is_no_public_ip_enabled is not None: + body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: + body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier + if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: + body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: + body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.workspace_status is not None: body['workspace_status'] = self.workspace_status + if self.workspace_status_message is not None: + body['workspace_status_message'] = self.workspace_status_message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Workspace: """Deserializes the Workspace from a dictionary.""" @@ 
-1557,9 +1996,11 @@ def from_dict(cls, d: Dict[str, any]) -> Workspace: credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), deployment_name=d.get('deployment_name', None), + external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo), gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), + is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), @@ -2399,6 +2840,7 @@ def create(self, deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, + is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, @@ -2477,6 +2919,8 @@ def create(self, [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) The configurations for the GKE cluster of a Databricks workspace. + :param is_no_public_ip_enabled: bool (optional) + Whether no public IP is enabled for the workspace. :param location: str (optional) The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. 
:param managed_services_customer_managed_key_id: str (optional) @@ -2519,6 +2963,7 @@ def create(self, if gcp_managed_network_config is not None: body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict() if gke_config is not None: body['gke_config'] = gke_config.as_dict() + if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled if location is not None: body['location'] = location if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id @@ -2552,6 +2997,7 @@ def create_and_wait( deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, + is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, @@ -2568,6 +3014,7 @@ def create_and_wait( deployment_name=deployment_name, gcp_managed_network_config=gcp_managed_network_config, gke_config=gke_config, + is_no_public_ip_enabled=is_no_public_ip_enabled, location=location, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_id=network_id, @@ -2653,6 +3100,7 @@ def update(self, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, + private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: """Update workspace configuration. @@ -2771,6 +3219,9 @@ def update(self, The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. 
For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID. + :param private_access_settings_id: str (optional) + The ID of the workspace's private access settings configuration object. This parameter is available + only for updating failed workspaces. :param storage_configuration_id: str (optional) The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces. @@ -2791,6 +3242,8 @@ def update(self, if network_connectivity_config_id is not None: body['network_connectivity_config_id'] = network_connectivity_config_id if network_id is not None: body['network_id'] = network_id + if private_access_settings_id is not None: + body['private_access_settings_id'] = private_access_settings_id if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id if storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = storage_customer_managed_key_id @@ -2814,6 +3267,7 @@ def update_and_wait( managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, + private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, timeout=timedelta(minutes=20)) -> Workspace: @@ -2823,6 +3277,7 @@ def update_and_wait( managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_connectivity_config_id=network_connectivity_config_id, network_id=network_id, + private_access_settings_id=private_access_settings_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, workspace_id=workspace_id).result(timeout=timeout) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 
7639d96fb..c10e43572 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -12,14 +12,11 @@ import requests -from ..data_plane import DataPlaneService from ..errors import OperationFailed from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger('databricks.sdk') -from databricks.sdk.service import oauth2 - # all definitions in this file are in alphabetical order @@ -43,6 +40,14 @@ def as_dict(self) -> dict: body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext return body + def as_shallow_dict(self) -> dict: + """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: + body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig: """Deserializes the Ai21LabsConfig from a dictionary.""" @@ -76,6 +81,15 @@ def as_dict(self) -> dict: if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig: """Deserializes the AiGatewayConfig from a dictionary.""" @@ -111,6 +125,15 @@ def as_dict(self) -> dict: if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics] return body + def as_shallow_dict(self) -> dict: + """Serializes 
the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords + if self.pii: body['pii'] = self.pii + if self.safety is not None: body['safety'] = self.safety + if self.valid_topics: body['valid_topics'] = self.valid_topics + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters: """Deserializes the AiGatewayGuardrailParameters from a dictionary.""" @@ -122,11 +145,8 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters: @dataclass class AiGatewayGuardrailPiiBehavior: - behavior: AiGatewayGuardrailPiiBehaviorBehavior - """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input - guardrail and the request contains PII, the request is not sent to the model server and 400 - status code is returned; if 'BLOCK' is set for the output guardrail and the model response - contains PII, the PII info in the response is redacted and 400 status code is returned.""" + behavior: Optional[AiGatewayGuardrailPiiBehaviorBehavior] = None + """Configuration for input guardrail filters.""" def as_dict(self) -> dict: """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body.""" @@ -134,6 +154,12 @@ def as_dict(self) -> dict: if self.behavior is not None: body['behavior'] = self.behavior.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes.""" + body = {} + if self.behavior is not None: body['behavior'] = self.behavior + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior: """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary.""" @@ -141,10 +167,6 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior: class 
AiGatewayGuardrailPiiBehaviorBehavior(Enum): - """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input - guardrail and the request contains PII, the request is not sent to the model server and 400 - status code is returned; if 'BLOCK' is set for the output guardrail and the model response - contains PII, the PII info in the response is redacted and 400 status code is returned.""" BLOCK = 'BLOCK' NONE = 'NONE' @@ -165,6 +187,13 @@ def as_dict(self) -> dict: if self.output: body['output'] = self.output.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.input: body['input'] = self.input + if self.output: body['output'] = self.output + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails: """Deserializes the AiGatewayGuardrails from a dictionary.""" @@ -198,6 +227,15 @@ def as_dict(self) -> dict: if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig: """Deserializes the AiGatewayInferenceTableConfig from a dictionary.""" @@ -227,6 +265,14 @@ def as_dict(self) -> dict: if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayRateLimit into a shallow dictionary of 
its immediate attributes.""" + body = {} + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit: """Deserializes the AiGatewayRateLimit from a dictionary.""" @@ -236,15 +282,12 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit: class AiGatewayRateLimitKey(Enum): - """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' - being the default if not specified.""" ENDPOINT = 'endpoint' USER = 'user' class AiGatewayRateLimitRenewalPeriod(Enum): - """Renewal period field for a rate limit. Currently, only 'minute' is supported.""" MINUTE = 'minute' @@ -260,6 +303,12 @@ def as_dict(self) -> dict: if self.enabled is not None: body['enabled'] = self.enabled return body + def as_shallow_dict(self) -> dict: + """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig: """Deserializes the AiGatewayUsageTrackingConfig from a dictionary.""" @@ -277,9 +326,9 @@ class AmazonBedrockConfig: aws_access_key_id: Optional[str] = None """The Databricks secret key reference for an AWS access key ID with permissions to interact with - Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You - must provide an API key using one of the following fields: `aws_access_key_id` or - `aws_access_key_id_plaintext`.""" + Bedrock services. If you prefer to paste your API key directly, see + `aws_access_key_id_plaintext`. 
You must provide an API key using one of the following fields: + `aws_access_key_id` or `aws_access_key_id_plaintext`.""" aws_access_key_id_plaintext: Optional[str] = None """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext @@ -312,6 +361,19 @@ def as_dict(self) -> dict: if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: + body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: + body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext + if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig: """Deserializes the AmazonBedrockConfig from a dictionary.""" @@ -324,8 +386,6 @@ def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig: class AmazonBedrockConfigBedrockProvider(Enum): - """The underlying provider in Amazon Bedrock. 
Supported values (case insensitive) include: - Anthropic, Cohere, AI21Labs, Amazon.""" AI21LABS = 'ai21labs' AMAZON = 'amazon' @@ -353,6 +413,14 @@ def as_dict(self) -> dict: body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext return body + def as_shallow_dict(self) -> dict: + """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: + body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: """Deserializes the AnthropicConfig from a dictionary.""" @@ -386,6 +454,15 @@ def as_dict(self) -> dict: if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body + def as_shallow_dict(self) -> dict: + """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput: """Deserializes the AutoCaptureConfigInput from a dictionary.""" @@ -398,18 +475,21 @@ def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput: @dataclass class AutoCaptureConfigOutput: catalog_name: Optional[str] = None - """The name of the catalog in Unity Catalog.""" + """The name of the catalog in Unity Catalog. 
NOTE: On update, you cannot change the catalog name if + the inference table is already enabled.""" enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" schema_name: Optional[str] = None - """The name of the schema in Unity Catalog.""" + """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if + the inference table is already enabled.""" state: Optional[AutoCaptureState] = None table_name_prefix: Optional[str] = None - """The prefix of the table in Unity Catalog.""" + """The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if + the inference table is already enabled.""" def as_dict(self) -> dict: """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body.""" @@ -421,6 +501,16 @@ def as_dict(self) -> dict: if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body + def as_shallow_dict(self) -> dict: + """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.state: body['state'] = self.state + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigOutput: """Deserializes the AutoCaptureConfigOutput from a dictionary.""" @@ -441,6 +531,12 @@ def as_dict(self) -> dict: if self.payload_table: body['payload_table'] = self.payload_table.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.payload_table: body['payload_table'] = self.payload_table + return body + 
@classmethod def from_dict(cls, d: Dict[str, any]) -> AutoCaptureState: """Deserializes the AutoCaptureState from a dictionary.""" @@ -458,6 +554,12 @@ def as_dict(self) -> dict: if self.logs is not None: body['logs'] = self.logs return body + def as_shallow_dict(self) -> dict: + """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.logs is not None: body['logs'] = self.logs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BuildLogsResponse: """Deserializes the BuildLogsResponse from a dictionary.""" @@ -479,6 +581,13 @@ def as_dict(self) -> dict: if self.role is not None: body['role'] = self.role.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ChatMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.role is not None: body['role'] = self.role + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ChatMessage: """Deserializes the ChatMessage from a dictionary.""" @@ -518,6 +627,15 @@ def as_dict(self) -> dict: body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext return body + def as_shallow_dict(self) -> dict: + """Serializes the CohereConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base + if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: + body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CohereConfig: """Deserializes the CohereConfig from a dictionary.""" @@ -532,7 +650,11 @@ class CreateServingEndpoint: """The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. 
An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - config: EndpointCoreConfigInput + ai_gateway: Optional[AiGatewayConfig] = None + """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported.""" + + config: Optional[EndpointCoreConfigInput] = None """The core config of the serving endpoint.""" ai_gateway: Optional[AiGatewayConfig] = None @@ -560,6 +682,17 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.config: body['config'] = self.config + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint: """Deserializes the CreateServingEndpoint from a dictionary.""" @@ -571,6 +704,37 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint: tags=_repeated_dict(d, 'tags', EndpointTag)) +@dataclass +class DataPlaneInfo: + """Details necessary to query this object's API through the DataPlane APIs.""" + + authorization_details: Optional[str] = None + """Authorization details as a string.""" + + endpoint_url: Optional[str] = None + """The URL of the endpoint for this operation in the dataplane.""" + + def as_dict(self) -> dict: + """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.authorization_details is not None: body['authorization_details'] = self.authorization_details + if self.endpoint_url is not None: body['endpoint_url'] = 
self.endpoint_url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authorization_details is not None: body['authorization_details'] = self.authorization_details + if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo: + """Deserializes the DataPlaneInfo from a dictionary.""" + return cls(authorization_details=d.get('authorization_details', None), + endpoint_url=d.get('endpoint_url', None)) + + @dataclass class DatabricksModelServingConfig: databricks_workspace_url: str @@ -601,6 +765,16 @@ def as_dict(self) -> dict: body['databricks_workspace_url'] = self.databricks_workspace_url return body + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: + body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext + if self.databricks_workspace_url is not None: + body['databricks_workspace_url'] = self.databricks_workspace_url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig: """Deserializes the DatabricksModelServingConfig from a dictionary.""" @@ -625,6 +799,14 @@ def as_dict(self) -> dict: if self.index: body['index'] = [v for v in self.index] return body + def as_shallow_dict(self) -> dict: + """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + if self.data: body['data'] = self.data + if self.index: body['index'] = self.index + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DataframeSplitInput: 
"""Deserializes the DataframeSplitInput from a dictionary.""" @@ -639,6 +821,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -663,6 +850,14 @@ def as_dict(self) -> dict: if self.object is not None: body['object'] = self.object.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes.""" + body = {} + if self.embedding: body['embedding'] = self.embedding + if self.index is not None: body['index'] = self.index + if self.object is not None: body['object'] = self.object + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EmbeddingsV1ResponseEmbeddingElement: """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary.""" @@ -681,21 +876,22 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum): class EndpointCoreConfigInput: auto_capture_config: Optional[AutoCaptureConfigInput] = None """Configuration for Inference Tables which automatically logs requests and responses to Unity - Catalog.""" + Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or + updating existing provisioned throughput endpoints that never have inference table configured; + in these cases please use AI Gateway to manage inference tables.""" name: Optional[str] = None """The name of the serving endpoint to update. This field is required.""" served_entities: Optional[List[ServedEntityInput]] = None - """A list of served entities for the endpoint to serve. 
A serving endpoint can have up to 15 served - entities.""" + """The list of served entities under the serving endpoint config.""" served_models: Optional[List[ServedModelInput]] = None - """(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A - serving endpoint can have up to 15 served models.""" + """(Deprecated, use served_entities instead) The list of served models under the serving endpoint + config.""" traffic_config: Optional[TrafficConfig] = None - """The traffic config defining how invocations to the serving endpoint should be routed.""" + """The traffic configuration associated with the serving endpoint config.""" def as_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body.""" @@ -707,6 +903,16 @@ def as_dict(self) -> dict: if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.name is not None: body['name'] = self.name + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.traffic_config: body['traffic_config'] = self.traffic_config + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput: """Deserializes the EndpointCoreConfigInput from a dictionary.""" @@ -721,7 +927,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput: class EndpointCoreConfigOutput: auto_capture_config: Optional[AutoCaptureConfigOutput] = None """Configuration for Inference Tables which automatically logs requests and responses to Unity - Catalog.""" + Catalog. 
Note: this field is deprecated for creating new provisioned throughput endpoints, or + updating existing provisioned throughput endpoints that never have inference table configured; + in these cases please use AI Gateway to manage inference tables.""" config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" @@ -746,6 +954,16 @@ def as_dict(self) -> dict: if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.traffic_config: body['traffic_config'] = self.traffic_config + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigOutput: """Deserializes the EndpointCoreConfigOutput from a dictionary.""" @@ -772,6 +990,13 @@ def as_dict(self) -> dict: if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary: """Deserializes the EndpointCoreConfigSummary from a dictionary.""" @@ -783,7 +1008,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary: class EndpointPendingConfig: auto_capture_config: Optional[AutoCaptureConfigOutput] = None 
"""Configuration for Inference Tables which automatically logs requests and responses to Unity - Catalog.""" + Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or + updating existing provisioned throughput endpoints that never have inference table configured; + in these cases please use AI Gateway to manage inference tables.""" config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" @@ -812,6 +1039,17 @@ def as_dict(self) -> dict: if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.start_time is not None: body['start_time'] = self.start_time + if self.traffic_config: body['traffic_config'] = self.traffic_config + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig: """Deserializes the EndpointPendingConfig from a dictionary.""" @@ -843,6 +1081,13 @@ def as_dict(self) -> dict: if self.ready is not None: body['ready'] = self.ready.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config_update is not None: body['config_update'] = self.config_update + if self.ready is not None: body['ready'] = self.ready + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointState: """Deserializes the EndpointState from a dictionary.""" @@ -851,10 +1096,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointState: 
class EndpointStateConfigUpdate(Enum): - """The state of an endpoint's config update. This informs the user if the pending_config is in - progress, if the update failed, or if there is no update in progress. Note that if the - endpoint's config_update state value is IN_PROGRESS, another update can not be made until the - update completes or fails.""" IN_PROGRESS = 'IN_PROGRESS' NOT_UPDATING = 'NOT_UPDATING' @@ -863,9 +1104,6 @@ class EndpointStateConfigUpdate(Enum): class EndpointStateReady(Enum): - """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is - READY if all of the served entities in its active configuration are ready. If any of the - actively served entities are in a non-ready state, the endpoint state will be NOT_READY.""" NOT_READY = 'NOT_READY' READY = 'READY' @@ -886,12 +1124,41 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointTag: """Deserializes the EndpointTag from a dictionary.""" return cls(key=d.get('key', None), value=d.get('value', None)) +@dataclass +class EndpointTags: + tags: Optional[List[EndpointTag]] = None + + def as_dict(self) -> dict: + """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EndpointTags into a shallow dictionary of its immediate attributes.""" + body = {} + if self.tags: body['tags'] = self.tags + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EndpointTags: + """Deserializes the EndpointTags from a 
dictionary.""" + return cls(tags=_repeated_dict(d, 'tags', EndpointTag)) + + @dataclass class ExportMetricsResponse: contents: Optional[BinaryIO] = None @@ -902,18 +1169,89 @@ def as_dict(self) -> dict: if self.contents: body['contents'] = self.contents return body + def as_shallow_dict(self) -> dict: + """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents: body['contents'] = self.contents + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse: """Deserializes the ExportMetricsResponse from a dictionary.""" return cls(contents=d.get('contents', None)) +@dataclass +class ExternalFunctionRequest: + """Simple Proto message for testing""" + + connection_name: str + """The connection name to use. This is required to identify the external connection.""" + + method: ExternalFunctionRequestHttpMethod + """The HTTP method to use (e.g., 'GET', 'POST').""" + + path: str + """The relative path for the API endpoint. This is required.""" + + headers: Optional[str] = None + """Additional headers for the request. 
If not provided, only auth headers from connections would be + passed.""" + + json: Optional[str] = None + """The JSON payload to send in the request body.""" + + params: Optional[str] = None + """Query parameters for the request.""" + + def as_dict(self) -> dict: + """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.headers is not None: body['headers'] = self.headers + if self.json is not None: body['json'] = self.json + if self.method is not None: body['method'] = self.method.value + if self.params is not None: body['params'] = self.params + if self.path is not None: body['path'] = self.path + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.headers is not None: body['headers'] = self.headers + if self.json is not None: body['json'] = self.json + if self.method is not None: body['method'] = self.method + if self.params is not None: body['params'] = self.params + if self.path is not None: body['path'] = self.path + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionRequest: + """Deserializes the ExternalFunctionRequest from a dictionary.""" + return cls(connection_name=d.get('connection_name', None), + headers=d.get('headers', None), + json=d.get('json', None), + method=_enum(d, 'method', ExternalFunctionRequestHttpMethod), + params=d.get('params', None), + path=d.get('path', None)) + + +class ExternalFunctionRequestHttpMethod(Enum): + + DELETE = 'DELETE' + GET = 'GET' + PATCH = 'PATCH' + POST = 'POST' + PUT = 'PUT' + + @dataclass class ExternalModel: provider: ExternalModelProvider """The name of the provider for the external model. 
Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', - 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" + 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.""" name: str """The name of the external model.""" @@ -963,6 +1301,24 @@ def as_dict(self) -> dict: if self.task is not None: body['task'] = self.task return body + def as_shallow_dict(self) -> dict: + """Serializes the ExternalModel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config + if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config + if self.anthropic_config: body['anthropic_config'] = self.anthropic_config + if self.cohere_config: body['cohere_config'] = self.cohere_config + if self.databricks_model_serving_config: + body['databricks_model_serving_config'] = self.databricks_model_serving_config + if self.google_cloud_vertex_ai_config: + body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config + if self.name is not None: body['name'] = self.name + if self.openai_config: body['openai_config'] = self.openai_config + if self.palm_config: body['palm_config'] = self.palm_config + if self.provider is not None: body['provider'] = self.provider + if self.task is not None: body['task'] = self.task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExternalModel: """Deserializes the ExternalModel from a dictionary.""" @@ -982,9 +1338,6 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel: class ExternalModelProvider(Enum): - """The name of the provider for the external model. 
Currently, the supported providers are - 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', - 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" AI21LABS = 'ai21labs' AMAZON_BEDROCK = 'amazon-bedrock' @@ -1015,6 +1368,14 @@ def as_dict(self) -> dict: if self.total_tokens is not None: body['total_tokens'] = self.total_tokens return body + def as_shallow_dict(self) -> dict: + """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes.""" + body = {} + if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens + if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens + if self.total_tokens is not None: body['total_tokens'] = self.total_tokens + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement: """Deserializes the ExternalModelUsageElement from a dictionary.""" @@ -1025,17 +1386,16 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement: @dataclass class FoundationModel: + """All fields are not sensitive as they are hard-coded in the system and made available to + customers.""" + description: Optional[str] = None - """The description of the foundation model.""" display_name: Optional[str] = None - """The display name of the foundation model.""" docs: Optional[str] = None - """The URL to the documentation of the foundation model.""" name: Optional[str] = None - """The name of the foundation model.""" def as_dict(self) -> dict: """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body.""" @@ -1046,6 +1406,15 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the FoundationModel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.display_name is not None: 
body['display_name'] = self.display_name + if self.docs is not None: body['docs'] = self.docs + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FoundationModel: """Deserializes the FoundationModel from a dictionary.""" @@ -1057,18 +1426,24 @@ def from_dict(cls, d: Dict[str, any]) -> FoundationModel: @dataclass class GetOpenApiResponse: - """The response is an OpenAPI spec in JSON format that typically includes fields like openapi, - info, servers and paths, etc.""" + contents: Optional[BinaryIO] = None def as_dict(self) -> dict: """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.contents: body['contents'] = self.contents + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents: body['contents'] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse: """Deserializes the GetOpenApiResponse from a dictionary.""" - return cls() + return cls(contents=d.get('contents', None)) @dataclass @@ -1082,6 +1457,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsResponse: """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary.""" @@ -1091,13 +1472,23 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo @dataclass class GoogleCloudVertexAiConfig: + project_id: str + """This is 
the Google Cloud project id that the service account is associated with.""" + + region: str + """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more + details. Some models are only available in specific regions. + + [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" + private_key: Optional[str] = None """The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys]. If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext` - [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" + [Best practices for managing service account keys]: + https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" private_key_plaintext: Optional[str] = None """The private key for the service account which has access to the Google Cloud Vertex AI Service @@ -1105,16 +1496,8 @@ class GoogleCloudVertexAiConfig: prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`. - [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" - - project_id: Optional[str] = None - """This is the Google Cloud project id that the service account is associated with.""" - - region: Optional[str] = None - """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more - details. Some models are only available in specific regions. 
- - [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" + [Best practices for managing service account keys]: + https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" def as_dict(self) -> dict: """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body.""" @@ -1125,6 +1508,15 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext + if self.project_id is not None: body['project_id'] = self.project_id + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: """Deserializes the GoogleCloudVertexAiConfig from a dictionary.""" @@ -1134,6 +1526,28 @@ def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: region=d.get('region', None)) +@dataclass +class HttpRequestResponse: + contents: Optional[BinaryIO] = None + + def as_dict(self) -> dict: + """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.contents: body['contents'] = self.contents + return body + + def as_shallow_dict(self) -> dict: + """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.contents: body['contents'] = self.contents + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> HttpRequestResponse: + """Deserializes the HttpRequestResponse from a dictionary.""" + return cls(contents=d.get('contents', None)) + + @dataclass class ListEndpointsResponse: endpoints: 
Optional[List[ServingEndpoint]] = None @@ -1145,6 +1559,12 @@ def as_dict(self) -> dict: if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoints: body['endpoints'] = self.endpoints + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse: """Deserializes the ListEndpointsResponse from a dictionary.""" @@ -1153,7 +1573,10 @@ def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse: @dataclass class ModelDataPlaneInfo: - query_info: Optional[oauth2.DataPlaneInfo] = None + """A representation of all DataPlaneInfo for operations that can be done on a model through Data + Plane APIs.""" + + query_info: Optional[DataPlaneInfo] = None """Information required to query DataPlane API 'query' endpoint.""" def as_dict(self) -> dict: @@ -1162,14 +1585,22 @@ def as_dict(self) -> dict: if self.query_info: body['query_info'] = self.query_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.query_info: body['query_info'] = self.query_info + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo: """Deserializes the ModelDataPlaneInfo from a dictionary.""" - return cls(query_info=_from_dict(d, 'query_info', oauth2.DataPlaneInfo)) + return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo)) @dataclass class OpenAiConfig: + """Configs needed to create an OpenAI model route.""" + microsoft_entra_client_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.""" @@ -1243,6 +1674,28 @@ def as_dict(self) -> dict: if self.openai_organization is not None: body['openai_organization'] = self.openai_organization return body + def 
as_shallow_dict(self) -> dict: + """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.microsoft_entra_client_id is not None: + body['microsoft_entra_client_id'] = self.microsoft_entra_client_id + if self.microsoft_entra_client_secret is not None: + body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: + body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext + if self.microsoft_entra_tenant_id is not None: + body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id + if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base + if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key + if self.openai_api_key_plaintext is not None: + body['openai_api_key_plaintext'] = self.openai_api_key_plaintext + if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type + if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version + if self.openai_deployment_name is not None: + body['openai_deployment_name'] = self.openai_deployment_name + if self.openai_organization is not None: body['openai_organization'] = self.openai_organization + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig: """Deserializes the OpenAiConfig from a dictionary.""" @@ -1280,6 +1733,14 @@ def as_dict(self) -> dict: body['palm_api_key_plaintext'] = self.palm_api_key_plaintext return body + def as_shallow_dict(self) -> dict: + """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key + if self.palm_api_key_plaintext is not None: + body['palm_api_key_plaintext'] = self.palm_api_key_plaintext + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PaLmConfig: 
"""Deserializes the PaLmConfig from a dictionary.""" @@ -1306,6 +1767,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes.""" + body = {} + if self.add_tags: body['add_tags'] = self.add_tags + if self.delete_tags: body['delete_tags'] = self.delete_tags + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags: """Deserializes the PatchServingEndpointTags from a dictionary.""" @@ -1317,13 +1786,10 @@ def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags: @dataclass class PayloadTable: name: Optional[str] = None - """The name of the payload table.""" status: Optional[str] = None - """The status of the payload table.""" status_message: Optional[str] = None - """The status message of the payload table.""" def as_dict(self) -> dict: """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body.""" @@ -1333,6 +1799,14 @@ def as_dict(self) -> dict: if self.status_message is not None: body['status_message'] = self.status_message return body + def as_shallow_dict(self) -> dict: + """Serializes the PayloadTable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PayloadTable: """Deserializes the PayloadTable from a dictionary.""" @@ -1341,6 +1815,57 @@ def from_dict(cls, d: Dict[str, any]) -> PayloadTable: status_message=d.get('status_message', None)) +@dataclass +class PutAiGatewayRequest: + guardrails: Optional[AiGatewayGuardrails] = None + """Configuration for AI Guardrails to prevent 
unwanted data and unsafe data in requests and + responses.""" + + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None + """Configuration for payload logging using inference tables. Use these tables to monitor and audit + data being sent to and received from model APIs and to improve model quality.""" + + name: Optional[str] = None + """The name of the serving endpoint whose AI Gateway is being updated. This field is required.""" + + rate_limits: Optional[List[AiGatewayRateLimit]] = None + """Configuration for rate limits which can be set to limit endpoint traffic.""" + + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None + """Configuration to enable usage tracking using system tables. These tables allow you to monitor + operational usage on endpoints and their associated costs.""" + + def as_dict(self) -> dict: + """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.guardrails: body['guardrails'] = self.guardrails.as_dict() + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 
PutAiGatewayRequest: + """Deserializes the PutAiGatewayRequest from a dictionary.""" + return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), + inference_table_config=_from_dict(d, 'inference_table_config', + AiGatewayInferenceTableConfig), + name=d.get('name', None), + rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), + usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) + + @dataclass class PutAiGatewayResponse: guardrails: Optional[AiGatewayGuardrails] = None @@ -1349,7 +1874,7 @@ class PutAiGatewayResponse: inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit - data being sent to and received from model APIs and to improve model quality .""" + data being sent to and received from model APIs and to improve model quality.""" rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" @@ -1367,6 +1892,15 @@ def as_dict(self) -> dict: if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse: """Deserializes the PutAiGatewayResponse from a dictionary.""" @@ -1377,6 +1911,34 @@ def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse: usage_tracking_config=_from_dict(d, 'usage_tracking_config', 
AiGatewayUsageTrackingConfig)) +@dataclass +class PutRequest: + name: Optional[str] = None + """The name of the serving endpoint whose rate limits are being updated. This field is required.""" + + rate_limits: Optional[List[RateLimit]] = None + """The list of endpoint rate limits.""" + + def as_dict(self) -> dict: + """Serializes the PutRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PutRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PutRequest: + """Deserializes the PutRequest from a dictionary.""" + return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) + + @dataclass class PutResponse: rate_limits: Optional[List[RateLimit]] = None @@ -1388,6 +1950,12 @@ def as_dict(self) -> dict: if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] return body + def as_shallow_dict(self) -> dict: + """Serializes the PutResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.rate_limits: body['rate_limits'] = self.rate_limits + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" @@ -1473,6 +2041,25 @@ def as_dict(self) -> dict: if self.temperature is not None: body['temperature'] = self.temperature return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dataframe_records: body['dataframe_records'] = self.dataframe_records + if 
self.dataframe_split: body['dataframe_split'] = self.dataframe_split + if self.extra_params: body['extra_params'] = self.extra_params + if self.input: body['input'] = self.input + if self.inputs: body['inputs'] = self.inputs + if self.instances: body['instances'] = self.instances + if self.max_tokens is not None: body['max_tokens'] = self.max_tokens + if self.messages: body['messages'] = self.messages + if self.n is not None: body['n'] = self.n + if self.name is not None: body['name'] = self.name + if self.prompt: body['prompt'] = self.prompt + if self.stop: body['stop'] = self.stop + if self.stream is not None: body['stream'] = self.stream + if self.temperature is not None: body['temperature'] = self.temperature + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryEndpointInput: """Deserializes the QueryEndpointInput from a dictionary.""" @@ -1532,15 +2119,29 @@ class QueryEndpointResponse: def as_dict(self) -> dict: """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.choices: body['choices'] = [v.as_dict() for v in self.choices] + if self.choices: body['choices'] = [v.as_dict() for v in self.choices] + if self.created is not None: body['created'] = self.created + if self.data: body['data'] = [v.as_dict() for v in self.data] + if self.id is not None: body['id'] = self.id + if self.model is not None: body['model'] = self.model + if self.object is not None: body['object'] = self.object.value + if self.predictions: body['predictions'] = [v for v in self.predictions] + if self.served_model_name is not None: body['served-model-name'] = self.served_model_name + if self.usage: body['usage'] = self.usage.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.choices: body['choices'] = self.choices if self.created is not None: body['created'] = 
self.created - if self.data: body['data'] = [v.as_dict() for v in self.data] + if self.data: body['data'] = self.data if self.id is not None: body['id'] = self.id if self.model is not None: body['model'] = self.model - if self.object is not None: body['object'] = self.object.value - if self.predictions: body['predictions'] = [v for v in self.predictions] + if self.object is not None: body['object'] = self.object + if self.predictions: body['predictions'] = self.predictions if self.served_model_name is not None: body['served-model-name'] = self.served_model_name - if self.usage: body['usage'] = self.usage.as_dict() + if self.usage: body['usage'] = self.usage return body @classmethod @@ -1586,6 +2187,14 @@ def as_dict(self) -> dict: if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RateLimit into a shallow dictionary of its immediate attributes.""" + body = {} + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RateLimit: """Deserializes the RateLimit from a dictionary.""" @@ -1595,15 +2204,12 @@ def from_dict(cls, d: Dict[str, any]) -> RateLimit: class RateLimitKey(Enum): - """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are - supported, with 'endpoint' being the default if not specified.""" ENDPOINT = 'endpoint' USER = 'user' class RateLimitRenewalPeriod(Enum): - """Renewal period field for a serving endpoint rate limit. 
Currently, only 'minute' is supported.""" MINUTE = 'minute' @@ -1624,6 +2230,13 @@ def as_dict(self) -> dict: if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage return body + def as_shallow_dict(self) -> dict: + """Serializes the Route into a shallow dictionary of its immediate attributes.""" + body = {} + if self.served_model_name is not None: body['served_model_name'] = self.served_model_name + if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Route: """Deserializes the Route from a dictionary.""" @@ -1637,11 +2250,9 @@ class ServedEntityInput: """The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of - __catalog_name__.__schema_name__.__model_name__.""" + **catalog_name.schema_name.model_name**.""" entity_version: Optional[str] = None - """The version of the model in Databricks Model Registry to be served or empty if the entity is a - FEATURE_SPEC.""" environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used @@ -1670,7 +2281,7 @@ class ServedEntityInput: """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' 
and ':' replaced with '-', and if - not specified for other entities, it defaults to -.""" + not specified for other entities, it defaults to entity_name-entity_version.""" scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" @@ -1683,13 +2294,13 @@ class ServedEntityInput: scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.""" - workload_type: Optional[str] = None + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. - [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body.""" @@ -1706,6 +2317,24 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = 
self.environment_vars + if self.external_model: body['external_model'] = self.external_model + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_throughput is not None: + body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_throughput is not None: + body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size if self.workload_type is not None: body['workload_type'] = self.workload_type return body @@ -1722,26 +2351,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput: name=d.get('name', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), workload_size=d.get('workload_size', None), - workload_type=d.get('workload_type', None)) + workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) @dataclass class ServedEntityOutput: creation_timestamp: Optional[int] = None - """The creation timestamp of the served entity in Unix time.""" creator: Optional[str] = None - """The email of the user who created the served entity.""" entity_name: Optional[str] = None - """The name of the entity served. The entity may be a model in the Databricks Model Registry, a - model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC - object, the full name of the object is given in the form of - __catalog_name__.__schema_name__.__model_name__.""" + """The name of the entity to be served. The entity may be a model in the Databricks Model Registry, + a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. 
If it is a UC + object, the full name of the object should be given in the form of + **catalog_name.schema_name.model_name**.""" entity_version: Optional[str] = None - """The version of the served entity in Databricks Model Registry or empty if the entity is a - FEATURE_SPEC.""" environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used @@ -1750,14 +2375,16 @@ class ServedEntityOutput: "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" external_model: Optional[ExternalModel] = None - """The external model that is served. NOTE: Only one of external_model, foundation_model, and - (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is - returned based on the endpoint type.""" + """The external model to be served. NOTE: Only one of external_model and (entity_name, + entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with + the latter set being used for custom model serving for a Databricks registered model. For an + existing endpoint with external_model, it cannot be updated to an endpoint without + external_model. If the endpoint is created without external_model, users cannot update it to add + external_model later. The task type of all external models within an endpoint must be the same.""" foundation_model: Optional[FoundationModel] = None - """The foundation model that is served. 
NOTE: Only one of foundation_model, external_model, and - (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is - returned based on the endpoint type.""" + """All fields are not sensitive as they are hard-coded in the system and made available to + customers.""" instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" @@ -1769,13 +2396,15 @@ class ServedEntityOutput: """The minimum tokens per second that the endpoint can scale down to.""" name: Optional[str] = None - """The name of the served entity.""" + """The name of a served entity. It must be unique across an endpoint. A served entity name can + consist of alphanumeric characters, dashes, and underscores. If not specified for an external + model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if + not specified for other entities, it defaults to entity_name-entity_version.""" scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" state: Optional[ServedModelState] = None - """Information corresponding to the state of the served entity.""" workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned @@ -1783,15 +2412,15 @@ class ServedEntityOutput: process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size - will be 0.""" + is 0.""" - workload_type: Optional[str] = None + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". 
For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. - [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body.""" @@ -1812,6 +2441,28 @@ def as_dict(self) -> dict: if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled if self.state: body['state'] = self.state.as_dict() if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model + if self.foundation_model: body['foundation_model'] = self.foundation_model + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_throughput is not None: + body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_throughput is not None: + body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = 
self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state + if self.workload_size is not None: body['workload_size'] = self.workload_size if self.workload_type is not None: body['workload_type'] = self.workload_type return body @@ -1832,31 +2483,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput: scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), workload_size=d.get('workload_size', None), - workload_type=d.get('workload_type', None)) + workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) @dataclass class ServedEntitySpec: entity_name: Optional[str] = None - """The name of the entity served. The entity may be a model in the Databricks Model Registry, a - model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC - object, the full name of the object is given in the form of - __catalog_name__.__schema_name__.__model_name__.""" entity_version: Optional[str] = None - """The version of the served entity in Databricks Model Registry or empty if the entity is a - FEATURE_SPEC.""" external_model: Optional[ExternalModel] = None - """The external model that is served. NOTE: Only one of external_model, foundation_model, and - (entity_name, entity_version) is returned based on the endpoint type.""" foundation_model: Optional[FoundationModel] = None - """The foundation model that is served. 
NOTE: Only one of foundation_model, external_model, and - (entity_name, entity_version) is returned based on the endpoint type.""" + """All fields are not sensitive as they are hard-coded in the system and made available to + customers.""" name: Optional[str] = None - """The name of the served entity.""" def as_dict(self) -> dict: """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body.""" @@ -1868,6 +2510,16 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.external_model: body['external_model'] = self.external_model + if self.foundation_model: body['foundation_model'] = self.foundation_model + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec: """Deserializes the ServedEntitySpec from a dictionary.""" @@ -1880,24 +2532,27 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec: @dataclass class ServedModelInput: + scale_to_zero_enabled: bool + """Whether the compute resources for the served entity should scale down to zero.""" + model_name: str - """The name of the model in Databricks Model Registry to be served or if the model resides in Unity - Catalog, the full name of model, in the form of __catalog_name__.__schema_name__.__model_name__.""" model_version: str - """The version of the model in Databricks Model Registry or Unity Catalog to be served.""" - - scale_to_zero_enabled: bool - """Whether the compute resources for the served model should scale down to zero.""" environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment 
variable key-value pairs used - for serving this model. Note: this is an experimental feature and subject to change. Example - model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": + for serving this entity. Note: this is an experimental feature and subject to change. Example + entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" instance_profile_arn: Optional[str] = None - """ARN of the instance profile that the served model will use to access AWS resources.""" + """ARN of the instance profile that the served entity uses to access AWS resources.""" + + max_provisioned_throughput: Optional[int] = None + """The maximum tokens per second that the endpoint can scale up to.""" + + min_provisioned_throughput: Optional[int] = None + """The minimum tokens per second that the endpoint can scale down to.""" max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" @@ -1906,9 +2561,18 @@ class ServedModelInput: """The minimum tokens per second that the endpoint can scale down to.""" name: Optional[str] = None - """The name of a served model. It must be unique across an endpoint. If not specified, this field - will default to -. A served model name can consist of alphanumeric - characters, dashes, and underscores.""" + """The name of a served entity. It must be unique across an endpoint. A served entity name can + consist of alphanumeric characters, dashes, and underscores. If not specified for an external + model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if + not specified for other entities, it defaults to entity_name-entity_version.""" + + workload_size: Optional[ServedModelInputWorkloadSize] = None + """The workload size of the served entity. 
The workload size corresponds to a range of provisioned + concurrency that the compute autoscales between. A single unit of provisioned concurrency can + process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), + "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If + scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size + is 0.""" workload_size: Optional[ServedModelInputWorkloadSize] = None """The workload size of the served model. The workload size corresponds to a range of provisioned @@ -1919,12 +2583,12 @@ class ServedModelInput: each workload size will be 0.""" workload_type: Optional[ServedModelInputWorkloadType] = None - """The workload type of the served model. The workload type selects which type of compute to use in - the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU + """The workload type of the served entity. The workload type selects which type of compute to use + in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. 
- [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body.""" @@ -1943,6 +2607,23 @@ def as_dict(self) -> dict: if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ServedModelInput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_throughput is not None: + body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_throughput is not None: + body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServedModelInput: """Deserializes the ServedModelInput from a dictionary.""" @@ -1959,12 +2640,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput: class ServedModelInputWorkloadSize(Enum): - """The workload size of the served model. The workload size corresponds to a range of provisioned - concurrency that the compute will autoscale between. 
A single unit of provisioned concurrency - can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned - concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned - concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for - each workload size will be 0.""" LARGE = 'Large' MEDIUM = 'Medium' @@ -1972,12 +2647,6 @@ class ServedModelInputWorkloadSize(Enum): class ServedModelInputWorkloadType(Enum): - """The workload type of the served model. The workload type selects which type of compute to use in - the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU - acceleration is available by selecting workload types like GPU_SMALL and others. See the - available [GPU types]. - - [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" CPU = 'CPU' GPU_LARGE = 'GPU_LARGE' @@ -1989,51 +2658,48 @@ class ServedModelInputWorkloadType(Enum): @dataclass class ServedModelOutput: creation_timestamp: Optional[int] = None - """The creation timestamp of the served model in Unix time.""" creator: Optional[str] = None - """The email of the user who created the served model.""" environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used - for serving this model. Note: this is an experimental feature and subject to change. Example - model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": + for serving this entity. Note: this is an experimental feature and subject to change. 
Example + entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" instance_profile_arn: Optional[str] = None - """ARN of the instance profile that the served model will use to access AWS resources.""" + """ARN of the instance profile that the served entity uses to access AWS resources.""" model_name: Optional[str] = None - """The name of the model in Databricks Model Registry or the full name of the model in Unity - Catalog.""" model_version: Optional[str] = None - """The version of the model in Databricks Model Registry or Unity Catalog to be served.""" name: Optional[str] = None - """The name of the served model.""" + """The name of a served entity. It must be unique across an endpoint. A served entity name can + consist of alphanumeric characters, dashes, and underscores. If not specified for an external + model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if + not specified for other entities, it defaults to entity_name-entity_version.""" scale_to_zero_enabled: Optional[bool] = None - """Whether the compute resources for the Served Model should scale down to zero.""" + """Whether the compute resources for the served entity should scale down to zero.""" state: Optional[ServedModelState] = None - """Information corresponding to the state of the Served Model.""" workload_size: Optional[str] = None - """The workload size of the served model. The workload size corresponds to a range of provisioned - concurrency that the compute will autoscale between. A single unit of provisioned concurrency - can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned - concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned - concurrency). 
If scale-to-zero is enabled, the lower bound of the provisioned concurrency for - each workload size will be 0.""" + """The workload size of the served entity. The workload size corresponds to a range of provisioned + concurrency that the compute autoscales between. A single unit of provisioned concurrency can + process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), + "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If + scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size + is 0.""" - workload_type: Optional[str] = None - """The workload type of the served model. The workload type selects which type of compute to use in - the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU + workload_type: Optional[ServingModelWorkloadType] = None + """The workload type of the served entity. The workload type selects which type of compute to use + in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. 
- [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body.""" @@ -2048,6 +2714,22 @@ def as_dict(self) -> dict: if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled if self.state: body['state'] = self.state.as_dict() if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state + if self.workload_size is not None: body['workload_size'] = self.workload_size if self.workload_type is not None: body['workload_type'] = self.workload_type return body @@ -2064,20 +2746,18 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelOutput: scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), 
workload_size=d.get('workload_size', None), - workload_type=d.get('workload_type', None)) + workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) @dataclass class ServedModelSpec: model_name: Optional[str] = None - """The name of the model in Databricks Model Registry or the full name of the model in Unity - Catalog.""" + """Only one of model_name and entity_name should be populated""" model_version: Optional[str] = None - """The version of the model in Databricks Model Registry or Unity Catalog to be served.""" + """Only one of model_version and entity_version should be populated""" name: Optional[str] = None - """The name of the served model.""" def as_dict(self) -> dict: """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body.""" @@ -2087,6 +2767,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec: """Deserializes the ServedModelSpec from a dictionary.""" @@ -2098,18 +2786,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec: @dataclass class ServedModelState: deployment: Optional[ServedModelStateDeployment] = None - """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity - is not ready yet because the deployment is still being created (i.e container image is building, - model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - served entity was previously in a ready state but no longer is and is attempting to recover. 
- DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED - indicates that there was an error trying to bring up the served entity (e.g container image - build failed, the model server failed to start due to a model loading error, etc.) - DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in - bringing up another served entity under the same endpoint and config version.""" deployment_state_message: Optional[str] = None - """More information about the state of the served entity, if available.""" def as_dict(self) -> dict: """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body.""" @@ -2119,6 +2797,14 @@ def as_dict(self) -> dict: body['deployment_state_message'] = self.deployment_state_message return body + def as_shallow_dict(self) -> dict: + """Serializes the ServedModelState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.deployment is not None: body['deployment'] = self.deployment + if self.deployment_state_message is not None: + body['deployment_state_message'] = self.deployment_state_message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServedModelState: """Deserializes the ServedModelState from a dictionary.""" @@ -2127,15 +2813,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelState: class ServedModelStateDeployment(Enum): - """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity - is not ready yet because the deployment is still being created (i.e container image is building, - model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - served entity was previously in a ready state but no longer is and is attempting to recover. - DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. 
DEPLOYMENT_FAILED - indicates that there was an error trying to bring up the served entity (e.g container image - build failed, the model server failed to start due to a model loading error, etc.) - DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in - bringing up another served entity under the same endpoint and config version.""" ABORTED = 'DEPLOYMENT_ABORTED' CREATING = 'DEPLOYMENT_CREATING' @@ -2155,6 +2832,12 @@ def as_dict(self) -> dict: if self.logs is not None: body['logs'] = self.logs return body + def as_shallow_dict(self) -> dict: + """Serializes the ServerLogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.logs is not None: body['logs'] = self.logs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse: """Deserializes the ServerLogsResponse from a dictionary.""" @@ -2164,8 +2847,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse: @dataclass class ServingEndpoint: ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are - currently supported.""" + """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported.""" config: Optional[EndpointCoreConfigSummary] = None """The config that is currently being served by the endpoint.""" @@ -2177,8 +2860,7 @@ class ServingEndpoint: """The email of the user who created the serving endpoint.""" id: Optional[str] = None - """System-generated ID of the endpoint. 
This is used to refer to the endpoint in the Permissions - API""" + """System-generated ID of the endpoint, included to be used by the Permissions API.""" last_updated_timestamp: Optional[int] = None """The timestamp when the endpoint was last updated by a user in Unix time.""" @@ -2211,6 +2893,22 @@ def as_dict(self) -> dict: if self.task is not None: body['task'] = self.task return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.config: body['config'] = self.config + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.state: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.task is not None: body['task'] = self.task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint: """Deserializes the ServingEndpoint from a dictionary.""" @@ -2250,6 +2948,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
ServingEndpointAccessControlRequest: """Deserializes the ServingEndpointAccessControlRequest from a dictionary.""" @@ -2287,6 +2995,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse: """Deserializes the ServingEndpointAccessControlResponse from a dictionary.""" @@ -2300,8 +3019,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse: @dataclass class ServingEndpointDetailed: ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are - currently supported.""" + """The AI Gateway configuration for the serving endpoint. 
NOTE: Only external model and provisioned + throughput endpoints are currently supported.""" config: Optional[EndpointCoreConfigOutput] = None """The config that is currently being served by the endpoint.""" @@ -2367,6 +3086,27 @@ def as_dict(self) -> dict: if self.task is not None: body['task'] = self.task return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointDetailed into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.config: body['config'] = self.config + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.data_plane_info: body['data_plane_info'] = self.data_plane_info + if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.pending_config: body['pending_config'] = self.pending_config + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.state: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.task is not None: body['task'] = self.task + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed: """Deserializes the ServingEndpointDetailed from a dictionary.""" @@ -2388,7 +3128,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed: class ServingEndpointDetailedPermissionLevel(Enum): - """The permission level of the principal making the request.""" CAN_MANAGE = 'CAN_MANAGE' CAN_QUERY = 'CAN_QUERY' @@ -2412,6 +3151,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: 
body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermission: """Deserializes the ServingEndpointPermission from a dictionary.""" @@ -2445,6 +3192,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissions: """Deserializes the ServingEndpointPermissions from a dictionary.""" @@ -2468,6 +3223,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsDescription: """Deserializes the ServingEndpointPermissionsDescription from a dictionary.""" @@ -2490,6 
+3252,13 @@ def as_dict(self) -> dict: if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: """Deserializes the ServingEndpointPermissionsRequest from a dictionary.""" @@ -2498,6 +3267,15 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: serving_endpoint_id=d.get('serving_endpoint_id', None)) +class ServingModelWorkloadType(Enum): + + CPU = 'CPU' + GPU_LARGE = 'GPU_LARGE' + GPU_MEDIUM = 'GPU_MEDIUM' + GPU_SMALL = 'GPU_SMALL' + MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' + + @dataclass class TrafficConfig: routes: Optional[List[Route]] = None @@ -2509,6 +3287,12 @@ def as_dict(self) -> dict: if self.routes: body['routes'] = [v.as_dict() for v in self.routes] return body + def as_shallow_dict(self) -> dict: + """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.routes: body['routes'] = self.routes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TrafficConfig: """Deserializes the TrafficConfig from a dictionary.""" @@ -2542,6 +3326,16 @@ def as_dict(self) -> dict: if self.text is not None: body['text'] = self.text return body + def as_shallow_dict(self) -> dict: + """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes.""" + body = {} + if self.finish_reason is not None: body['finishReason'] = self.finish_reason + if self.index is not None: body['index'] = self.index + if self.logprobs is not None: body['logprobs'] = self.logprobs + 
if self.message: body['message'] = self.message + if self.text is not None: body['text'] = self.text + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement: """Deserializes the V1ResponseChoiceElement from a dictionary.""" @@ -2621,9 +3415,9 @@ def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse: def create(self, name: str, - config: EndpointCoreConfigInput, *, ai_gateway: Optional[AiGatewayConfig] = None, + config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: @@ -2632,7 +3426,10 @@ def create(self, :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. - :param config: :class:`EndpointCoreConfigInput` + :param ai_gateway: :class:`AiGatewayConfig` (optional) + The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported. + :param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. :param ai_gateway: :class:`AiGatewayConfig` (optional) The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are @@ -2666,9 +3463,9 @@ def create(self, def create_and_wait( self, name: str, - config: EndpointCoreConfigInput, *, ai_gateway: Optional[AiGatewayConfig] = None, + config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None, @@ -2684,7 +3481,6 @@ def delete(self, name: str): """Delete a serving endpoint. :param name: str - The name of the serving endpoint. This field is required. 
""" @@ -2726,7 +3522,7 @@ def get(self, name: str) -> ServingEndpointDetailed: res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}', headers=headers) return ServingEndpointDetailed.from_dict(res) - def get_open_api(self, name: str): + def get_open_api(self, name: str) -> GetOpenApiResponse: """Get the schema for a serving endpoint. Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for @@ -2735,12 +3531,13 @@ def get_open_api(self, name: str): :param name: str The name of the serving endpoint that the served model belongs to. This field is required. - + :returns: :class:`GetOpenApiResponse` """ - headers = {'Accept': 'application/json', } + headers = {'Accept': 'text/plain', } - self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers) + res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers, raw=True) + return GetOpenApiResponse.from_dict(res) def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse: """Get serving endpoint permission levels. @@ -2779,6 +3576,44 @@ def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermission headers=headers) return ServingEndpointPermissions.from_dict(res) + def http_request(self, + connection_name: str, + method: ExternalFunctionRequestHttpMethod, + path: str, + *, + headers: Optional[str] = None, + json: Optional[str] = None, + params: Optional[str] = None) -> HttpRequestResponse: + """Make external services call using the credentials stored in UC Connection. + + :param connection_name: str + The connection name to use. This is required to identify the external connection. + :param method: :class:`ExternalFunctionRequestHttpMethod` + The HTTP method to use (e.g., 'GET', 'POST'). + :param path: str + The relative path for the API endpoint. This is required. + :param headers: str (optional) + Additional headers for the request. 
If not provided, only auth headers from connections would be + passed. + :param json: str (optional) + The JSON payload to send in the request body. + :param params: str (optional) + Query parameters for the request. + + :returns: :class:`HttpRequestResponse` + """ + body = {} + if connection_name is not None: body['connection_name'] = connection_name + if headers is not None: body['headers'] = headers + if json is not None: body['json'] = json + if method is not None: body['method'] = method.value + if params is not None: body['params'] = params + if path is not None: body['path'] = path + headers = {'Accept': 'text/plain', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers, raw=True) + return HttpRequestResponse.from_dict(res) + def list(self) -> Iterator[ServingEndpoint]: """Get all serving endpoints. @@ -2815,7 +3650,7 @@ def patch(self, name: str, *, add_tags: Optional[List[EndpointTag]] = None, - delete_tags: Optional[List[str]] = None) -> Iterator[EndpointTag]: + delete_tags: Optional[List[str]] = None) -> EndpointTags: """Update tags of a serving endpoint. Used to batch add and delete tags from a serving endpoint with a single API call. @@ -2827,7 +3662,7 @@ def patch(self, :param delete_tags: List[str] (optional) List of tag keys to delete - :returns: Iterator over :class:`EndpointTag` + :returns: :class:`EndpointTags` """ body = {} if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags] @@ -2835,7 +3670,7 @@ def patch(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', f'/api/2.0/serving-endpoints/{name}/tags', body=body, headers=headers) - return [EndpointTag.from_dict(v) for v in res] + return EndpointTags.from_dict(res) def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: """Update rate limits of a serving endpoint. 
@@ -2870,8 +3705,8 @@ def put_ai_gateway( usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse: """Update AI Gateway of a serving endpoint. - Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently - supported. + Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported. :param name: str The name of the serving endpoint whose AI Gateway is being updated. This field is required. @@ -2994,8 +3829,8 @@ def set_permissions( ) -> ServingEndpointPermissions: """Set serving endpoint permissions. - Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root - object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. @@ -3031,14 +3866,16 @@ def update_config(self, The name of the serving endpoint to update. This field is required. :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. + Note: this field is deprecated for creating new provisioned throughput endpoints, or updating + existing provisioned throughput endpoints that never have inference table configured; in these cases + please use AI Gateway to manage inference tables. :param served_entities: List[:class:`ServedEntityInput`] (optional) - A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served - entities. + The list of served entities under the serving endpoint config. 
:param served_models: List[:class:`ServedModelInput`] (optional) - (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A - serving endpoint can have up to 15 served models. + (Deprecated, use served_entities instead) The list of served models under the serving endpoint + config. :param traffic_config: :class:`TrafficConfig` (optional) - The traffic config defining how invocations to the serving endpoint should be routed. + The traffic configuration associated with the serving endpoint config. :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. @@ -3110,6 +3947,7 @@ class ServingEndpointsDataPlaneAPI: def __init__(self, api_client, control_plane): self._api = api_client self._control_plane = control_plane + from ..data_plane import DataPlaneService self._data_plane_service = DataPlaneService() def query(self, diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index a6a235158..42c8c882d 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -14,6 +14,194 @@ # all definitions in this file are in alphabetical order +@dataclass +class AccountIpAccessEnable: + acct_ip_acl_enable: BooleanMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. 
The setting name in the path parameter will be + respected instead. Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AccountIpAccessEnable: + """Deserializes the AccountIpAccessEnable from a dictionary.""" + return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage), + etag=d.get('etag', None), + setting_name=d.get('setting_name', None)) + + +@dataclass +class AibiDashboardEmbeddingAccessPolicy: + access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 
AibiDashboardEmbeddingAccessPolicy: + """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary.""" + return cls(access_policy_type=_enum(d, 'access_policy_type', + AibiDashboardEmbeddingAccessPolicyAccessPolicyType)) + + +class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): + + ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS' + ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS' + DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS' + + +@dataclass +class AibiDashboardEmbeddingAccessPolicySetting: + aibi_dashboard_embedding_access_policy: AibiDashboardEmbeddingAccessPolicy + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. 
Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aibi_dashboard_embedding_access_policy: + body[ + 'aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict( + ) + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aibi_dashboard_embedding_access_policy: + body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicySetting: + """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary.""" + return cls(aibi_dashboard_embedding_access_policy=_from_dict( + d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy), + etag=d.get('etag', None), + setting_name=d.get('setting_name', None)) + + +@dataclass +class AibiDashboardEmbeddingApprovedDomains: + approved_domains: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.approved_domains: body['approved_domains'] = self.approved_domains + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomains: + """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary.""" + return cls(approved_domains=d.get('approved_domains', None)) + + +@dataclass +class AibiDashboardEmbeddingApprovedDomainsSetting: + aibi_dashboard_embedding_approved_domains: AibiDashboardEmbeddingApprovedDomains + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. 
Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aibi_dashboard_embedding_approved_domains: + body[ + 'aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains.as_dict( + ) + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aibi_dashboard_embedding_approved_domains: + body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomainsSetting: + """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary.""" + return cls(aibi_dashboard_embedding_approved_domains=_from_dict( + d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains), + etag=d.get('etag', None), + setting_name=d.get('setting_name', None)) + + @dataclass class AutomaticClusterUpdateSetting: automatic_cluster_update_workspace: ClusterAutoRestartMessage @@ -41,6 +229,15 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.automatic_cluster_update_workspace: + body['automatic_cluster_update_workspace'] = 
self.automatic_cluster_update_workspace + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting: """Deserializes the AutomaticClusterUpdateSetting from a dictionary.""" @@ -60,6 +257,12 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BooleanMessage: """Deserializes the BooleanMessage from a dictionary.""" @@ -94,6 +297,17 @@ def as_dict(self) -> dict: body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.can_toggle is not None: body['can_toggle'] = self.can_toggle + if self.enabled is not None: body['enabled'] = self.enabled + if self.enablement_details: body['enablement_details'] = self.enablement_details + if self.maintenance_window: body['maintenance_window'] = self.maintenance_window + if self.restart_even_if_no_updates_available is not None: + body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessage: """Deserializes the ClusterAutoRestartMessage from a dictionary.""" @@ -135,6 +349,17 @@ def as_dict(self) -> dict: body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.forced_for_compliance_mode is not None: + body['forced_for_compliance_mode'] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: + body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: + body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageEnablementDetails: """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary.""" @@ -154,6 +379,12 @@ def as_dict(self) -> dict: body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindow: """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary.""" @@ -188,6 +419,14 @@ def as_dict(self) -> dict: if self.window_start_time: body['window_start_time'] = self.window_start_time.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.day_of_week is not None: body['day_of_week'] = self.day_of_week + if self.frequency is not None: body['frequency'] = self.frequency + if self.window_start_time: body['window_start_time'] = self.window_start_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: """Deserializes the 
ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary.""" @@ -222,6 +461,13 @@ def as_dict(self) -> dict: if self.minutes is not None: body['minutes'] = self.minutes return body + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes.""" + body = {} + if self.hours is not None: body['hours'] = self.hours + if self.minutes is not None: body['minutes'] = self.minutes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary.""" @@ -245,6 +491,13 @@ def as_dict(self) -> dict: if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body + def as_shallow_dict(self) -> dict: + """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" @@ -282,6 +535,15 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compliance_security_profile_workspace: + body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
ComplianceSecurityProfileSetting: """Deserializes the ComplianceSecurityProfileSetting from a dictionary.""" @@ -301,7 +563,9 @@ class ComplianceStandard(Enum): FEDRAMP_IL5 = 'FEDRAMP_IL5' FEDRAMP_MODERATE = 'FEDRAMP_MODERATE' HIPAA = 'HIPAA' + HITRUST = 'HITRUST' IRAP_PROTECTED = 'IRAP_PROTECTED' + ISMAP = 'ISMAP' ITAR_EAR = 'ITAR_EAR' NONE = 'NONE' PCI_DSS = 'PCI_DSS' @@ -329,6 +593,16 @@ def as_dict(self) -> dict: if self.slack: body['slack'] = self.slack.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Config into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email: body['email'] = self.email + if self.generic_webhook: body['generic_webhook'] = self.generic_webhook + if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams + if self.pagerduty: body['pagerduty'] = self.pagerduty + if self.slack: body['slack'] = self.slack + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Config: """Deserializes the Config from a dictionary.""" @@ -362,6 +636,14 @@ def as_dict(self) -> dict: if self.list_type is not None: body['list_type'] = self.list_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessList: """Deserializes the CreateIpAccessList from a dictionary.""" @@ -383,6 +665,12 @@ def as_dict(self) -> dict: if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_access_list: body['ip_access_list'] = 
self.ip_access_list + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessListResponse: """Deserializes the CreateIpAccessListResponse from a dictionary.""" @@ -407,6 +695,13 @@ def as_dict(self) -> dict: if self.region is not None: body['region'] = self.region return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateNetworkConnectivityConfigRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.region is not None: body['region'] = self.region + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest: """Deserializes the CreateNetworkConnectivityConfigRequest from a dictionary.""" @@ -428,6 +723,13 @@ def as_dict(self) -> dict: if self.display_name is not None: body['display_name'] = self.display_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config: body['config'] = self.config + if self.display_name is not None: body['display_name'] = self.display_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest: """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" @@ -455,6 +757,14 @@ def as_dict(self) -> dict: if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.application_id is not None: body['application_id'] = self.application_id + if self.comment is not None: body['comment'] = self.comment + if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
CreateOboTokenRequest: """Deserializes the CreateOboTokenRequest from a dictionary.""" @@ -479,6 +789,13 @@ def as_dict(self) -> dict: if self.token_value is not None: body['token_value'] = self.token_value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_info: body['token_info'] = self.token_info + if self.token_value is not None: body['token_value'] = self.token_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenResponse: """Deserializes the CreateOboTokenResponse from a dictionary.""" @@ -506,6 +823,15 @@ def as_dict(self) -> dict: if self.resource_id is not None: body['resource_id'] = self.resource_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreatePrivateEndpointRuleRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_id is not None: body['group_id'] = self.group_id + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreatePrivateEndpointRuleRequest: """Deserializes the CreatePrivateEndpointRuleRequest from a dictionary.""" @@ -541,6 +867,13 @@ def as_dict(self) -> dict: if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateTokenRequest: """Deserializes the CreateTokenRequest from a dictionary.""" 
@@ -562,6 +895,13 @@ def as_dict(self) -> dict: if self.token_value is not None: body['token_value'] = self.token_value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_info: body['token_info'] = self.token_info + if self.token_value is not None: body['token_value'] = self.token_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateTokenResponse: """Deserializes the CreateTokenResponse from a dictionary.""" @@ -588,6 +928,13 @@ def as_dict(self) -> dict: if self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body + def as_shallow_dict(self) -> dict: + """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes.""" + body = {} + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccount: """Deserializes the CspEnablementAccount from a dictionary.""" @@ -622,6 +969,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccountSetting: """Deserializes the CspEnablementAccountSetting from a dictionary.""" @@ -664,6 +1019,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def 
as_shallow_dict(self) -> dict: + """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.namespace: body['namespace'] = self.namespace + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting: """Deserializes the DefaultNamespaceSetting from a dictionary.""" @@ -672,6 +1035,96 @@ def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting: setting_name=d.get('setting_name', None)) +@dataclass +class DeleteAccountIpAccessEnableResponse: + """The etag is returned.""" + + etag: str + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteAccountIpAccessEnableResponse: + """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary.""" + return cls(etag=d.get('etag', None)) + + +@dataclass +class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: + """The etag is returned.""" + + etag: str + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: + """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary.""" + return cls(etag=d.get('etag', None)) + + +@dataclass +class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: + """The etag is returned.""" + + etag: str + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: + """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary.""" + return cls(etag=d.get('etag', None)) + + @dataclass class DeleteDefaultNamespaceSettingResponse: """The etag is returned.""" @@ -690,6 +1143,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse: """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary.""" @@ -714,6 +1173,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse: """Deserializes the DeleteDisableLegacyAccessResponse from 
a dictionary.""" @@ -738,6 +1203,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse: """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary.""" @@ -762,6 +1233,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse: """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary.""" @@ -776,6 +1253,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteNetworkConnectivityConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteNetworkConnectivityConfigurationResponse: """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary.""" @@ -800,6 +1282,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeletePersonalComputeSettingResponse: """Deserializes the DeletePersonalComputeSettingResponse from a dictionary.""" 
@@ -814,6 +1302,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -838,6 +1331,12 @@ def as_dict(self) -> dict: if self.etag is not None: body['etag'] = self.etag return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingResponse: """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary.""" @@ -879,6 +1378,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess: """Deserializes the DisableLegacyAccess from a dictionary.""" @@ -913,6 +1420,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is 
not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs: """Deserializes the DisableLegacyDbfs from a dictionary.""" @@ -948,6 +1463,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures: """Deserializes the DisableLegacyFeatures from a dictionary.""" @@ -956,6 +1479,270 @@ def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures: setting_name=d.get('setting_name', None)) +@dataclass +class EgressNetworkPolicy: + """The network policies applying for egress traffic. This message is used by the UI/REST API. 
We + translate this message to the format expected by the dataplane in Lakehouse Network Manager (for + the format expected by the dataplane, see networkconfig.textproto).""" + + internet_access: Optional[EgressNetworkPolicyInternetAccessPolicy] = None + """The access policy enforced for egress traffic to the internet.""" + + def as_dict(self) -> dict: + """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.internet_access: body['internet_access'] = self.internet_access.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.internet_access: body['internet_access'] = self.internet_access + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicy: + """Deserializes the EgressNetworkPolicy from a dictionary.""" + return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy)) + + +@dataclass +class EgressNetworkPolicyInternetAccessPolicy: + allowed_internet_destinations: Optional[ + List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None + + allowed_storage_destinations: Optional[ + List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None + + log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None + """Optional. If not specified, assume the policy is enforced for all workloads.""" + + restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None + """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: + Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can + only access explicitly allowed internet and storage destinations, as well as UC connections and + external locations. 
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via + private link.""" + + def as_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allowed_internet_destinations: + body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations] + if self.allowed_storage_destinations: + body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations] + if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict() + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allowed_internet_destinations: + body['allowed_internet_destinations'] = self.allowed_internet_destinations + if self.allowed_storage_destinations: + body['allowed_storage_destinations'] = self.allowed_storage_destinations + if self.log_only_mode: body['log_only_mode'] = self.log_only_mode + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicy: + """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary.""" + return cls(allowed_internet_destinations=_repeated_dict( + d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination), + allowed_storage_destinations=_repeated_dict( + d, 'allowed_storage_destinations', + EgressNetworkPolicyInternetAccessPolicyStorageDestination), + log_only_mode=_from_dict(d, 'log_only_mode', + EgressNetworkPolicyInternetAccessPolicyLogOnlyMode), + restriction_mode=_enum(d, 'restriction_mode', + EgressNetworkPolicyInternetAccessPolicyRestrictionMode)) + 
+ +@dataclass +class EgressNetworkPolicyInternetAccessPolicyInternetDestination: + """Users can specify accessible internet destinations when outbound access is restricted. We only + support domain name (FQDN) destinations for the time being, though going forwards we want to + support host names and IP addresses.""" + + destination: Optional[str] = None + + protocol: Optional[ + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None + """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP + filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be + set to TCP by default and hidden from the user. In the future, users may be able to select HTTP + filtering (i.e. SNI based filtering, filtering by FQDN).""" + + type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None + + def as_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + if self.protocol is not None: body['protocol'] = self.protocol.value + if self.type is not None: body['type'] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination is not None: body['destination'] = self.destination + if self.protocol is not None: body['protocol'] = self.protocol + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination: + """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary.""" + return cls( + 
destination=d.get('destination', None), + protocol=_enum( + d, 'protocol', + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + ), + type=_enum(d, 'type', + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType)) + + +class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum): + """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP + filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be + set to TCP by default and hidden from the user. In the future, users may be able to select HTTP + filtering (i.e. SNI based filtering, filtering by FQDN).""" + + TCP = 'TCP' + + +class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum): + + FQDN = 'FQDN' + + +@dataclass +class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: + log_only_mode_type: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType] = None + + workloads: Optional[List[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType]] = None + + def as_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value + if self.workloads: body['workloads'] = [v.value for v in self.workloads] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type + if self.workloads: body['workloads'] = self.workloads + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: + 
"""Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary.""" + return cls(log_only_mode_type=_enum( + d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType), + workloads=_repeated_enum(d, 'workloads', + EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType)) + + +class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum): + + ALL_SERVICES = 'ALL_SERVICES' + SELECTED_SERVICES = 'SELECTED_SERVICES' + + +class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum): + """The values should match the list of workloads used in networkconfig.proto""" + + DBSQL = 'DBSQL' + ML_SERVING = 'ML_SERVING' + + +class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum): + """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: + Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can + only access explicitly allowed internet and storage destinations, as well as UC connections and + external locations. 
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via + private link.""" + + FULL_ACCESS = 'FULL_ACCESS' + PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY' + RESTRICTED_ACCESS = 'RESTRICTED_ACCESS' + + +@dataclass +class EgressNetworkPolicyInternetAccessPolicyStorageDestination: + """Users can specify accessible storage destinations.""" + + allowed_paths: Optional[List[str]] = None + + azure_container: Optional[str] = None + + azure_dns_zone: Optional[str] = None + + azure_storage_account: Optional[str] = None + + azure_storage_service: Optional[str] = None + + bucket_name: Optional[str] = None + + region: Optional[str] = None + + type: Optional[EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType] = None + + def as_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths] + if self.azure_container is not None: body['azure_container'] = self.azure_container + if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.type is not None: body['type'] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allowed_paths: body['allowed_paths'] = self.allowed_paths + if self.azure_container is not None: body['azure_container'] = self.azure_container + if self.azure_dns_zone is not None: 
body['azure_dns_zone'] = self.azure_dns_zone + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination: + """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary.""" + return cls(allowed_paths=d.get('allowed_paths', None), + azure_container=d.get('azure_container', None), + azure_dns_zone=d.get('azure_dns_zone', None), + azure_storage_account=d.get('azure_storage_account', None), + azure_storage_service=d.get('azure_storage_service', None), + bucket_name=d.get('bucket_name', None), + region=d.get('region', None), + type=_enum( + d, 'type', + EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType)) + + +class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum): + + AWS_S3 = 'AWS_S3' + AZURE_STORAGE = 'AZURE_STORAGE' + CLOUDFLARE_R2 = 'CLOUDFLARE_R2' + GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE' + + @dataclass class EmailConfig: addresses: Optional[List[str]] = None @@ -967,6 +1754,12 @@ def as_dict(self) -> dict: if self.addresses: body['addresses'] = [v for v in self.addresses] return body + def as_shallow_dict(self) -> dict: + """Serializes the EmailConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.addresses: body['addresses'] = self.addresses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EmailConfig: """Deserializes the EmailConfig from a dictionary.""" @@ -981,6 +1774,11 @@ def as_dict(self) -> dict: body = {} return body + def 
as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Empty: """Deserializes the Empty from a dictionary.""" @@ -999,6 +1797,12 @@ def as_dict(self) -> dict: if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body + def as_shallow_dict(self) -> dict: + """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoring: """Deserializes the EnhancedSecurityMonitoring from a dictionary.""" @@ -1035,6 +1839,15 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enhanced_security_monitoring_workspace: + body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoringSetting: """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary.""" @@ -1056,6 +1869,12 @@ def as_dict(self) -> dict: if self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body + def as_shallow_dict(self) -> dict: + """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes.""" + body = {} + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount: """Deserializes the 
EsmEnablementAccount from a dictionary.""" @@ -1089,6 +1908,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting: """Deserializes the EsmEnablementAccountSetting from a dictionary.""" @@ -1126,6 +1953,16 @@ def as_dict(self) -> dict: if self.token_type is not None: body['tokenType'] = self.token_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential is not None: body['credential'] = self.credential + if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time + if self.owner_id is not None: body['ownerId'] = self.owner_id + if self.scopes: body['scopes'] = self.scopes + if self.token_type is not None: body['tokenType'] = self.token_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExchangeToken: """Deserializes the ExchangeToken from a dictionary.""" @@ -1157,6 +1994,14 @@ def as_dict(self) -> dict: if self.token_type: body['tokenType'] = [v.value for v in self.token_type] return body + def as_shallow_dict(self) -> dict: + """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.partition_id: body['partitionId'] = self.partition_id + if self.scopes: body['scopes'] = self.scopes + if self.token_type: body['tokenType'] = self.token_type + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> ExchangeTokenRequest: """Deserializes the ExchangeTokenRequest from a dictionary.""" @@ -1177,6 +2022,12 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v.as_dict() for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenResponse: """Deserializes the ExchangeTokenResponse from a dictionary.""" @@ -1196,6 +2047,12 @@ def as_dict(self) -> dict: if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse: """Deserializes the FetchIpAccessListResponse from a dictionary.""" @@ -1233,6 +2090,17 @@ def as_dict(self) -> dict: if self.username_set is not None: body['username_set'] = self.username_set return body + def as_shallow_dict(self) -> dict: + """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.password is not None: body['password'] = self.password + if self.password_set is not None: body['password_set'] = self.password_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + if self.username is not None: body['username'] = self.username + if self.username_set is not None: body['username_set'] = self.username_set + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig: """Deserializes the GenericWebhookConfig from a dictionary.""" @@ -1255,6 +2123,12 @@ def as_dict(self) -> 
dict: if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListResponse: """Deserializes the GetIpAccessListResponse from a dictionary.""" @@ -1273,6 +2147,12 @@ def as_dict(self) -> dict: if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListsResponse: """Deserializes the GetIpAccessListsResponse from a dictionary.""" @@ -1290,6 +2170,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetTokenPermissionLevelsResponse: """Deserializes the GetTokenPermissionLevelsResponse from a dictionary.""" @@ -1308,6 +2194,12 @@ def as_dict(self) -> dict: if self.token_info: body['token_info'] = self.token_info.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_info: body['token_info'] = self.token_info + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> GetTokenResponse: """Deserializes the GetTokenResponse from a dictionary.""" @@ -1365,6 +2257,21 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.address_count is not None: body['address_count'] = self.address_count + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_id is not None: body['list_id'] = self.list_id + if self.list_type is not None: body['list_type'] = self.list_type + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> IpAccessListInfo: """Deserializes the IpAccessListInfo from a dictionary.""" @@ -1392,6 +2299,12 @@ def as_dict(self) -> dict: if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListIpAccessListResponse: """Deserializes the ListIpAccessListResponse from a dictionary.""" @@ -1413,6 +2326,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListNccAzurePrivateEndpointRulesResponse into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.items: body['items'] = self.items + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListNccAzurePrivateEndpointRulesResponse: """Deserializes the ListNccAzurePrivateEndpointRulesResponse from a dictionary.""" @@ -1435,6 +2355,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items: body['items'] = self.items + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsResponse: """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary.""" @@ -1456,6 +2383,13 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse: """Deserializes the ListNotificationDestinationsResponse from a dictionary.""" @@ -1482,6 +2416,14 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_type is not None: body['destination_type'] = 
self.destination_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult: """Deserializes the ListNotificationDestinationsResult from a dictionary.""" @@ -1501,6 +2443,12 @@ def as_dict(self) -> dict: if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_infos: body['token_infos'] = self.token_infos + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListPublicTokensResponse: """Deserializes the ListPublicTokensResponse from a dictionary.""" @@ -1520,6 +2468,12 @@ def as_dict(self) -> dict: if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_infos: body['token_infos'] = self.token_infos + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListTokensResponse: """Deserializes the ListTokensResponse from a dictionary.""" @@ -1551,6 +2505,13 @@ def as_dict(self) -> dict: if self.url_set is not None: body['url_set'] = self.url_set return body + def as_shallow_dict(self) -> dict: + """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig: """Deserializes the MicrosoftTeamsConfig from a dictionary.""" @@ -1572,6 +2533,12 @@ def as_dict(self) -> dict: if self.cidr_blocks: body['cidr_blocks'] = [v for v in 
self.cidr_blocks] return body + def as_shallow_dict(self) -> dict: + """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NccAwsStableIpRule: """Deserializes the NccAwsStableIpRule from a dictionary.""" @@ -1621,15 +2588,31 @@ class NccAzurePrivateEndpointRule: updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - def as_dict(self) -> dict: - """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.group_id is not None: body['group_id'] = self.group_id.value + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.connection_state is not None: body['connection_state'] = 
self.connection_state if self.creation_time is not None: body['creation_time'] = self.creation_time if self.deactivated is not None: body['deactivated'] = self.deactivated if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.group_id is not None: body['group_id'] = self.group_id.value + if self.group_id is not None: body['group_id'] = self.group_id if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id if self.resource_id is not None: body['resource_id'] = self.resource_id @@ -1704,6 +2687,14 @@ def as_dict(self) -> dict: if self.target_services: body['target_services'] = [v for v in self.target_services] return body + def as_shallow_dict(self) -> dict: + """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.subnets: body['subnets'] = self.subnets + if self.target_region is not None: body['target_region'] = self.target_region + if self.target_services: body['target_services'] = self.target_services + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NccAzureServiceEndpointRule: """Deserializes the NccAzureServiceEndpointRule from a dictionary.""" @@ -1733,6 +2724,13 @@ def as_dict(self) -> dict: if self.target_rules: body['target_rules'] = self.target_rules.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_rules: body['default_rules'] = self.default_rules + if self.target_rules: body['target_rules'] = self.target_rules + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NccEgressConfig: """Deserializes the NccEgressConfig from a dictionary.""" @@ -1762,6 +2760,14 @@ def as_dict(self) -> dict: body['azure_service_endpoint_rule'] = 
self.azure_service_endpoint_rule.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule + if self.azure_service_endpoint_rule: + body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NccEgressDefaultRules: """Deserializes the NccEgressDefaultRules from a dictionary.""" @@ -1784,6 +2790,13 @@ def as_dict(self) -> dict: body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules] return body + def as_shallow_dict(self) -> dict: + """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes.""" + body = {} + if self.azure_private_endpoint_rules: + body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NccEgressTargetRules: """Deserializes the NccEgressTargetRules from a dictionary.""" @@ -1831,6 +2844,19 @@ def as_dict(self) -> dict: if self.updated_time is not None: body['updated_time'] = self.updated_time return body + def as_shallow_dict(self) -> dict: + """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.egress_config: body['egress_config'] = self.egress_config + if self.name is not None: body['name'] = self.name + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.region is not None: body['region'] = self.region + if self.updated_time is not None: body['updated_time'] = self.updated_time + return body + @classmethod 
def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration: """Deserializes the NetworkConnectivityConfiguration from a dictionary.""" @@ -1867,6 +2893,15 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config: body['config'] = self.config + if self.destination_type is not None: body['destination_type'] = self.destination_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NotificationDestination: """Deserializes the NotificationDestination from a dictionary.""" @@ -1891,6 +2926,13 @@ def as_dict(self) -> dict: if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set return body + def as_shallow_dict(self) -> dict: + """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.integration_key is not None: body['integration_key'] = self.integration_key + if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig: """Deserializes the PagerdutyConfig from a dictionary.""" @@ -1911,6 +2953,12 @@ def as_dict(self) -> dict: if self.workspace_id is not None: body['workspaceId'] = self.workspace_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PartitionId into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspace_id is not None: body['workspaceId'] = self.workspace_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PartitionId: """Deserializes the PartitionId from a dictionary.""" @@ -1932,6 +2980,12 @@ def as_dict(self) -> dict: if 
self.value is not None: body['value'] = self.value.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PersonalComputeMessage: """Deserializes the PersonalComputeMessage from a dictionary.""" @@ -1975,6 +3029,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.personal_compute: body['personal_compute'] = self.personal_compute + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PersonalComputeSetting: """Deserializes the PersonalComputeSetting from a dictionary.""" @@ -2006,6 +3068,15 @@ def as_dict(self) -> dict: if self.token_id is not None: body['token_id'] = self.token_id return body + def as_shallow_dict(self) -> dict: + """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.token_id is not None: body['token_id'] = self.token_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PublicTokenInfo: """Deserializes the PublicTokenInfo from a dictionary.""" @@ -2046,6 +3117,16 @@ def as_dict(self) -> dict: if self.list_type is not None: body['list_type'] = self.list_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ReplaceIpAccessList into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ReplaceIpAccessList: """Deserializes the ReplaceIpAccessList from a dictionary.""" @@ -2064,6 +3145,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse: """Deserializes the ReplaceResponse from a dictionary.""" @@ -2080,6 +3166,12 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsMessage: """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary.""" @@ -2119,6 +3211,14 @@ def as_dict(self) -> dict: if self.setting_name is not None: body['setting_name'] = self.setting_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsSetting: """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary.""" @@ -2139,6 +3239,12 @@ def as_dict(self) -> dict: if self.token_id is not None: body['token_id'] = self.token_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_id is not None: body['token_id'] = self.token_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RevokeTokenRequest: """Deserializes the RevokeTokenRequest from a dictionary.""" @@ -2153,6 +3259,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RevokeTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RevokeTokenResponse: """Deserializes the RevokeTokenResponse from a dictionary.""" @@ -2167,6 +3278,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse: """Deserializes the SetStatusResponse from a dictionary.""" @@ -2188,6 +3304,13 @@ def as_dict(self) -> dict: if self.url_set is not None: body['url_set'] = self.url_set return body + def as_shallow_dict(self) -> dict: + """Serializes the SlackConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SlackConfig: """Deserializes the SlackConfig from a dictionary.""" @@ -2205,6 +3328,12 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def 
as_shallow_dict(self) -> dict: + """Serializes the StringMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StringMessage: """Deserializes the StringMessage from a dictionary.""" @@ -2235,6 +3364,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlRequest: """Deserializes the TokenAccessControlRequest from a dictionary.""" @@ -2272,6 +3411,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlResponse: """Deserializes the TokenAccessControlResponse from a dictionary.""" @@ -2299,6 +3449,9 @@ class 
TokenInfo: expiry_time: Optional[int] = None """Timestamp when the token expires.""" + last_used_day: Optional[int] = None + """Approximate timestamp for the day the token was last used. Accurate up to 1 day.""" + owner_id: Optional[int] = None """User ID of the user that owns the token.""" @@ -2316,6 +3469,21 @@ def as_dict(self) -> dict: if self.created_by_username is not None: body['created_by_username'] = self.created_by_username if self.creation_time is not None: body['creation_time'] = self.creation_time if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.last_used_day is not None: body['last_used_day'] = self.last_used_day + if self.owner_id is not None: body['owner_id'] = self.owner_id + if self.token_id is not None: body['token_id'] = self.token_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TokenInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.created_by_id is not None: body['created_by_id'] = self.created_by_id + if self.created_by_username is not None: body['created_by_username'] = self.created_by_username + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.last_used_day is not None: body['last_used_day'] = self.last_used_day if self.owner_id is not None: body['owner_id'] = self.owner_id if self.token_id is not None: body['token_id'] = self.token_id if self.workspace_id is not None: body['workspace_id'] = self.workspace_id @@ -2329,6 +3497,7 @@ def from_dict(cls, d: Dict[str, any]) -> TokenInfo: created_by_username=d.get('created_by_username', None), creation_time=d.get('creation_time', None), expiry_time=d.get('expiry_time', None), + last_used_day=d.get('last_used_day', None), owner_id=d.get('owner_id', None), 
token_id=d.get('token_id', None), workspace_id=d.get('workspace_id', None)) @@ -2351,6 +3520,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenPermission: """Deserializes the TokenPermission from a dictionary.""" @@ -2382,6 +3559,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenPermissions: """Deserializes the TokenPermissions from a dictionary.""" @@ -2404,6 +3589,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsDescription: 
"""Deserializes the TokenPermissionsDescription from a dictionary.""" @@ -2422,6 +3614,12 @@ def as_dict(self) -> dict: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] return body + def as_shallow_dict(self) -> dict: + """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest: """Deserializes the TokenPermissionsRequest from a dictionary.""" @@ -2432,9 +3630,142 @@ class TokenType(Enum): """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.""" ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN' + ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY' AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN' +@dataclass +class UpdateAccountIpAccessEnableRequest: + """Details required to update a setting.""" + + allow_missing: bool + """This should always be set to true for Settings API. Added for AIP compliance.""" + + setting: AccountIpAccessEnable + + field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAccountIpAccessEnableRequest: + """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary.""" + return cls(allow_missing=d.get('allow_missing', None), + field_mask=d.get('field_mask', None), + setting=_from_dict(d, 'setting', AccountIpAccessEnable)) + + +@dataclass +class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: + """Details required to update a setting.""" + + allow_missing: bool + """This should always be set to true for Settings API. Added for AIP compliance.""" + + setting: AibiDashboardEmbeddingAccessPolicySetting + + field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. 
+ + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: + """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary.""" + return cls(allow_missing=d.get('allow_missing', None), + field_mask=d.get('field_mask', None), + setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting)) + + +@dataclass +class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: + """Details required to update a setting.""" + + allow_missing: bool + """This should always be set to true for Settings API. Added for AIP compliance.""" + + setting: AibiDashboardEmbeddingApprovedDomainsSetting + + field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: + """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary.""" + return cls(allow_missing=d.get('allow_missing', None), + field_mask=d.get('field_mask', None), + setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting)) + + @dataclass class UpdateAutomaticClusterUpdateSettingRequest: """Details required to update a setting.""" @@ -2445,9 +3776,15 @@ class UpdateAutomaticClusterUpdateSettingRequest: setting: AutomaticClusterUpdateSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. 
The field mask needs to be supplied as single string. To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2457,6 +3794,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateAutomaticClusterUpdateSettingRequest: """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary.""" @@ -2475,9 +3820,15 @@ class UpdateComplianceSecurityProfileSettingRequest: setting: ComplianceSecurityProfileSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2487,6 +3838,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateComplianceSecurityProfileSettingRequest: """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary.""" @@ -2505,9 +3864,15 @@ class UpdateCspEnablementAccountSettingRequest: setting: CspEnablementAccountSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2517,6 +3882,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest: """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" @@ -2542,9 +3915,15 @@ class UpdateDefaultNamespaceSettingRequest: applies when using Unity Catalog-enabled compute.""" field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2554,6 +3933,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest: """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary.""" @@ -2572,9 +3959,15 @@ class UpdateDisableLegacyAccessRequest: setting: DisableLegacyAccess field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body.""" @@ -2584,6 +3977,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest: """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary.""" @@ -2602,9 +4003,15 @@ class UpdateDisableLegacyDbfsRequest: setting: DisableLegacyDbfs field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body.""" @@ -2614,6 +4021,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest: """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary.""" @@ -2632,9 +4047,15 @@ class UpdateDisableLegacyFeaturesRequest: setting: DisableLegacyFeatures field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body.""" @@ -2644,6 +4065,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest: """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary.""" @@ -2662,9 +4091,15 @@ class UpdateEnhancedSecurityMonitoringSettingRequest: setting: EnhancedSecurityMonitoringSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2674,6 +4109,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" @@ -2692,9 +4135,15 @@ class UpdateEsmEnablementAccountSettingRequest: setting: EsmEnablementAccountSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2704,6 +4153,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest: """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" @@ -2743,6 +4200,16 @@ def as_dict(self) -> dict: if self.list_type is not None: body['list_type'] = self.list_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if 
self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList: """Deserializes the UpdateIpAccessList from a dictionary.""" @@ -2762,6 +4229,7 @@ class UpdateNotificationDestinationRequest: """The display name for the notification destination.""" id: Optional[str] = None + """UUID identifying notification destination.""" def as_dict(self) -> dict: """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" @@ -2771,6 +4239,14 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config: body['config'] = self.config + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest: """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" @@ -2789,9 +4265,15 @@ class UpdatePersonalComputeSettingRequest: setting: PersonalComputeSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2801,6 +4283,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalComputeSettingRequest: """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary.""" @@ -2817,6 +4307,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -2833,9 +4328,15 @@ class UpdateRestrictWorkspaceAdminsSettingRequest: setting: RestrictWorkspaceAdminsSetting field_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. 
To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" def as_dict(self) -> dict: """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body.""" @@ -2845,6 +4346,14 @@ def as_dict(self) -> dict: if self.setting: body['setting'] = self.setting.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRestrictWorkspaceAdminsSettingRequest: """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary.""" @@ -3079,6 +4588,7 @@ def __init__(self, api_client): self._csp_enablement_account = CspEnablementAccountAPI(self._api) self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api) + self._enable_ip_access_lists = EnableIpAccessListsAPI(self._api) self._esm_enablement_account = EsmEnablementAccountAPI(self._api) self._personal_compute = PersonalComputeAPI(self._api) @@ -3092,6 +4602,11 @@ def 
disable_legacy_features(self) -> DisableLegacyFeaturesAPI: """Disable legacy features for new Databricks workspaces.""" return self._disable_legacy_features + @property + def enable_ip_access_lists(self) -> EnableIpAccessListsAPI: + """Controls the enforcement of IP access lists for accessing the account console.""" + return self._enable_ip_access_lists + @property def esm_enablement_account(self) -> EsmEnablementAccountAPI: """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces.""" @@ -3103,6 +4618,197 @@ def personal_compute(self) -> PersonalComputeAPI: return self._personal_compute +class AibiDashboardEmbeddingAccessPolicyAPI: + """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the + workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, + *, + etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: + """Delete the AI/BI dashboard embedding access policy. + + Delete the AI/BI dashboard embedding access policy, reverting back to the default. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do('DELETE', + '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', + query=query, + headers=headers) + return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res) + + def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: + """Retrieve the AI/BI dashboard embedding access policy. + + Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, + permitting AI/BI dashboards to be embedded on approved domains. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', + query=query, + headers=headers) + return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) + + def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, + field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting: + """Update the AI/BI dashboard embedding access policy. + + Updates the AI/BI dashboard embedding access policy at the workspace level. 
+ + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` + """ + body = {} + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', + body=body, + headers=headers) + return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) + + +class AibiDashboardEmbeddingApprovedDomainsAPI: + """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list + can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, + *, + etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: + """Delete AI/BI dashboard embedding approved domains. 
+ + Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default + empty list. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do('DELETE', + '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', + query=query, + headers=headers) + return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res) + + def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: + """Retrieve the list of domains approved to host embedded AI/BI dashboards. + + Retrieves the list of domains approved to host embedded AI/BI dashboards. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', + query=query, + headers=headers) + return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) + + def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, + field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting: + """Update the list of domains approved to host embedded AI/BI dashboards. + + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the + current workspace access policy is not ALLOW_APPROVED_DOMAINS. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
+ + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + """ + body = {} + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', + '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', + body=body, + headers=headers) + return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) + + class AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned off.""" @@ -3148,9 +4854,15 @@ def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AutomaticClusterUpdateSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:returns: :class:`AutomaticClusterUpdateSetting` """ @@ -3214,9 +4926,15 @@ def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`ComplianceSecurityProfileSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`ComplianceSecurityProfileSetting` """ @@ -3316,9 +5034,15 @@ def update(self, allow_missing: bool, setting: CspEnablementAccountSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). 
The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`CspEnablementAccountSetting` """ @@ -3426,9 +5150,15 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DefaultNamespaceSetting` """ @@ -3516,9 +5246,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess, This should always be set to true for Settings API. Added for AIP compliance. 
:param setting: :class:`DisableLegacyAccess` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyAccess` """ @@ -3601,9 +5337,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. 
+ + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyDbfs` """ @@ -3692,9 +5434,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyFeatures` """ @@ -3712,6 +5460,101 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures, return DisableLegacyFeatures.from_dict(res) +class EnableIpAccessListsAPI: + """Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or + disable restricted access based on IP addresses.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: + """Delete the account IP access toggle setting. 
+ + Reverts the value of the account IP access toggle setting to default (ON) + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAccountIpAccessEnableResponse` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', + query=query, + headers=headers) + return DeleteAccountIpAccessEnableResponse.from_dict(res) + + def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable: + """Get the account IP access toggle setting. + + Gets the value of the account IP access toggle setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`AccountIpAccessEnable` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', + query=query, + headers=headers) + return AccountIpAccessEnable.from_dict(res) + + def update(self, allow_missing: bool, setting: AccountIpAccessEnable, + field_mask: str) -> AccountIpAccessEnable: + """Update the account IP access toggle setting. + + Updates the value of the account IP access toggle setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AccountIpAccessEnable` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
+ + :returns: :class:`AccountIpAccessEnable` + """ + body = {} + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do( + 'PATCH', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', + body=body, + headers=headers) + return AccountIpAccessEnable.from_dict(res) + + class EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the @@ -3761,9 +5604,15 @@ def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:returns: :class:`EnhancedSecurityMonitoringSetting` """ @@ -3824,9 +5673,15 @@ def update(self, allow_missing: bool, setting: EsmEnablementAccountSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`EsmEnablementAccountSetting` """ @@ -4365,6 +6220,7 @@ def update(self, required in the request body. :param id: str + UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) @@ -4455,9 +6311,15 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. 
The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`PersonalComputeSetting` """ @@ -4555,9 +6417,15 @@ def update(self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:returns: :class:`RestrictWorkspaceAdminsSetting` """ @@ -4580,6 +6448,8 @@ class SettingsAPI: def __init__(self, api_client): self._api = api_client + self._aibi_dashboard_embedding_access_policy = AibiDashboardEmbeddingAccessPolicyAPI(self._api) + self._aibi_dashboard_embedding_approved_domains = AibiDashboardEmbeddingApprovedDomainsAPI(self._api) self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api) self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api) self._default_namespace = DefaultNamespaceAPI(self._api) @@ -4588,6 +6458,16 @@ def __init__(self, api_client): self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api) self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api) + @property + def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI: + """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level.""" + return self._aibi_dashboard_embedding_access_policy + + @property + def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI: + """Controls the list of domains approved to host the embedded AI/BI dashboards.""" + return self._aibi_dashboard_embedding_approved_domains + @property def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace.""" @@ -4667,7 +6547,7 @@ def delete(self, token_id: str): Deletes a token, specified by its ID. :param token_id: str - The ID of the token to get. + The ID of the token to revoke. """ @@ -4751,7 +6631,8 @@ def set_permissions( access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions: """Set token permissions. - Sets permissions on all tokens. Tokens can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. 
Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 772bc7aee..2015f4ac5 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -23,418 +23,6 @@ class AuthenticationType(Enum): TOKEN = 'TOKEN' -@dataclass -class CentralCleanRoomInfo: - clean_room_assets: Optional[List[CleanRoomAssetInfo]] = None - """All assets from all collaborators that are available in the clean room. Only one of table_info - or notebook_info will be filled in.""" - - collaborators: Optional[List[CleanRoomCollaboratorInfo]] = None - """All collaborators who are in the clean room.""" - - creator: Optional[CleanRoomCollaboratorInfo] = None - """The collaborator who created the clean room.""" - - station_cloud: Optional[str] = None - """The cloud where clean room tasks will be run.""" - - station_region: Optional[str] = None - """The region where clean room tasks will be run.""" - - def as_dict(self) -> dict: - """Serializes the CentralCleanRoomInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.clean_room_assets: body['clean_room_assets'] = [v.as_dict() for v in self.clean_room_assets] - if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators] - if self.creator: body['creator'] = self.creator.as_dict() - if self.station_cloud is not None: body['station_cloud'] = self.station_cloud - if self.station_region is not None: body['station_region'] = self.station_region - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CentralCleanRoomInfo: - """Deserializes the CentralCleanRoomInfo from a dictionary.""" - return cls(clean_room_assets=_repeated_dict(d, 'clean_room_assets', CleanRoomAssetInfo), - collaborators=_repeated_dict(d, 'collaborators', 
CleanRoomCollaboratorInfo), - creator=_from_dict(d, 'creator', CleanRoomCollaboratorInfo), - station_cloud=d.get('station_cloud', None), - station_region=d.get('station_region', None)) - - -@dataclass -class CleanRoomAssetInfo: - added_at: Optional[int] = None - """Time at which this asset was added, in epoch milliseconds.""" - - notebook_info: Optional[CleanRoomNotebookInfo] = None - """Details about the notebook asset.""" - - owner: Optional[CleanRoomCollaboratorInfo] = None - """The collaborator who owns the asset.""" - - table_info: Optional[CleanRoomTableInfo] = None - """Details about the table asset.""" - - updated_at: Optional[int] = None - """Time at which this asset was updated, in epoch milliseconds.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomAssetInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.added_at is not None: body['added_at'] = self.added_at - if self.notebook_info: body['notebook_info'] = self.notebook_info.as_dict() - if self.owner: body['owner'] = self.owner.as_dict() - if self.table_info: body['table_info'] = self.table_info.as_dict() - if self.updated_at is not None: body['updated_at'] = self.updated_at - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetInfo: - """Deserializes the CleanRoomAssetInfo from a dictionary.""" - return cls(added_at=d.get('added_at', None), - notebook_info=_from_dict(d, 'notebook_info', CleanRoomNotebookInfo), - owner=_from_dict(d, 'owner', CleanRoomCollaboratorInfo), - table_info=_from_dict(d, 'table_info', CleanRoomTableInfo), - updated_at=d.get('updated_at', None)) - - -@dataclass -class CleanRoomCatalog: - catalog_name: Optional[str] = None - """Name of the catalog in the clean room station. 
Empty for notebooks.""" - - notebook_files: Optional[List[SharedDataObject]] = None - """The details of the shared notebook files.""" - - tables: Optional[List[SharedDataObject]] = None - """The details of the shared tables.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomCatalog into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.notebook_files: body['notebook_files'] = [v.as_dict() for v in self.notebook_files] - if self.tables: body['tables'] = [v.as_dict() for v in self.tables] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalog: - """Deserializes the CleanRoomCatalog from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), - notebook_files=_repeated_dict(d, 'notebook_files', SharedDataObject), - tables=_repeated_dict(d, 'tables', SharedDataObject)) - - -@dataclass -class CleanRoomCatalogUpdate: - catalog_name: Optional[str] = None - """The name of the catalog to update assets.""" - - updates: Optional[SharedDataObjectUpdate] = None - """The updates to the assets in the catalog.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomCatalogUpdate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.updates: body['updates'] = self.updates.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalogUpdate: - """Deserializes the CleanRoomCatalogUpdate from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), - updates=_from_dict(d, 'updates', SharedDataObjectUpdate)) - - -@dataclass -class CleanRoomCollaboratorInfo: - global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore id of the collaborator. Also known as the sharing identifier. 
- The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - - organization_name: Optional[str] = None - """The organization name of the collaborator. This is configured in the metastore for Delta Sharing - and is used to identify the organization to other collaborators.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomCollaboratorInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.organization_name is not None: body['organization_name'] = self.organization_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaboratorInfo: - """Deserializes the CleanRoomCollaboratorInfo from a dictionary.""" - return cls(global_metastore_id=d.get('global_metastore_id', None), - organization_name=d.get('organization_name', None)) - - -@dataclass -class CleanRoomInfo: - comment: Optional[str] = None - """User-provided free-form text description.""" - - created_at: Optional[int] = None - """Time at which this clean room was created, in epoch milliseconds.""" - - created_by: Optional[str] = None - """Username of clean room creator.""" - - local_catalogs: Optional[List[CleanRoomCatalog]] = None - """Catalog aliases shared by the current collaborator with asset details.""" - - name: Optional[str] = None - """Name of the clean room.""" - - owner: Optional[str] = None - """Username of current owner of clean room.""" - - remote_detailed_info: Optional[CentralCleanRoomInfo] = None - """Central clean room details.""" - - updated_at: Optional[int] = None - """Time at which this clean room was updated, in epoch milliseconds.""" - - updated_by: Optional[str] = None - """Username of clean room updater.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: body['comment'] = 
self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.local_catalogs: body['local_catalogs'] = [v.as_dict() for v in self.local_catalogs] - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict() - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomInfo: - """Deserializes the CleanRoomInfo from a dictionary.""" - return cls(comment=d.get('comment', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - local_catalogs=_repeated_dict(d, 'local_catalogs', CleanRoomCatalog), - name=d.get('name', None), - owner=d.get('owner', None), - remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class CleanRoomNotebookInfo: - notebook_content: Optional[str] = None - """The base64 representation of the notebook content in HTML.""" - - notebook_name: Optional[str] = None - """The name of the notebook.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomNotebookInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.notebook_content is not None: body['notebook_content'] = self.notebook_content - if self.notebook_name is not None: body['notebook_name'] = self.notebook_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookInfo: - """Deserializes the CleanRoomNotebookInfo from a dictionary.""" - return cls(notebook_content=d.get('notebook_content', None), - notebook_name=d.get('notebook_name', None)) - - -@dataclass 
-class CleanRoomTableInfo: - catalog_name: Optional[str] = None - """Name of parent catalog.""" - - columns: Optional[List[ColumnInfo]] = None - """The array of __ColumnInfo__ definitions of the table's columns.""" - - full_name: Optional[str] = None - """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" - - name: Optional[str] = None - """Name of table, relative to parent schema.""" - - schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" - - def as_dict(self) -> dict: - """Serializes the CleanRoomTableInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] - if self.full_name is not None: body['full_name'] = self.full_name - if self.name is not None: body['name'] = self.name - if self.schema_name is not None: body['schema_name'] = self.schema_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CleanRoomTableInfo: - """Deserializes the CleanRoomTableInfo from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), - columns=_repeated_dict(d, 'columns', ColumnInfo), - full_name=d.get('full_name', None), - name=d.get('name', None), - schema_name=d.get('schema_name', None)) - - -@dataclass -class ColumnInfo: - comment: Optional[str] = None - """User-provided free-form text description.""" - - mask: Optional[ColumnMask] = None - - name: Optional[str] = None - """Name of Column.""" - - nullable: Optional[bool] = None - """Whether field may be Null (default: true).""" - - partition_index: Optional[int] = None - """Partition index for column.""" - - position: Optional[int] = None - """Ordinal position of column (starting at position 0).""" - - type_interval_type: Optional[str] = None - """Format of IntervalType.""" - - type_json: Optional[str] = None - """Full data type specification, 
JSON-serialized.""" - - type_name: Optional[ColumnTypeName] = None - """Name of type (INT, STRUCT, MAP, etc.).""" - - type_precision: Optional[int] = None - """Digits of precision; required for DecimalTypes.""" - - type_scale: Optional[int] = None - """Digits to right of decimal; Required for DecimalTypes.""" - - type_text: Optional[str] = None - """Full data type specification as SQL/catalogString text.""" - - def as_dict(self) -> dict: - """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: body['comment'] = self.comment - if self.mask: body['mask'] = self.mask.as_dict() - if self.name is not None: body['name'] = self.name - if self.nullable is not None: body['nullable'] = self.nullable - if self.partition_index is not None: body['partition_index'] = self.partition_index - if self.position is not None: body['position'] = self.position - if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type - if self.type_json is not None: body['type_json'] = self.type_json - if self.type_name is not None: body['type_name'] = self.type_name.value - if self.type_precision is not None: body['type_precision'] = self.type_precision - if self.type_scale is not None: body['type_scale'] = self.type_scale - if self.type_text is not None: body['type_text'] = self.type_text - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ColumnInfo: - """Deserializes the ColumnInfo from a dictionary.""" - return cls(comment=d.get('comment', None), - mask=_from_dict(d, 'mask', ColumnMask), - name=d.get('name', None), - nullable=d.get('nullable', None), - partition_index=d.get('partition_index', None), - position=d.get('position', None), - type_interval_type=d.get('type_interval_type', None), - type_json=d.get('type_json', None), - type_name=_enum(d, 'type_name', ColumnTypeName), - type_precision=d.get('type_precision', None), - type_scale=d.get('type_scale', None), 
- type_text=d.get('type_text', None)) - - -@dataclass -class ColumnMask: - function_name: Optional[str] = None - """The full name of the column mask SQL UDF.""" - - using_column_names: Optional[List[str]] = None - """The list of additional table columns to be passed as input to the column mask function. The - first arg of the mask function should be of the type of the column being masked and the types of - the rest of the args should match the types of columns in 'using_column_names'.""" - - def as_dict(self) -> dict: - """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function_name is not None: body['function_name'] = self.function_name - if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ColumnMask: - """Deserializes the ColumnMask from a dictionary.""" - return cls(function_name=d.get('function_name', None), - using_column_names=d.get('using_column_names', None)) - - -class ColumnTypeName(Enum): - """Name of type (INT, STRUCT, MAP, etc.).""" - - ARRAY = 'ARRAY' - BINARY = 'BINARY' - BOOLEAN = 'BOOLEAN' - BYTE = 'BYTE' - CHAR = 'CHAR' - DATE = 'DATE' - DECIMAL = 'DECIMAL' - DOUBLE = 'DOUBLE' - FLOAT = 'FLOAT' - INT = 'INT' - INTERVAL = 'INTERVAL' - LONG = 'LONG' - MAP = 'MAP' - NULL = 'NULL' - SHORT = 'SHORT' - STRING = 'STRING' - STRUCT = 'STRUCT' - TABLE_TYPE = 'TABLE_TYPE' - TIMESTAMP = 'TIMESTAMP' - TIMESTAMP_NTZ = 'TIMESTAMP_NTZ' - USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' - - -@dataclass -class CreateCleanRoom: - name: str - """Name of the clean room.""" - - remote_detailed_info: CentralCleanRoomInfo - """Central clean room details.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - def as_dict(self) -> dict: - """Serializes the CreateCleanRoom into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not 
None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoom: - """Deserializes the CreateCleanRoom from a dictionary.""" - return cls(comment=d.get('comment', None), - name=d.get('name', None), - remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo)) - - @dataclass class CreateProvider: name: str @@ -447,7 +35,8 @@ class CreateProvider: """Description about the provider.""" recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN** or not provided.""" + """This field is required when the __authentication_type__ is **TOKEN**, + **OAUTH_CLIENT_CREDENTIALS** or not provided.""" def as_dict(self) -> dict: """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body.""" @@ -458,6 +47,15 @@ def as_dict(self) -> dict: if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateProvider into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateProvider: """Deserializes the CreateProvider from a dictionary.""" @@ -479,7 +77,7 @@ class CreateRecipient: """Description about the recipient.""" data_recipient_global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore id provided by the data recipient. 
This field is required + """The global Unity Catalog metastore id provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" @@ -493,10 +91,12 @@ class CreateRecipient: """Username of the recipient owner.""" properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs.""" + """Recipient properties as map of string key-value pairs. When provided in update request, the + specified properties will override the existing properties. To add and remove properties, one + would need to perform a read-modify-write.""" sharing_code: Optional[str] = None - """The one-time sharing code provided by the data recipient. This field is required when the + """The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**.""" def as_dict(self) -> dict: @@ -514,6 +114,21 @@ def as_dict(self) -> dict: if self.sharing_code is not None: body['sharing_code'] = self.sharing_code return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.comment is not None: body['comment'] = self.comment + if self.data_recipient_global_metastore_id is not None: + body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.sharing_code is not None: body['sharing_code'] = 
self.sharing_code + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRecipient: """Deserializes the CreateRecipient from a dictionary.""" @@ -547,6 +162,14 @@ def as_dict(self) -> dict: if self.storage_root is not None: body['storage_root'] = self.storage_root return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateShare into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.storage_root is not None: body['storage_root'] = self.storage_root + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateShare: """Deserializes the CreateShare from a dictionary.""" @@ -563,6 +186,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -577,6 +205,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the GetActivationUrlInfoResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse: """Deserializes the GetActivationUrlInfoResponse from a dictionary.""" @@ -599,6 +232,13 @@ def as_dict(self) -> dict: if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.permissions_out: body['permissions_out'] = self.permissions_out + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse: """Deserializes the GetRecipientSharePermissionsResponse from a dictionary.""" @@ -617,33 +257,16 @@ def as_dict(self) -> dict: if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses] return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> IpAccessList: - """Deserializes the IpAccessList from a dictionary.""" - return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None)) - - -@dataclass -class ListCleanRoomsResponse: - clean_rooms: Optional[List[CleanRoomInfo]] = None - """An array of clean rooms. Remote details (central) are not included.""" - - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - def as_dict(self) -> dict: - """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body.""" + def as_shallow_dict(self) -> dict: + """Serializes the IpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse: - """Deserializes the ListCleanRoomsResponse from a dictionary.""" - return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoomInfo), - next_page_token=d.get('next_page_token', None)) + def from_dict(cls, d: Dict[str, any]) -> IpAccessList: + """Deserializes the IpAccessList from a dictionary.""" + return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None)) @dataclass @@ -662,6 +285,13 @@ def 
as_dict(self) -> dict: if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.shares: body['shares'] = self.shares + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse: """Deserializes the ListProviderSharesResponse from a dictionary.""" @@ -685,6 +315,13 @@ def as_dict(self) -> dict: if self.providers: body['providers'] = [v.as_dict() for v in self.providers] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.providers: body['providers'] = self.providers + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse: """Deserializes the ListProvidersResponse from a dictionary.""" @@ -708,6 +345,13 @@ def as_dict(self) -> dict: if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.recipients: body['recipients'] = self.recipients + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse: """Deserializes the ListRecipientsResponse from a dictionary.""" @@ -731,6 +375,13 @@ def as_dict(self) -> dict: if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes.""" + 
body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.shares: body['shares'] = self.shares + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse: """Deserializes the ListSharesResponse from a dictionary.""" @@ -749,12 +400,41 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v.as_dict() for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the Partition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Partition: """Deserializes the Partition from a dictionary.""" return cls(values=_repeated_dict(d, 'values', PartitionValue)) +@dataclass +class PartitionSpecificationPartition: + values: Optional[List[PartitionValue]] = None + """An array of partition values.""" + + def as_dict(self) -> dict: + """Serializes the PartitionSpecificationPartition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.values: body['values'] = [v.as_dict() for v in self.values] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PartitionSpecificationPartition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.values: body['values'] = self.values + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PartitionSpecificationPartition: + """Deserializes the PartitionSpecificationPartition from a dictionary.""" + return cls(values=_repeated_dict(d, 'values', PartitionValue)) + + @dataclass class PartitionValue: name: Optional[str] = None @@ -764,7 +444,7 @@ class PartitionValue: """The operator to apply for the value.""" recipient_property_key: Optional[str] = None - """The key of a Delta Sharing recipient's property. For example `databricks-account-id`. When this + """The key of a Delta Sharing recipient's property. 
For example "databricks-account-id". When this field is set, field `value` can not be set.""" value: Optional[str] = None @@ -781,6 +461,16 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the PartitionValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.op is not None: body['op'] = self.op + if self.recipient_property_key is not None: + body['recipient_property_key'] = self.recipient_property_key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PartitionValue: """Deserializes the PartitionValue from a dictionary.""" @@ -791,7 +481,6 @@ def from_dict(cls, d: Dict[str, any]) -> PartitionValue: class PartitionValueOp(Enum): - """The operator to apply for the value.""" EQUAL = 'EQUAL' LIKE = 'LIKE' @@ -809,6 +498,7 @@ class Privilege(Enum): CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE' CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME' CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG' + CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE' CREATE_FUNCTION = 'CREATE_FUNCTION' CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE' CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW' @@ -860,6 +550,13 @@ def as_dict(self) -> dict: if self.privileges: body['privileges'] = [v.value for v in self.privileges] return body + def as_shallow_dict(self) -> dict: + """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = self.privileges + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" @@ -887,7 +584,7 @@ class ProviderInfo: data_provider_global_metastore_id: 
Optional[str] = None """The global UC metastore id of the data provider. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format - ::.""" + __cloud__:__region__:__metastore-uuid__.""" metastore_id: Optional[str] = None """UUID of the provider's UC metastore. This field is only present when the __authentication_type__ @@ -900,10 +597,12 @@ class ProviderInfo: """Username of Provider owner.""" recipient_profile: Optional[RecipientProfile] = None - """The recipient profile. This field is only present when the authentication_type is `TOKEN`.""" + """The recipient profile. This field is only present when the authentication_type is `TOKEN` or + `OAUTH_CLIENT_CREDENTIALS`.""" recipient_profile_str: Optional[str] = None - """This field is only present when the authentication_type is `TOKEN` or not provided.""" + """This field is required when the __authentication_type__ is **TOKEN**, + **OAUTH_CLIENT_CREDENTIALS** or not provided.""" region: Optional[str] = None """Cloud region of the provider's UC metastore. 
This field is only present when the @@ -913,7 +612,7 @@ class ProviderInfo: """Time at which this Provider was created, in epoch milliseconds.""" updated_by: Optional[str] = None - """Username of user who last modified Share.""" + """Username of user who last modified Provider.""" def as_dict(self) -> dict: """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body.""" @@ -935,6 +634,26 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.cloud is not None: body['cloud'] = self.cloud + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_provider_global_metastore_id is not None: + body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.recipient_profile: body['recipient_profile'] = self.recipient_profile + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + if self.region is not None: body['region'] = self.region + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProviderInfo: """Deserializes the ProviderInfo from a dictionary.""" @@ -965,6 +684,12 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def 
as_shallow_dict(self) -> dict: + """Serializes the ProviderShare into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ProviderShare: """Deserializes the ProviderShare from a dictionary.""" @@ -984,8 +709,8 @@ class RecipientInfo: """The delta sharing authentication type.""" cloud: Optional[str] = None - """Cloud vendor of the recipient's Unity Catalog Metstore. This field is only present when the - __authentication_type__ is **DATABRICKS**`.""" + """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the + __authentication_type__ is **DATABRICKS**.""" comment: Optional[str] = None """Description about the recipient.""" @@ -1001,12 +726,15 @@ class RecipientInfo: when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" + expiration_time: Optional[int] = None + """Expiration timestamp of the token, in epoch milliseconds.""" + ip_access_list: Optional[IpAccessList] = None """IP Access List""" metastore_id: Optional[str] = None - """Unique identifier of recipient's Unity Catalog metastore. This field is only present when the - __authentication_type__ is **DATABRICKS**""" + """Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the + __authentication_type__ is **DATABRICKS**.""" name: Optional[str] = None """Name of Recipient.""" @@ -1015,10 +743,12 @@ class RecipientInfo: """Username of the recipient owner.""" properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs.""" + """Recipient properties as map of string key-value pairs. When provided in update request, the + specified properties will override the existing properties. 
To add and remove properties, one + would need to perform a read-modify-write.""" region: Optional[str] = None - """Cloud region of the recipient's Unity Catalog Metstore. This field is only present when the + """Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" sharing_code: Optional[str] = None @@ -1046,6 +776,7 @@ def as_dict(self) -> dict: if self.created_by is not None: body['created_by'] = self.created_by if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name @@ -1058,6 +789,31 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the RecipientInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activated is not None: body['activated'] = self.activated + if self.activation_url is not None: body['activation_url'] = self.activation_url + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.cloud is not None: body['cloud'] = self.cloud + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_recipient_global_metastore_id is not None: + body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = 
self.ip_access_list + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.region is not None: body['region'] = self.region + if self.sharing_code is not None: body['sharing_code'] = self.sharing_code + if self.tokens: body['tokens'] = self.tokens + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RecipientInfo: """Deserializes the RecipientInfo from a dictionary.""" @@ -1069,6 +825,7 @@ def from_dict(cls, d: Dict[str, any]) -> RecipientInfo: created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), + expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), metastore_id=d.get('metastore_id', None), name=d.get('name', None), @@ -1101,6 +858,15 @@ def as_dict(self) -> dict: body['share_credentials_version'] = self.share_credentials_version return body + def as_shallow_dict(self) -> dict: + """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bearer_token is not None: body['bearer_token'] = self.bearer_token + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.share_credentials_version is not None: + body['share_credentials_version'] = self.share_credentials_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RecipientProfile: """Deserializes the RecipientProfile from a dictionary.""" @@ -1116,7 +882,7 @@ class RecipientTokenInfo: retrieved.""" created_at: Optional[int] = None - """Time at which this recipient Token was created, 
in epoch milliseconds.""" + """Time at which this recipient token was created, in epoch milliseconds.""" created_by: Optional[str] = None """Username of recipient token creator.""" @@ -1128,10 +894,10 @@ class RecipientTokenInfo: """Unique ID of the recipient token.""" updated_at: Optional[int] = None - """Time at which this recipient Token was updated, in epoch milliseconds.""" + """Time at which this recipient token was updated, in epoch milliseconds.""" updated_by: Optional[str] = None - """Username of recipient Token updater.""" + """Username of recipient token updater.""" def as_dict(self) -> dict: """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body.""" @@ -1145,6 +911,18 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.activation_url is not None: body['activation_url'] = self.activation_url + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.id is not None: body['id'] = self.id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RecipientTokenInfo: """Deserializes the RecipientTokenInfo from a dictionary.""" @@ -1181,6 +959,16 @@ def as_dict(self) -> dict: body['shareCredentialsVersion'] = self.share_credentials_version return body + def as_shallow_dict(self) -> dict: + """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bearer_token is not None: body['bearerToken'] = self.bearer_token + if 
self.endpoint is not None: body['endpoint'] = self.endpoint + if self.expiration_time is not None: body['expirationTime'] = self.expiration_time + if self.share_credentials_version is not None: + body['shareCredentialsVersion'] = self.share_credentials_version + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RetrieveTokenResponse: """Deserializes the RetrieveTokenResponse from a dictionary.""" @@ -1198,7 +986,7 @@ class RotateRecipientToken: expire the existing token immediately, negative number will return an error.""" name: Optional[str] = None - """The name of the recipient.""" + """The name of the Recipient.""" def as_dict(self) -> dict: """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body.""" @@ -1208,6 +996,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes.""" + body = {} + if self.existing_token_expire_in_seconds is not None: + body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RotateRecipientToken: """Deserializes the RotateRecipientToken from a dictionary.""" @@ -1228,15 +1024,18 @@ def as_dict(self) -> dict: if self.properties: body['properties'] = self.properties return body + def as_shallow_dict(self) -> dict: + """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.properties: body['properties'] = self.properties + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SecurablePropertiesKvPairs: """Deserializes the SecurablePropertiesKvPairs from a dictionary.""" return cls(properties=d.get('properties', None)) -SecurablePropertiesMap = Dict[str, str] - - @dataclass class 
ShareInfo: comment: Optional[str] = None @@ -1284,6 +1083,21 @@ def as_dict(self) -> dict: if self.updated_by is not None: body['updated_by'] = self.updated_by return body + def as_shallow_dict(self) -> dict: + """Serializes the ShareInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.name is not None: body['name'] = self.name + if self.objects: body['objects'] = self.objects + if self.owner is not None: body['owner'] = self.owner + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ShareInfo: """Deserializes the ShareInfo from a dictionary.""" @@ -1315,6 +1129,13 @@ def as_dict(self) -> dict: if self.share_name is not None: body['share_name'] = self.share_name return body + def as_shallow_dict(self) -> dict: + """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + if self.share_name is not None: body['share_name'] = self.share_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ShareToPrivilegeAssignment: """Deserializes the ShareToPrivilegeAssignment from a dictionary.""" @@ -1397,6 +1218,25 @@ def as_dict(self) -> dict: if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as return body + def as_shallow_dict(self) -> dict: + """Serializes the SharedDataObject into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.added_at is not None: body['added_at'] = self.added_at + if self.added_by is not None: body['added_by'] = self.added_by + if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled + if self.comment is not None: body['comment'] = self.comment + if self.content is not None: body['content'] = self.content + if self.data_object_type is not None: body['data_object_type'] = self.data_object_type + if self.history_data_sharing_status is not None: + body['history_data_sharing_status'] = self.history_data_sharing_status + if self.name is not None: body['name'] = self.name + if self.partitions: body['partitions'] = self.partitions + if self.shared_as is not None: body['shared_as'] = self.shared_as + if self.start_version is not None: body['start_version'] = self.start_version + if self.status is not None: body['status'] = self.status + if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SharedDataObject: """Deserializes the SharedDataObject from a dictionary.""" @@ -1419,6 +1259,8 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject: class SharedDataObjectDataObjectType(Enum): """The type of the data object.""" + FEATURE_SPEC = 'FEATURE_SPEC' + FUNCTION = 'FUNCTION' MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' MODEL = 'MODEL' NOTEBOOK_FILE = 'NOTEBOOK_FILE' @@ -1458,6 +1300,13 @@ def as_dict(self) -> dict: if self.data_object: body['data_object'] = self.data_object.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.action is not None: body['action'] = self.action + if self.data_object: body['data_object'] = self.data_object + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SharedDataObjectUpdate: """Deserializes the SharedDataObjectUpdate from a dictionary.""" @@ 
-1474,42 +1323,15 @@ class SharedDataObjectUpdateAction(Enum): @dataclass -class UpdateCleanRoom: - catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None - """Array of shared data object updates.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - name: Optional[str] = None - """The name of the clean room.""" - - owner: Optional[str] = None - """Username of current owner of clean room.""" +class UpdatePermissionsResponse: def as_dict(self) -> dict: - """Serializes the UpdateCleanRoom into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_updates: body['catalog_updates'] = [v.as_dict() for v in self.catalog_updates] - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoom: - """Deserializes the UpdateCleanRoom from a dictionary.""" - return cls(catalog_updates=_repeated_dict(d, 'catalog_updates', CleanRoomCatalogUpdate), - comment=d.get('comment', None), - name=d.get('name', None), - owner=d.get('owner', None)) - - -@dataclass -class UpdatePermissionsResponse: - - def as_dict(self) -> dict: - """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @@ -1534,7 +1356,8 @@ class UpdateProvider: """Username of Provider owner.""" recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN** or not provided.""" + """This field is required when the __authentication_type__ is **TOKEN**, + **OAUTH_CLIENT_CREDENTIALS** or not 
provided.""" def as_dict(self) -> dict: """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body.""" @@ -1546,6 +1369,16 @@ def as_dict(self) -> dict: if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateProvider: """Deserializes the UpdateProvider from a dictionary.""" @@ -1571,7 +1404,7 @@ class UpdateRecipient: """Name of the recipient.""" new_name: Optional[str] = None - """New name for the recipient.""" + """New name for the recipient. 
.""" owner: Optional[str] = None """Username of the recipient owner.""" @@ -1593,6 +1426,18 @@ def as_dict(self) -> dict: if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient: """Deserializes the UpdateRecipient from a dictionary.""" @@ -1605,20 +1450,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient: properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs)) -@dataclass -class UpdateResponse: - - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - @dataclass class UpdateShare: comment: Optional[str] = None @@ -1650,6 +1481,17 @@ def as_dict(self) -> dict: if self.updates: body['updates'] = [v.as_dict() for v in self.updates] return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateShare into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = 
self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updates: body['updates'] = self.updates + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateShare: """Deserializes the UpdateShare from a dictionary.""" @@ -1690,6 +1532,15 @@ def as_dict(self) -> dict: if self.page_token is not None: body['page_token'] = self.page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.changes: body['changes'] = self.changes + if self.max_results is not None: body['max_results'] = self.max_results + if self.name is not None: body['name'] = self.name + if self.page_token is not None: body['page_token'] = self.page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions: """Deserializes the UpdateSharePermissions from a dictionary.""" @@ -1699,157 +1550,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions: page_token=d.get('page_token', None)) -class CleanRoomsAPI: - """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive - enterprise data, including customer data, for measurements, insights, activation and other use cases. - - To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** privilege.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - name: str, - remote_detailed_info: CentralCleanRoomInfo, - *, - comment: Optional[str] = None) -> CleanRoomInfo: - """Create a clean room. - - Creates a new clean room with specified colaborators. The caller must be a metastore admin or have the - **CREATE_CLEAN_ROOM** privilege on the metastore. - - :param name: str - Name of the clean room. 
- :param remote_detailed_info: :class:`CentralCleanRoomInfo` - Central clean room details. - :param comment: str (optional) - User-provided free-form text description. - - :returns: :class:`CleanRoomInfo` - """ - body = {} - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if remote_detailed_info is not None: body['remote_detailed_info'] = remote_detailed_info.as_dict() - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('POST', '/api/2.1/unity-catalog/clean-rooms', body=body, headers=headers) - return CleanRoomInfo.from_dict(res) - - def delete(self, name: str): - """Delete a clean room. - - Deletes a data object clean room from the metastore. The caller must be an owner of the clean room. - - :param name: str - The name of the clean room. - - - """ - - headers = {'Accept': 'application/json', } - - self._api.do('DELETE', f'/api/2.1/unity-catalog/clean-rooms/{name}', headers=headers) - - def get(self, name: str, *, include_remote_details: Optional[bool] = None) -> CleanRoomInfo: - """Get a clean room. - - Gets a data object clean room from the metastore. The caller must be a metastore admin or the owner of - the clean room. - - :param name: str - The name of the clean room. - :param include_remote_details: bool (optional) - Whether to include remote details (central) on the clean room. - - :returns: :class:`CleanRoomInfo` - """ - - query = {} - if include_remote_details is not None: query['include_remote_details'] = include_remote_details - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', f'/api/2.1/unity-catalog/clean-rooms/{name}', query=query, headers=headers) - return CleanRoomInfo.from_dict(res) - - def list(self, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator[CleanRoomInfo]: - """List clean rooms. - - Gets an array of data object clean rooms from the metastore. 
The caller must be a metastore admin or - the owner of the clean room. There is no guarantee of a specific ordering of the elements in the - array. - - :param max_results: int (optional) - Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not - recommended). - when set to a value greater than 0, the page length is the minimum of this value and - a server configured value; - when set to 0, the page length is set to a server configured value - (recommended); - when set to a value less than 0, an invalid parameter error is returned; - :param page_token: str (optional) - Opaque pagination token to go to next page based on previous query. - - :returns: Iterator over :class:`CleanRoomInfo` - """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - - while True: - json = self._api.do('GET', '/api/2.1/unity-catalog/clean-rooms', query=query, headers=headers) - if 'clean_rooms' in json: - for v in json['clean_rooms']: - yield CleanRoomInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def update(self, - name: str, - *, - catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None, - comment: Optional[str] = None, - owner: Optional[str] = None) -> CleanRoomInfo: - """Update a clean room. - - Updates the clean room with the changes and data objects in the request. The caller must be the owner - of the clean room or a metastore admin. - - When the caller is a metastore admin, only the __owner__ field can be updated. - - In the case that the clean room name is changed **updateCleanRoom** requires that the caller is both - the clean room owner and a metastore admin. - - For each table that is added through this method, the clean room owner must also have **SELECT** - privilege on the table. 
The privilege must be maintained indefinitely for recipients to be able to - access the table. Typically, you should use a group as the clean room owner. - - Table removals through **update** do not require additional privileges. - - :param name: str - The name of the clean room. - :param catalog_updates: List[:class:`CleanRoomCatalogUpdate`] (optional) - Array of shared data object updates. - :param comment: str (optional) - User-provided free-form text description. - :param owner: str (optional) - Username of current owner of clean room. - - :returns: :class:`CleanRoomInfo` - """ - body = {} - if catalog_updates is not None: body['catalog_updates'] = [v.as_dict() for v in catalog_updates] - if comment is not None: body['comment'] = comment - if owner is not None: body['owner'] = owner - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('PATCH', f'/api/2.1/unity-catalog/clean-rooms/{name}', body=body, headers=headers) - return CleanRoomInfo.from_dict(res) - - class ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data. A provider contains shares which further contain the shared data.""" @@ -1875,7 +1575,8 @@ def create(self, :param comment: str (optional) Description about the provider. :param recipient_profile_str: str (optional) - This field is required when the __authentication_type__ is **TOKEN** or not provided. + This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** + or not provided. 
:returns: :class:`ProviderInfo` """ @@ -1957,6 +1658,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers) if 'providers' in json: @@ -1998,12 +1700,18 @@ def list_shares(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.1/unity-catalog/providers/{name}/shares', - query=query, - headers=headers) - parsed = ListProviderSharesResponse.from_dict(json).shares - return parsed if parsed is not None else [] + if "max_results" not in query: query['max_results'] = 0 + while True: + json = self._api.do('GET', + f'/api/2.1/unity-catalog/providers/{name}/shares', + query=query, + headers=headers) + if 'shares' in json: + for v in json['shares']: + yield ProviderShare.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, name: str, @@ -2027,7 +1735,8 @@ def update(self, :param owner: str (optional) Username of Provider owner. :param recipient_profile_str: str (optional) - This field is required when the __authentication_type__ is **TOKEN** or not provided. + This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** + or not provided. :returns: :class:`ProviderInfo` """ @@ -2122,7 +1831,7 @@ def create(self, """Create a share recipient. Creates a new recipient with the delta sharing authentication type in the metastore. The caller must - be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore. + be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. :param name: str Name of Recipient. 
@@ -2131,8 +1840,8 @@ def create(self, :param comment: str (optional) Description about the recipient. :param data_recipient_global_metastore_id: str (optional) - The global Unity Catalog metastore id provided by the data recipient. This field is required when - the __authentication_type__ is **DATABRICKS**. The identifier is of format + The global Unity Catalog metastore id provided by the data recipient. This field is only present + when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. :param expiration_time: int (optional) Expiration timestamp of the token, in epoch milliseconds. @@ -2141,9 +1850,11 @@ def create(self, :param owner: str (optional) Username of the recipient owner. :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional) - Recipient properties as map of string key-value pairs. + Recipient properties as map of string key-value pairs. When provided in update request, the + specified properties will override the existing properties. To add and remove properties, one would + need to perform a read-modify-write. :param sharing_code: str (optional) - The one-time sharing code provided by the data recipient. This field is required when the + The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. :returns: :class:`RecipientInfo` @@ -2233,6 +1944,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers) if 'recipients' in json: @@ -2249,7 +1961,7 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci The caller must be the owner of the recipient. :param name: str - The name of the recipient. + The name of the Recipient. 
:param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire @@ -2313,7 +2025,7 @@ def update(self, ip_access_list: Optional[IpAccessList] = None, new_name: Optional[str] = None, owner: Optional[str] = None, - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None): + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo: """Update a share recipient. Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of @@ -2329,7 +2041,7 @@ def update(self, :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) - New name for the recipient. + New name for the recipient. . :param owner: str (optional) Username of the recipient owner. :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional) @@ -2337,7 +2049,7 @@ def update(self, specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. 
- + :returns: :class:`RecipientInfo` """ body = {} if comment is not None: body['comment'] = comment @@ -2348,7 +2060,8 @@ def update(self, if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers) + res = self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers) + return RecipientInfo.from_dict(res) class SharesAPI: @@ -2452,6 +2165,7 @@ def list(self, if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } + if "max_results" not in query: query['max_results'] = 0 while True: json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers) if 'shares' in json: diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 7a224feeb..059b744ef 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -36,6 +36,14 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the AccessControl into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AccessControl: """Deserializes the AccessControl from a dictionary.""" @@ -118,6 +126,26 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the Alert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition: 
body['condition'] = self.condition + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Alert: """Deserializes the Alert from a dictionary.""" @@ -161,6 +189,15 @@ def as_dict(self) -> dict: if self.threshold: body['threshold'] = self.threshold.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertCondition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state + if self.op is not None: body['op'] = self.op + if self.operand: body['operand'] = self.operand + if self.threshold: body['threshold'] = self.threshold + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertCondition: """Deserializes the AlertCondition from a dictionary.""" @@ -180,6 +217,12 @@ def as_dict(self) -> dict: if self.column: body['column'] = self.column.as_dict() return body + def 
as_shallow_dict(self) -> dict: + """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column: body['column'] = self.column + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand: """Deserializes the AlertConditionOperand from a dictionary.""" @@ -196,6 +239,12 @@ def as_dict(self) -> dict: if self.value: body['value'] = self.value.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold: """Deserializes the AlertConditionThreshold from a dictionary.""" @@ -212,6 +261,12 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn: """Deserializes the AlertOperandColumn from a dictionary.""" @@ -234,6 +289,14 @@ def as_dict(self) -> dict: if self.string_value is not None: body['string_value'] = self.string_value return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue: """Deserializes the AlertOperandValue from a dictionary.""" @@ -297,6 +360,18 @@ def as_dict(self) -> dict: if self.value: 
body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column is not None: body['column'] = self.column + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state + if self.muted is not None: body['muted'] = self.muted + if self.op is not None: body['op'] = self.op + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertOptions: """Deserializes the AlertOptions from a dictionary.""" @@ -382,6 +457,24 @@ def as_dict(self) -> dict: if self.user_id is not None: body['user_id'] = self.user_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AlertQuery into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query is not None: body['query'] = self.query + if self.tags: body['tags'] = self.tags + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user_id is not None: body['user_id'] = self.user_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AlertQuery: """Deserializes the AlertQuery from a dictionary.""" @@ -434,6 +527,15 @@ 
def as_dict(self) -> dict: if self.row_offset is not None: body['row_offset'] = self.row_offset return body + def as_shallow_dict(self) -> dict: + """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> BaseChunkInfo: """Deserializes the BaseChunkInfo from a dictionary.""" @@ -451,6 +553,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CancelExecutionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CancelExecutionResponse: """Deserializes the CancelExecutionResponse from a dictionary.""" @@ -473,6 +580,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Channel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Channel: """Deserializes the Channel from a dictionary.""" @@ -496,6 +610,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name.value return body + def as_shallow_dict(self) -> dict: + """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> ChannelInfo: """Deserializes the ChannelInfo from a dictionary.""" @@ -507,7 +628,80 @@ class ChannelName(Enum): CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW' - CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED' + CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS' + + +@dataclass +class ClientConfig: + allow_custom_js_visualizations: Optional[bool] = None + + allow_downloads: Optional[bool] = None + + allow_external_shares: Optional[bool] = None + + allow_subscriptions: Optional[bool] = None + + date_format: Optional[str] = None + + date_time_format: Optional[str] = None + + disable_publish: Optional[bool] = None + + enable_legacy_autodetect_types: Optional[bool] = None + + feature_show_permissions_control: Optional[bool] = None + + hide_plotly_mode_bar: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_custom_js_visualizations is not None: + body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads + if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares + if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions + if self.date_format is not None: body['date_format'] = self.date_format + if self.date_time_format is not None: body['date_time_format'] = self.date_time_format + if self.disable_publish is not None: body['disable_publish'] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: + body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: + body['feature_show_permissions_control'] = self.feature_show_permissions_control + if 
self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClientConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_custom_js_visualizations is not None: + body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads + if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares + if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions + if self.date_format is not None: body['date_format'] = self.date_format + if self.date_time_format is not None: body['date_time_format'] = self.date_time_format + if self.disable_publish is not None: body['disable_publish'] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: + body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: + body['feature_show_permissions_control'] = self.feature_show_permissions_control + if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClientConfig: + """Deserializes the ClientConfig from a dictionary.""" + return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None), + allow_downloads=d.get('allow_downloads', None), + allow_external_shares=d.get('allow_external_shares', None), + allow_subscriptions=d.get('allow_subscriptions', None), + date_format=d.get('date_format', None), + date_time_format=d.get('date_time_format', None), + disable_publish=d.get('disable_publish', None), + enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None), + 
feature_show_permissions_control=d.get('feature_show_permissions_control', None), + hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None)) @dataclass @@ -547,6 +741,18 @@ def as_dict(self) -> dict: if self.type_text is not None: body['type_text'] = self.type_text return body + def as_shallow_dict(self) -> dict: + """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" @@ -612,6 +818,16 @@ def as_dict(self) -> dict: if self.rearm is not None: body['rearm'] = self.rearm return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateAlert: """Deserializes the CreateAlert from a dictionary.""" @@ -632,6 +848,12 @@ def as_dict(self) -> dict: if self.alert: body['alert'] = self.alert.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.alert: body['alert'] = self.alert + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest: """Deserializes the CreateAlertRequest from a dictionary.""" @@ -683,6 +905,19 @@ def as_dict(self) -> dict: if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition: body['condition'] = self.condition + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert: """Deserializes the CreateAlertRequestAlert from a dictionary.""" @@ -706,6 +941,12 @@ def as_dict(self) -> dict: if self.query: body['query'] = self.query.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.query: body['query'] = self.query + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest: """Deserializes the CreateQueryRequest from a dictionary.""" @@ -762,6 +1003,22 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes.""" + 
body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.parameters: body['parameters'] = self.parameters + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery: """Deserializes the CreateQueryRequestQuery from a dictionary.""" @@ -788,6 +1045,12 @@ def as_dict(self) -> dict: if self.visualization: body['visualization'] = self.visualization.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.visualization: body['visualization'] = self.visualization + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest: """Deserializes the CreateVisualizationRequest from a dictionary.""" @@ -823,6 +1086,16 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if 
self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization: """Deserializes the CreateVisualizationRequestVisualization from a dictionary.""" @@ -924,6 +1197,25 @@ def as_dict(self) -> dict: if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + body['enable_serverless_compute'] = self.enable_serverless_compute + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest: """Deserializes the CreateWarehouseRequest from a dictionary.""" @@ -962,6 +1254,12 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseResponse: """Deserializes the CreateWarehouseResponse from a dictionary.""" @@ -999,6 +1297,17 @@ def as_dict(self) -> dict: if self.width is not None: body['width'] = self.width return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateWidget into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.options: body['options'] = self.options + if self.text is not None: body['text'] = self.text + if self.visualization_id is not None: body['visualization_id'] = self.visualization_id + if self.width is not None: body['width'] = self.width + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateWidget: """Deserializes the CreateWidget from a dictionary.""" @@ -1090,6 +1399,29 @@ def as_dict(self) -> dict: if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] return body + def as_shallow_dict(self) -> dict: + """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" + body = {} + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.dashboard_filters_enabled is not None: + body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not 
None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier + if self.slug is not None: body['slug'] = self.slug + if self.tags: body['tags'] = self.tags + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user + if self.user_id is not None: body['user_id'] = self.user_id + if self.widgets: body['widgets'] = self.widgets + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Dashboard: """Deserializes the Dashboard from a dictionary.""" @@ -1134,6 +1466,15 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.name is not None: body['name'] = self.name + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DashboardEditContent: """Deserializes the DashboardEditContent from a dictionary.""" @@ -1155,6 +1496,12 @@ def as_dict(self) -> dict: if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at return body + def as_shallow_dict(self) -> dict: + """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DashboardOptions: """Deserializes the DashboardOptions from a dictionary.""" @@ -1193,6 +1540,18 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the DashboardPostContent into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.dashboard_filters_enabled is not None: + body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not None: body['name'] = self.name + if self.parent is not None: body['parent'] = self.parent + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DashboardPostContent: """Deserializes the DashboardPostContent from a dictionary.""" @@ -1253,6 +1612,20 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the DataSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.pause_reason is not None: body['pause_reason'] = self.pause_reason + if self.paused is not None: body['paused'] = self.paused + if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit + if self.syntax is not None: body['syntax'] = self.syntax + if self.type is not None: body['type'] = self.type + if self.view_only is not None: body['view_only'] = self.view_only + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DataSource: """Deserializes the DataSource from a dictionary.""" @@ -1287,6 +1660,13 @@ def as_dict(self) -> dict: if self.start is not None: body['start'] = self.start return body + def as_shallow_dict(self) -> dict: + """Serializes the DateRange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end is not None: body['end'] = self.end + if self.start is not None: body['start'] = self.start + return body + @classmethod 
def from_dict(cls, d: Dict[str, any]) -> DateRange: """Deserializes the DateRange from a dictionary.""" @@ -1317,6 +1697,16 @@ def as_dict(self) -> dict: if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week return body + def as_shallow_dict(self) -> dict: + """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value + if self.dynamic_date_range_value is not None: + body['dynamic_date_range_value'] = self.dynamic_date_range_value + if self.precision is not None: body['precision'] = self.precision + if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DateRangeValue: """Deserializes the DateRangeValue from a dictionary.""" @@ -1368,6 +1758,14 @@ def as_dict(self) -> dict: if self.precision is not None: body['precision'] = self.precision.value return body + def as_shallow_dict(self) -> dict: + """Serializes the DateValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.date_value is not None: body['date_value'] = self.date_value + if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value + if self.precision is not None: body['precision'] = self.precision + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DateValue: """Deserializes the DateValue from a dictionary.""" @@ -1390,6 +1788,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -1404,6 +1807,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse: """Deserializes the DeleteWarehouseResponse from a dictionary.""" @@ -1443,10 +1851,20 @@ def as_dict(self) -> dict: if self.rearm is not None: body['rearm'] = self.rearm return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> EditAlert: - """Deserializes the EditAlert from a dictionary.""" - return cls(alert_id=d.get('alert_id', None), + def as_shallow_dict(self) -> dict: + """Serializes the EditAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EditAlert: + """Deserializes the EditAlert from a dictionary.""" + return cls(alert_id=d.get('alert_id', None), name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), query_id=d.get('query_id', None), @@ -1547,6 +1965,26 @@ def as_dict(self) -> dict: if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + 
body['enable_serverless_compute'] = self.enable_serverless_compute + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditWarehouseRequest: """Deserializes the EditWarehouseRequest from a dictionary.""" @@ -1583,6 +2021,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the EditWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse: """Deserializes the EditWarehouseResponse from a dictionary.""" @@ -1599,6 +2042,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Empty: """Deserializes the Empty from a dictionary.""" @@ -1618,6 +2066,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointConfPair: 
"""Deserializes the EndpointConfPair from a dictionary.""" @@ -1652,6 +2107,16 @@ def as_dict(self) -> dict: if self.summary is not None: body['summary'] = self.summary return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes.""" + body = {} + if self.details is not None: body['details'] = self.details + if self.failure_reason: body['failure_reason'] = self.failure_reason + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status + if self.summary is not None: body['summary'] = self.summary + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointHealth: """Deserializes the EndpointHealth from a dictionary.""" @@ -1780,6 +2245,32 @@ def as_dict(self) -> dict: if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if 
self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.state is not None: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" @@ -1827,6 +2318,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointTagPair: """Deserializes the EndpointTagPair from a dictionary.""" @@ -1843,6 +2341,12 @@ def as_dict(self) -> dict: if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointTags into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_tags: body['custom_tags'] = self.custom_tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointTags: """Deserializes the EndpointTags from a dictionary.""" @@ -1868,6 +2372,14 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the EnumValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enum_options is not None: 
body['enum_options'] = self.enum_options + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EnumValue: """Deserializes the EnumValue from a dictionary.""" @@ -2009,6 +2521,22 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.byte_limit is not None: body['byte_limit'] = self.byte_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.disposition is not None: body['disposition'] = self.disposition + if self.format is not None: body['format'] = self.format + if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout + if self.parameters: body['parameters'] = self.parameters + if self.row_limit is not None: body['row_limit'] = self.row_limit + if self.schema is not None: body['schema'] = self.schema + if self.statement is not None: body['statement'] = self.statement + if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementRequest: """Deserializes the ExecuteStatementRequest from a dictionary.""" @@ -2089,6 +2617,21 @@ def as_dict(self) -> dict: if self.row_offset is not None: body['row_offset'] = self.row_offset return body + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLink into a shallow dictionary of its immediate attributes.""" + body = {} + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.expiration is not None: body['expiration'] = 
self.expiration + if self.external_link is not None: body['external_link'] = self.external_link + if self.http_headers: body['http_headers'] = self.http_headers + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: + body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExternalLink: """Deserializes the ExternalLink from a dictionary.""" @@ -2129,6 +2672,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the GetResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetResponse: """Deserializes the GetResponse from a dictionary.""" @@ -2148,6 +2699,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetWarehousePermissionLevelsResponse: """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary.""" @@ -2272,6 +2829,32 @@ def as_dict(self) -> dict: if self.warehouse_type is not None: body['warehouse_type'] = 
self.warehouse_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.state is not None: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetWarehouseResponse: """Deserializes the GetWarehouseResponse from a dictionary.""" @@ -2358,6 +2941,22 @@ def as_dict(self) -> dict: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict() return body + def 
as_shallow_dict(self) -> dict: + """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.channel: body['channel'] = self.channel + if self.config_param: body['config_param'] = self.config_param + if self.data_access_config: body['data_access_config'] = self.data_access_config + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types + if self.global_param: body['global_param'] = self.global_param + if self.google_service_account is not None: + body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: body['security_policy'] = self.security_policy + if self.sql_configuration_parameters: + body['sql_configuration_parameters'] = self.sql_configuration_parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceWarehouseConfigResponse: """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary.""" @@ -2433,6 +3032,22 @@ def as_dict(self) -> dict: if self.user: body['user'] = self.user.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query: body['query'] = self.query + if self.rearm is not None: body['rearm'] = self.rearm + if self.state is not None: body['state'] = self.state + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = 
self.user + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LegacyAlert: """Deserializes the LegacyAlert from a dictionary.""" @@ -2463,74 +3078,49 @@ class LegacyQuery: can_edit: Optional[bool] = None """Describes whether the authenticated user is allowed to edit the definition of this query.""" - created_at: Optional[str] = None - """The timestamp when this query was created.""" + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" + create_time: Optional[str] = None + """Timestamp when this query was created.""" description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - id: Optional[str] = None - """Query ID.""" - - is_archived: Optional[bool] = None - """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear - in search results. If this boolean is `true`, the `options` property for this query includes a - `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" - - is_draft: Optional[bool] = None - """Whether the query is a draft. Draft queries only appear in list views for their owners. - Visualizations from draft queries cannot appear on dashboards.""" - - is_favorite: Optional[bool] = None - """Whether this query object appears in the current user's favorites list. This flag determines - whether the star icon for favorites is selected.""" - - is_safe: Optional[bool] = None - """Text parameter types are not safe from SQL injection for all types of data source. 
Set this - Boolean parameter to `true` if a query either does not use any text type parameters or uses a - data source type where text type parameters are handled safely.""" - - last_modified_by: Optional[User] = None + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" - last_modified_by_id: Optional[int] = None - """The ID of the user who last saved changes to this query.""" + id: Optional[str] = None + """UUID identifying the query.""" - latest_query_data_id: Optional[str] = None - """If there is a cached result for this query and user, this field includes the query result ID. If - this query uses parameters, this field is always null.""" + last_modifier_user_name: Optional[str] = None + """Username of the user who last saved changes to this query.""" - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" + lifecycle_state: Optional[LifecycleState] = None + """Indicates whether the query is trashed.""" - options: Optional[QueryOptions] = None + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" - permission_tier: Optional[PermissionLevel] = None - """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query - * `CAN_MANAGE`: Can manage the query""" + parent_path: Optional[str] = None + """Workspace path of the workspace folder containing the object.""" - query: Optional[str] = None - """The text of the query to be run.""" + query_text: Optional[str] = None + """Text of the query to be run.""" - query_hash: Optional[str] = None - """A SHA-256 hash of the query text along with the authenticated user ID.""" + run_as_mode: 
Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" tags: Optional[List[str]] = None - updated_at: Optional[str] = None - """The timestamp at which this query was last updated.""" + update_time: Optional[str] = None + """Timestamp when this query was last updated.""" user: Optional[User] = None @@ -2542,6 +3132,29 @@ class LegacyQuery: def as_dict(self) -> dict: """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body.""" body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.update_time is not None: body['update_time'] = 
self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes.""" + body = {} if self.can_edit is not None: body['can_edit'] = self.can_edit if self.created_at is not None: body['created_at'] = self.created_at if self.data_source_id is not None: body['data_source_id'] = self.data_source_id @@ -2551,21 +3164,21 @@ def as_dict(self) -> dict: if self.is_draft is not None: body['is_draft'] = self.is_draft if self.is_favorite is not None: body['is_favorite'] = self.is_favorite if self.is_safe is not None: body['is_safe'] = self.is_safe - if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() + if self.options: body['options'] = self.options if self.parent is not None: body['parent'] = self.parent - if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier if self.query is not None: body['query'] = self.query if self.query_hash is not None: body['query_hash'] = self.query_hash - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value - if self.tags: body['tags'] = [v for v in self.tags] + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user.as_dict() + if self.user: body['user'] = self.user if 
self.user_id is not None: body['user_id'] = self.user_id - if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] + if self.visualizations: body['visualizations'] = self.visualizations return body @classmethod @@ -2575,21 +3188,16 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery: created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), + display_name=d.get('display_name', None), id=d.get('id', None), - is_archived=d.get('is_archived', None), - is_draft=d.get('is_draft', None), - is_favorite=d.get('is_favorite', None), - is_safe=d.get('is_safe', None), - last_modified_by=_from_dict(d, 'last_modified_by', User), - last_modified_by_id=d.get('last_modified_by_id', None), - latest_query_data_id=d.get('latest_query_data_id', None), - name=d.get('name', None), - options=_from_dict(d, 'options', QueryOptions), - parent=d.get('parent', None), - permission_tier=_enum(d, 'permission_tier', PermissionLevel), - query=d.get('query', None), - query_hash=d.get('query_hash', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole), + last_modifier_user_name=d.get('last_modifier_user_name', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + parent_path=d.get('parent_path', None), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), @@ -2639,6 +3247,19 @@ def as_dict(self) -> dict: if self.updated_at is not None: body['updated_at'] = self.updated_at return body + def as_shallow_dict(self) -> dict: + """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if 
self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query: body['query'] = self.query + if self.type is not None: body['type'] = self.type + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization: """Deserializes the LegacyVisualization from a dictionary.""" @@ -2671,6 +3292,13 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse: """Deserializes the ListAlertsResponse from a dictionary.""" @@ -2748,6 +3376,25 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition: body['condition'] = self.condition + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not 
None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert: """Deserializes the ListAlertsResponseAlert from a dictionary.""" @@ -2791,6 +3438,14 @@ def as_dict(self) -> dict: if self.res: body['res'] = [v.as_dict() for v in self.res] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.has_next_page is not None: body['has_next_page'] = self.has_next_page + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.res: body['res'] = self.res + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse: """Deserializes the ListQueriesResponse from a dictionary.""" @@ -2812,6 +3467,13 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse: """Deserializes the ListQueryObjectsResponse from a dictionary.""" @@ -2890,6 +3552,28 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the 
ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = self.parameters + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery: """Deserializes the ListQueryObjectsResponseQuery from a dictionary.""" @@ -2934,6 +3618,15 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.count is not None: body['count'] = self.count + if self.page is not None: body['page'] = self.page + if self.page_size is not None: body['page_size'] = self.page_size + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, 
any]) -> ListResponse: """Deserializes the ListResponse from a dictionary.""" @@ -2956,6 +3649,13 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse: """Deserializes the ListVisualizationsForQueryResponse from a dictionary.""" @@ -2974,6 +3674,12 @@ def as_dict(self) -> dict: if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.warehouses: body['warehouses'] = self.warehouses + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse: """Deserializes the ListWarehousesResponse from a dictionary.""" @@ -2999,6 +3705,14 @@ def as_dict(self) -> dict: if self.suffix is not None: body['suffix'] = self.suffix return body + def as_shallow_dict(self) -> dict: + """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.prefix is not None: body['prefix'] = self.prefix + if self.separator is not None: body['separator'] = self.separator + if self.suffix is not None: body['suffix'] = self.suffix + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions: """Deserializes the MultiValuesOptions from a dictionary.""" @@ -3017,6 +3731,12 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the NumericValue into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> NumericValue: """Deserializes the NumericValue from a dictionary.""" @@ -3060,6 +3780,15 @@ def as_dict(self) -> dict: if self.protocol is not None: body['protocol'] = self.protocol return body + def as_shallow_dict(self) -> dict: + """Serializes the OdbcParams into a shallow dictionary of its immediate attributes.""" + body = {} + if self.hostname is not None: body['hostname'] = self.hostname + if self.path is not None: body['path'] = self.path + if self.port is not None: body['port'] = self.port + if self.protocol is not None: body['protocol'] = self.protocol + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> OdbcParams: """Deserializes the OdbcParams from a dictionary.""" @@ -3114,6 +3843,18 @@ def as_dict(self) -> dict: if self.value: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Parameter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.enum_options is not None: body['enumOptions'] = self.enum_options + if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options + if self.name is not None: body['name'] = self.name + if self.query_id is not None: body['queryId'] = self.query_id + if self.title is not None: body['title'] = self.title + if self.type is not None: body['type'] = self.type + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Parameter: """Deserializes the Parameter from a dictionary.""" @@ -3232,6 +3973,29 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the Query into a shallow dictionary of its immediate attributes.""" + body = {} + if self.apply_auto_limit is 
not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = self.parameters + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Query: """Deserializes the Query from a dictionary.""" @@ -3273,6 +4037,14 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options + if self.query_id is not None: body['query_id'] = self.query_id + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue: """Deserializes the QueryBackedValue from a dictionary.""" @@ -3324,6 +4096,19 @@ def as_dict(self) -> dict: if 
self.tags: body['tags'] = [v for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query is not None: body['query'] = self.query + if self.query_id is not None: body['query_id'] = self.query_id + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryEditContent: """Deserializes the QueryEditContent from a dictionary.""" @@ -3363,6 +4148,16 @@ def as_dict(self) -> dict: if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range + if self.statement_ids: body['statement_ids'] = self.statement_ids + if self.statuses: body['statuses'] = self.statuses + if self.user_ids: body['user_ids'] = self.user_ids + if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" @@ -3472,6 +4267,33 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.channel_used: body['channel_used'] = self.channel_used + if self.duration is not None: body['duration'] = 
self.duration + if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id + if self.error_message is not None: body['error_message'] = self.error_message + if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id + if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name + if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms + if self.is_final is not None: body['is_final'] = self.is_final + if self.lookup_key is not None: body['lookup_key'] = self.lookup_key + if self.metrics: body['metrics'] = self.metrics + if self.plans_state is not None: body['plans_state'] = self.plans_state + if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms + if self.query_id is not None: body['query_id'] = self.query_id + if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms + if self.query_text is not None: body['query_text'] = self.query_text + if self.rows_produced is not None: body['rows_produced'] = self.rows_produced + if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url + if self.statement_type is not None: body['statement_type'] = self.statement_type + if self.status is not None: body['status'] = self.status + if self.user_id is not None: body['user_id'] = self.user_id + if self.user_name is not None: body['user_name'] = self.user_name + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryInfo: """Deserializes the QueryInfo from a dictionary.""" @@ -3522,6 +4344,15 @@ def as_dict(self) -> dict: if self.results: body['results'] = [v.as_dict() for v in self.results] return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.count is not None: 
body['count'] = self.count + if self.page is not None: body['page'] = self.page + if self.page_size is not None: body['page_size'] = self.page_size + if self.results: body['results'] = self.results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryList: """Deserializes the QueryList from a dictionary.""" @@ -3605,8 +4436,38 @@ class QueryMetrics: write_remote_bytes: Optional[int] = None """Size pf persistent data written to cloud object storage in your cloud tenant, in bytes.""" - def as_dict(self) -> dict: - """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body.""" + def as_dict(self) -> dict: + """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms + if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms + if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes + if self.overloading_queue_start_timestamp is not None: + body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp + if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms + if self.provisioning_queue_start_timestamp is not None: + body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp + if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes + if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count + if self.query_compilation_start_timestamp is not None: + body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp + if self.read_bytes is not None: body['read_bytes'] = self.read_bytes + if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes + if self.read_files_count is not None: body['read_files_count'] = 
self.read_files_count + if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count + if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes + if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms + if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache + if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count + if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count + if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes + if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms + if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms + if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes.""" body = {} if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms @@ -3685,6 +4546,15 @@ def as_dict(self) -> dict: if self.schema is not None: body['schema'] = self.schema return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog is not None: body['catalog'] = self.catalog + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + if self.parameters: body['parameters'] = self.parameters + if self.schema is not None: body['schema'] = self.schema + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryOptions: """Deserializes the QueryOptions from a dictionary.""" @@ -3734,6 +4604,19 @@ def 
as_dict(self) -> dict: if self.title is not None: body['title'] = self.title return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryParameter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value + if self.date_value: body['date_value'] = self.date_value + if self.enum_value: body['enum_value'] = self.enum_value + if self.name is not None: body['name'] = self.name + if self.numeric_value: body['numeric_value'] = self.numeric_value + if self.query_backed_value: body['query_backed_value'] = self.query_backed_value + if self.text_value: body['text_value'] = self.text_value + if self.title is not None: body['title'] = self.title + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryParameter: """Deserializes the QueryParameter from a dictionary.""" @@ -3791,6 +4674,19 @@ def as_dict(self) -> dict: if self.tags: body['tags'] = [v for v in self.tags] return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query is not None: body['query'] = self.query + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryPostContent: """Deserializes the QueryPostContent from a dictionary.""" @@ -3858,6 +4754,13 @@ def as_dict(self) -> dict: body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs] return body + def as_shallow_dict(self) -> dict: + """Serializes the 
RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.config_pair: body['config_pair'] = self.config_pair + if self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepeatedEndpointConfPairs: """Deserializes the RepeatedEndpointConfPairs from a dictionary.""" @@ -3873,6 +4776,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the RestoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RestoreResponse: """Deserializes the RestoreResponse from a dictionary.""" @@ -3924,6 +4832,20 @@ def as_dict(self) -> dict: if self.row_offset is not None: body['row_offset'] = self.row_offset return body + def as_shallow_dict(self) -> dict: + """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.data_array: body['data_array'] = self.data_array + if self.external_links: body['external_links'] = self.external_links + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: + body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" @@ -3974,6 +4896,18 @@ def as_dict(self) -> dict: if self.truncated is not None: body['truncated'] = self.truncated return body + def as_shallow_dict(self) -> dict: + """Serializes the ResultManifest into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.chunks: body['chunks'] = self.chunks + if self.format is not None: body['format'] = self.format + if self.schema: body['schema'] = self.schema + if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count + if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count + if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.truncated is not None: body['truncated'] = self.truncated + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" @@ -4001,6 +4935,13 @@ def as_dict(self) -> dict: if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body + def as_shallow_dict(self) -> dict: + """Serializes the ResultSchema into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = self.columns + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResultSchema: """Deserializes the ResultSchema from a dictionary.""" @@ -4035,6 +4976,13 @@ def as_dict(self) -> dict: if self.message is not None: body['message'] = self.message return body + def as_shallow_dict(self) -> dict: + """Serializes the ServiceError into a shallow dictionary of its immediate attributes.""" + body = {} + if self.error_code is not None: body['error_code'] = self.error_code + if self.message is not None: body['message'] = self.message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ServiceError: """Deserializes the ServiceError from a dictionary.""" @@ -4078,6 +5026,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the SetResponse into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetResponse: """Deserializes the SetResponse from a dictionary.""" @@ -4138,6 +5094,22 @@ def as_dict(self) -> dict: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.channel: body['channel'] = self.channel + if self.config_param: body['config_param'] = self.config_param + if self.data_access_config: body['data_access_config'] = self.data_access_config + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types + if self.global_param: body['global_param'] = self.global_param + if self.google_service_account is not None: + body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: body['security_policy'] = self.security_policy + if self.sql_configuration_parameters: + body['sql_configuration_parameters'] = self.sql_configuration_parameters + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigRequest: """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary.""" @@ -4170,6 +5142,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> 
SetWorkspaceWarehouseConfigResponse: """Deserializes the SetWorkspaceWarehouseConfigResponse from a dictionary.""" @@ -4192,6 +5169,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the StartWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StartWarehouseResponse: """Deserializes the StartWarehouseResponse from a dictionary.""" @@ -4233,6 +5215,14 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem: """Deserializes the StatementParameterListItem from a dictionary.""" @@ -4262,6 +5252,15 @@ def as_dict(self) -> dict: if self.status: body['status'] = self.status.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the StatementResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.manifest: body['manifest'] = self.manifest + if self.result: body['result'] = self.result + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.status: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StatementResponse: """Deserializes the StatementResponse from a dictionary.""" @@ -4306,6 +5305,13 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the StatementStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.error: body['error'] = self.error + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StatementStatus: """Deserializes the StatementStatus from a dictionary.""" @@ -4329,6 +5335,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the StopWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> StopWarehouseResponse: """Deserializes the StopWarehouseResponse from a dictionary.""" @@ -4345,6 +5356,12 @@ def as_dict(self) -> dict: if self.message is not None: body['message'] = self.message.value return body + def as_shallow_dict(self) -> dict: + """Serializes the Success into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Success: """Deserializes the Success from a dictionary.""" @@ -4375,6 +5392,14 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TerminationReason into a shallow dictionary of its immediate attributes.""" + body = {} + if self.code is not None: body['code'] = self.code + if self.parameters: body['parameters'] = self.parameters + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" @@ -4486,6 +5511,12 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the TextValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + @classmethod def 
from_dict(cls, d: Dict[str, any]) -> TextValue: """Deserializes the TextValue from a dictionary.""" @@ -4507,6 +5538,13 @@ def as_dict(self) -> dict: if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms return body + def as_shallow_dict(self) -> dict: + """Serializes the TimeRange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms + if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TimeRange: """Deserializes the TimeRange from a dictionary.""" @@ -4524,6 +5562,12 @@ def as_dict(self) -> dict: if self.new_owner is not None: body['new_owner'] = self.new_owner return body + def as_shallow_dict(self) -> dict: + """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes.""" + body = {} + if self.new_owner is not None: body['new_owner'] = self.new_owner + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId: """Deserializes the TransferOwnershipObjectId from a dictionary.""" @@ -4533,9 +5577,15 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId: @dataclass class UpdateAlertRequest: update_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. 
Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" alert: Optional[UpdateAlertRequestAlert] = None @@ -4549,6 +5599,14 @@ def as_dict(self) -> dict: if self.update_mask is not None: body['update_mask'] = self.update_mask return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alert: body['alert'] = self.alert + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest: """Deserializes the UpdateAlertRequest from a dictionary.""" @@ -4602,6 +5660,19 @@ def as_dict(self) -> dict: if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" + body = {} + if self.condition: body['condition'] = self.condition + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert: """Deserializes the 
UpdateAlertRequestAlert from a dictionary.""" @@ -4618,9 +5689,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert: @dataclass class UpdateQueryRequest: update_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" id: Optional[str] = None @@ -4634,6 +5711,14 @@ def as_dict(self) -> dict: if self.update_mask is not None: body['update_mask'] = self.update_mask return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.query: body['query'] = self.query + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest: """Deserializes the UpdateQueryRequest from a dictionary.""" @@ -4692,6 +5777,22 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = self.parameters + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery: """Deserializes the UpdateQueryRequestQuery from a dictionary.""" @@ -4716,6 +5817,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" @@ -4725,9 +5831,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: @dataclass class UpdateVisualizationRequest: update_mask: str - """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of - the setting payload will be updated. The field mask needs to be supplied as single string. To - specify multiple fields in the field mask, use comma as the separator (no space).""" + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" id: Optional[str] = None @@ -4741,6 +5853,14 @@ def as_dict(self) -> dict: if self.visualization: body['visualization'] = self.visualization.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.visualization: body['visualization'] = self.visualization + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest: """Deserializes the UpdateVisualizationRequest from a dictionary.""" @@ -4774,6 +5894,15 @@ def as_dict(self) -> dict: if self.type is not None: body['type'] = self.type return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization: """Deserializes the UpdateVisualizationRequestVisualization from a dictionary.""" @@ -4799,6 +5928,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body 
+ def as_shallow_dict(self) -> dict: + """Serializes the User into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email is not None: body['email'] = self.email + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> User: """Deserializes the User from a dictionary.""" @@ -4846,6 +5983,19 @@ def as_dict(self) -> dict: if self.update_time is not None: body['update_time'] = self.update_time return body + def as_shallow_dict(self) -> dict: + """Serializes the Visualization into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + if self.update_time is not None: body['update_time'] = self.update_time + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Visualization: """Deserializes the Visualization from a dictionary.""" @@ -4883,6 +6033,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not 
None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlRequest: """Deserializes the WarehouseAccessControlRequest from a dictionary.""" @@ -4920,6 +6080,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlResponse: """Deserializes the WarehouseAccessControlResponse from a dictionary.""" @@ -4947,6 +6118,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehousePermission: """Deserializes the WarehousePermission from a dictionary.""" @@ -4981,6 +6160,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehousePermissions into a 
shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehousePermissions: """Deserializes the WarehousePermissions from a dictionary.""" @@ -5004,6 +6191,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsDescription: """Deserializes the WarehousePermissionsDescription from a dictionary.""" @@ -5026,6 +6220,13 @@ def as_dict(self) -> dict: if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsRequest: """Deserializes the WarehousePermissionsRequest from a dictionary.""" @@ -5050,6 +6251,13 @@ def as_dict(self) -> dict: if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the WarehouseTypePair into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.enabled is not None: body['enabled'] = self.enabled + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WarehouseTypePair: """Deserializes the WarehouseTypePair from a dictionary.""" @@ -5090,6 +6298,15 @@ def as_dict(self) -> dict: if self.width is not None: body['width'] = self.width return body + def as_shallow_dict(self) -> dict: + """Serializes the Widget into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.options: body['options'] = self.options + if self.visualization: body['visualization'] = self.visualization + if self.width is not None: body['width'] = self.width + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Widget: """Deserializes the Widget from a dictionary.""" @@ -5136,6 +6353,18 @@ def as_dict(self) -> dict: if self.updated_at is not None: body['updated_at'] = self.updated_at return body + def as_shallow_dict(self) -> dict: + """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.description is not None: body['description'] = self.description + if self.is_hidden is not None: body['isHidden'] = self.is_hidden + if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings + if self.position: body['position'] = self.position + if self.title is not None: body['title'] = self.title + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WidgetOptions: """Deserializes the WidgetOptions from a dictionary.""" @@ -5178,6 +6407,16 @@ def as_dict(self) -> dict: if self.size_y is not None: body['sizeY'] = self.size_y return body + def as_shallow_dict(self) -> dict: + """Serializes the WidgetPosition 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.auto_height is not None: body['autoHeight'] = self.auto_height + if self.col is not None: body['col'] = self.col + if self.row is not None: body['row'] = self.row + if self.size_x is not None: body['sizeX'] = self.size_x + if self.size_y is not None: body['sizeY'] = self.size_y + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WidgetPosition: """Deserializes the WidgetPosition from a dictionary.""" @@ -5280,9 +6519,15 @@ def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertReques :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :param alert: :class:`UpdateAlertRequestAlert` (optional) :returns: :class:`Alert` @@ -5989,9 +7234,15 @@ def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryReques :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. 
To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :param query: :class:`UpdateQueryRequestQuery` (optional) :returns: :class:`Query` @@ -6363,9 +7614,15 @@ def update(self, :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` @@ -6502,6 +7759,24 @@ def update(self, return LegacyVisualization.from_dict(res) +class RedashConfigAPI: + """Redash V2 service for workspace configurations (internal)""" + + def __init__(self, api_client): + self._api = api_client + + def get_config(self) -> ClientConfig: + """Read workspace configuration for Redash-v2. + + :returns: :class:`ClientConfig` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', '/api/2.0/redash-v2/config', headers=headers) + return ClientConfig.from_dict(res) + + class StatementExecutionAPI: """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result. @@ -6579,11 +7854,10 @@ class StatementExecutionAPI: outstanding statement might have already completed execution when the cancel request arrives. Polling for status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur server-side, and cannot account for things such as caller delays and network - latency from caller to service. - The system will auto-close a statement after one hour if the client - stops polling and thus you must poll at least once an hour. - The results are only available for one hour - after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle - of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL - Execution API to cancel it. + latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least + once every 15 minutes. - The results are only available for one hour after success; polling does not + extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. 
For example, + you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html""" @@ -7243,7 +8517,8 @@ def set_permissions(self, ) -> WarehousePermissions: """Set SQL warehouse permissions. - Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param warehouse_id: str The SQL warehouse for which to get or manage permissions. diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index d6c28b840..f1e6aeaa3 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -29,6 +29,12 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" @@ -50,6 +56,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateEndpoint: """Deserializes the CreateEndpoint from a dictionary.""" @@ -93,6 +106,17 @@ def as_dict(self) -> 
dict: if self.primary_key is not None: body['primary_key'] = self.primary_key return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexRequest: """Deserializes the CreateVectorIndexRequest from a dictionary.""" @@ -116,6 +140,12 @@ def as_dict(self) -> dict: if self.vector_index: body['vector_index'] = self.vector_index.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateVectorIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.vector_index: body['vector_index'] = self.vector_index + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexResponse: """Deserializes the CreateVectorIndexResponse from a dictionary.""" @@ -139,6 +169,13 @@ def as_dict(self) -> dict: if self.success_row_count is not None: body['success_row_count'] = self.success_row_count return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDataResult: """Deserializes the DeleteDataResult from a 
dictionary.""" @@ -171,6 +208,13 @@ def as_dict(self) -> dict: if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys] return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index_name is not None: body['index_name'] = self.index_name + if self.primary_keys: body['primary_keys'] = self.primary_keys + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexRequest: """Deserializes the DeleteDataVectorIndexRequest from a dictionary.""" @@ -194,6 +238,13 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.result: body['result'] = self.result + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexResponse: """Deserializes the DeleteDataVectorIndexResponse from a dictionary.""" @@ -209,6 +260,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteEndpointResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteEndpointResponse: """Deserializes the DeleteEndpointResponse from a dictionary.""" @@ -223,6 +279,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteIndexResponse: """Deserializes the DeleteIndexResponse from a dictionary.""" @@ -272,6 +333,18 @@ def as_dict(self) -> dict: if self.source_table is not 
None: body['source_table'] = self.source_table return body + def as_shallow_dict(self) -> dict: + """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: + body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type + if self.source_table is not None: body['source_table'] = self.source_table + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" @@ -325,6 +398,18 @@ def as_dict(self) -> dict: if self.source_table is not None: body['source_table'] = self.source_table return body + def as_shallow_dict(self) -> dict: + """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: + body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type + if self.source_table is not None: body['source_table'] = self.source_table + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" @@ -363,6 +448,14 @@ def 
as_dict(self) -> dict: if self.schema_json is not None: body['schema_json'] = self.schema_json return body + def as_shallow_dict(self) -> dict: + """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.schema_json is not None: body['schema_json'] = self.schema_json + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec: """Deserializes the DirectAccessVectorIndexSpec from a dictionary.""" @@ -389,6 +482,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.embedding_model_endpoint_name is not None: + body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EmbeddingSourceColumn: """Deserializes the EmbeddingSourceColumn from a dictionary.""" @@ -411,6 +512,13 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension + if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EmbeddingVectorColumn: """Deserializes the EmbeddingVectorColumn from a dictionary.""" @@ -461,6 +569,21 @@ def as_dict(self) -> dict: if self.num_indexes is not None: body['num_indexes'] = self.num_indexes 
return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.endpoint_status: body['endpoint_status'] = self.endpoint_status + if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user + if self.name is not None: body['name'] = self.name + if self.num_indexes is not None: body['num_indexes'] = self.num_indexes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" @@ -492,6 +615,13 @@ def as_dict(self) -> dict: if self.state is not None: body['state'] = self.state.value return body + def as_shallow_dict(self) -> dict: + """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointStatus: """Deserializes the EndpointStatus from a dictionary.""" @@ -528,6 +658,13 @@ def as_dict(self) -> dict: if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoints: body['endpoints'] = self.endpoints + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + @classmethod def from_dict(cls, d: Dict[str, 
any]) -> ListEndpointResponse: """Deserializes the ListEndpointResponse from a dictionary.""" @@ -545,6 +682,12 @@ def as_dict(self) -> dict: if self.values: body['values'] = [v.as_dict() for v in self.values] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.values: body['values'] = self.values + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListValue: """Deserializes the ListValue from a dictionary.""" @@ -566,6 +709,13 @@ def as_dict(self) -> dict: if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.vector_indexes: body['vector_indexes'] = self.vector_indexes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListVectorIndexesResponse: """Deserializes the ListVectorIndexesResponse from a dictionary.""" @@ -590,6 +740,13 @@ def as_dict(self) -> dict: if self.value: body['value'] = self.value.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MapStringValueEntry: """Deserializes the MapStringValueEntry from a dictionary.""" @@ -628,6 +785,16 @@ def as_dict(self) -> dict: if self.primary_key is not None: body['primary_key'] = self.primary_key return body + def as_shallow_dict(self) -> dict: + """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creator is not None: body['creator'] = 
self.creator + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MiniVectorIndex: """Deserializes the MiniVectorIndex from a dictionary.""" @@ -672,6 +839,14 @@ def as_dict(self) -> dict: if self.page_token is not None: body['page_token'] = self.page_token return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_name is not None: body['index_name'] = self.index_name + if self.page_token is not None: body['page_token'] = self.page_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexNextPageRequest: """Deserializes the QueryVectorIndexNextPageRequest from a dictionary.""" @@ -724,6 +899,19 @@ def as_dict(self) -> dict: if self.score_threshold is not None: body['score_threshold'] = self.score_threshold return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: body['columns'] = self.columns + if self.filters_json is not None: body['filters_json'] = self.filters_json + if self.index_name is not None: body['index_name'] = self.index_name + if self.num_results is not None: body['num_results'] = self.num_results + if self.query_text is not None: body['query_text'] = self.query_text + if self.query_type is not None: body['query_type'] = self.query_type + if self.query_vector: body['query_vector'] = self.query_vector + if self.score_threshold is not None: body['score_threshold'] = self.score_threshold + return body + 
@classmethod def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexRequest: """Deserializes the QueryVectorIndexRequest from a dictionary.""" @@ -758,6 +946,14 @@ def as_dict(self) -> dict: if self.result: body['result'] = self.result.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.manifest: body['manifest'] = self.manifest + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.result: body['result'] = self.result + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexResponse: """Deserializes the QueryVectorIndexResponse from a dictionary.""" @@ -783,6 +979,13 @@ def as_dict(self) -> dict: if self.row_count is not None: body['row_count'] = self.row_count return body + def as_shallow_dict(self) -> dict: + """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_array: body['data_array'] = self.data_array + if self.row_count is not None: body['row_count'] = self.row_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" @@ -806,6 +1009,13 @@ def as_dict(self) -> dict: if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body + def as_shallow_dict(self) -> dict: + """Serializes the ResultManifest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = self.columns + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" @@ -833,6 +1043,14 @@ def as_dict(self) -> dict: if self.num_results is not None: body['num_results'] = self.num_results return body + def as_shallow_dict(self) -> 
dict: + """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index_name is not None: body['index_name'] = self.index_name + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + if self.num_results is not None: body['num_results'] = self.num_results + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexRequest: """Deserializes the ScanVectorIndexRequest from a dictionary.""" @@ -858,6 +1076,13 @@ def as_dict(self) -> dict: if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key return body + def as_shallow_dict(self) -> dict: + """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: body['data'] = self.data + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexResponse: """Deserializes the ScanVectorIndexResponse from a dictionary.""" @@ -875,6 +1100,12 @@ def as_dict(self) -> dict: if self.fields: body['fields'] = [v.as_dict() for v in self.fields] return body + def as_shallow_dict(self) -> dict: + """Serializes the Struct into a shallow dictionary of its immediate attributes.""" + body = {} + if self.fields: body['fields'] = self.fields + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Struct: """Deserializes the Struct from a dictionary.""" @@ -889,6 +1120,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the SyncIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SyncIndexResponse: """Deserializes the SyncIndexResponse from a dictionary.""" @@ -912,6 +1148,13 @@ def as_dict(self) -> dict: if self.success_row_count is not None: 
body['success_row_count'] = self.success_row_count return body + def as_shallow_dict(self) -> dict: + """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpsertDataResult: """Deserializes the UpsertDataResult from a dictionary.""" @@ -944,6 +1187,13 @@ def as_dict(self) -> dict: if self.inputs_json is not None: body['inputs_json'] = self.inputs_json return body + def as_shallow_dict(self) -> dict: + """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index_name is not None: body['index_name'] = self.index_name + if self.inputs_json is not None: body['inputs_json'] = self.inputs_json + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexRequest: """Deserializes the UpsertDataVectorIndexRequest from a dictionary.""" @@ -967,6 +1217,13 @@ def as_dict(self) -> dict: if self.status is not None: body['status'] = self.status.value return body + def as_shallow_dict(self) -> dict: + """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.result: body['result'] = self.result + if self.status is not None: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexResponse: """Deserializes the UpsertDataVectorIndexResponse from a dictionary.""" @@ -999,6 +1256,17 @@ def as_dict(self) -> dict: if self.struct_value: body['struct_value'] = self.struct_value.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the Value into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bool_value is not None: 
body['bool_value'] = self.bool_value + if self.list_value: body['list_value'] = self.list_value + if self.null_value is not None: body['null_value'] = self.null_value + if self.number_value is not None: body['number_value'] = self.number_value + if self.string_value is not None: body['string_value'] = self.string_value + if self.struct_value: body['struct_value'] = self.struct_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Value: """Deserializes the Value from a dictionary.""" @@ -1052,6 +1320,19 @@ def as_dict(self) -> dict: if self.status: body['status'] = self.status.as_dict() return body + def as_shallow_dict(self) -> dict: + """Serializes the VectorIndex into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creator is not None: body['creator'] = self.creator + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.status: body['status'] = self.status + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> VectorIndex: """Deserializes the VectorIndex from a dictionary.""" @@ -1090,6 +1371,15 @@ def as_dict(self) -> dict: if self.ready is not None: body['ready'] = self.ready return body + def as_shallow_dict(self) -> dict: + """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index_url is not None: body['index_url'] = self.index_url + if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count + if self.message is not None: body['message'] = self.message + if self.ready is not None: 
body['ready'] = self.ready + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> VectorIndexStatus: """Deserializes the VectorIndexStatus from a dictionary.""" diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 7c8bfbd5e..eb5418987 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -29,6 +29,13 @@ def as_dict(self) -> dict: if self.principal is not None: body['principal'] = self.principal return body + def as_shallow_dict(self) -> dict: + """Serializes the AclItem into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: body['permission'] = self.permission + if self.principal is not None: body['principal'] = self.principal + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AclItem: """Deserializes the AclItem from a dictionary.""" @@ -57,6 +64,13 @@ def as_dict(self) -> dict: if self.resource_id is not None: body['resource_id'] = self.resource_id return body + def as_shallow_dict(self) -> dict: + """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dns_name is not None: body['dns_name'] = self.dns_name + if self.resource_id is not None: body['resource_id'] = self.resource_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata: """Deserializes the AzureKeyVaultSecretScopeMetadata from a dictionary.""" @@ -91,6 +105,14 @@ def as_dict(self) -> dict: if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if 
self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest: """Deserializes the CreateCredentialsRequest from a dictionary.""" @@ -119,6 +141,14 @@ def as_dict(self) -> dict: if self.git_username is not None: body['git_username'] = self.git_username return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse: """Deserializes the CreateCredentialsResponse from a dictionary.""" @@ -154,6 +184,15 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest: """Deserializes the CreateRepoRequest from a dictionary.""" @@ -198,6 +237,18 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if 
self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse: """Deserializes the CreateRepoResponse from a dictionary.""" @@ -234,6 +285,16 @@ def as_dict(self) -> dict: if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateScope into a shallow dictionary of its immediate attributes.""" + body = {} + if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault + if self.initial_manage_principal is not None: + body['initial_manage_principal'] = self.initial_manage_principal + if self.scope is not None: body['scope'] = self.scope + if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateScope: """Deserializes the CreateScope from a dictionary.""" @@ -252,6 +313,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the CreateScopeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateScopeResponse: """Deserializes the CreateScopeResponse from a dictionary.""" @@ -278,6 +344,14 @@ def as_dict(self) -> dict: if self.git_username is not None: body['git_username'] = self.git_username return body + def as_shallow_dict(self) -> dict: + """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: 
body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> CredentialInfo: """Deserializes the CredentialInfo from a dictionary.""" @@ -303,6 +377,13 @@ def as_dict(self) -> dict: if self.recursive is not None: body['recursive'] = self.recursive return body + def as_shallow_dict(self) -> dict: + """Serializes the Delete into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Delete: """Deserializes the Delete from a dictionary.""" @@ -324,6 +405,13 @@ def as_dict(self) -> dict: if self.scope is not None: body['scope'] = self.scope return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteAcl: """Deserializes the DeleteAcl from a dictionary.""" @@ -338,6 +426,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteAclResponse: """Deserializes the DeleteAclResponse from a dictionary.""" @@ -352,6 +445,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialsResponse: """Deserializes the 
DeleteCredentialsResponse from a dictionary.""" @@ -366,6 +464,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRepoResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteRepoResponse: """Deserializes the DeleteRepoResponse from a dictionary.""" @@ -380,6 +483,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" @@ -397,6 +505,12 @@ def as_dict(self) -> dict: if self.scope is not None: body['scope'] = self.scope return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteScope into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scope is not None: body['scope'] = self.scope + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteScope: """Deserializes the DeleteScope from a dictionary.""" @@ -411,6 +525,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteScopeResponse: """Deserializes the DeleteScopeResponse from a dictionary.""" @@ -432,6 +551,13 @@ def as_dict(self) -> dict: if self.scope is not None: body['scope'] = self.scope return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.scope is not None: body['scope'] = self.scope + return body + @classmethod def from_dict(cls, d: 
Dict[str, any]) -> DeleteSecret: """Deserializes the DeleteSecret from a dictionary.""" @@ -446,6 +572,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the DeleteSecretResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> DeleteSecretResponse: """Deserializes the DeleteSecretResponse from a dictionary.""" @@ -478,6 +609,13 @@ def as_dict(self) -> dict: if self.file_type is not None: body['file_type'] = self.file_type return body + def as_shallow_dict(self) -> dict: + """Serializes the ExportResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.file_type is not None: body['file_type'] = self.file_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ExportResponse: """Deserializes the ExportResponse from a dictionary.""" @@ -504,6 +642,14 @@ def as_dict(self) -> dict: if self.git_username is not None: body['git_username'] = self.git_username return body + def as_shallow_dict(self) -> dict: + """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse: """Deserializes the GetCredentialsResponse from a dictionary.""" @@ -523,6 +669,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} 
+ if self.permission_levels: body['permission_levels'] = self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse: """Deserializes the GetRepoPermissionLevelsResponse from a dictionary.""" @@ -564,6 +716,18 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse: """Deserializes the GetRepoResponse from a dictionary.""" @@ -591,6 +755,13 @@ def as_dict(self) -> dict: if self.value is not None: body['value'] = self.value return body + def as_shallow_dict(self) -> dict: + """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetSecretResponse: """Deserializes the GetSecretResponse from a dictionary.""" @@ -608,6 +779,12 @@ def as_dict(self) -> dict: if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body + def as_shallow_dict(self) -> dict: + """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: body['permission_levels'] = 
self.permission_levels + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceObjectPermissionLevelsResponse: """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary.""" @@ -657,6 +834,16 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the Import into a shallow dictionary of its immediate attributes.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.format is not None: body['format'] = self.format + if self.language is not None: body['language'] = self.language + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Import: """Deserializes the Import from a dictionary.""" @@ -684,6 +871,7 @@ class ImportFormat(Enum): DBC = 'DBC' HTML = 'HTML' JUPYTER = 'JUPYTER' + RAW = 'RAW' R_MARKDOWN = 'R_MARKDOWN' SOURCE = 'SOURCE' @@ -696,6 +884,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the ImportResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ImportResponse: """Deserializes the ImportResponse from a dictionary.""" @@ -722,6 +915,12 @@ def as_dict(self) -> dict: if self.items: body['items'] = [v.as_dict() for v in self.items] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items: body['items'] = self.items + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse: """Deserializes the ListAclsResponse from a dictionary.""" @@ -739,6 +938,12 @@ def as_dict(self) -> dict: if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] 
return body + def as_shallow_dict(self) -> dict: + """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credentials: body['credentials'] = self.credentials + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse: """Deserializes the ListCredentialsResponse from a dictionary.""" @@ -761,6 +966,13 @@ def as_dict(self) -> dict: if self.repos: body['repos'] = [v.as_dict() for v in self.repos] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListReposResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.repos: body['repos'] = self.repos + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListReposResponse: """Deserializes the ListReposResponse from a dictionary.""" @@ -778,6 +990,12 @@ def as_dict(self) -> dict: if self.objects: body['objects'] = [v.as_dict() for v in self.objects] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.objects: body['objects'] = self.objects + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListResponse: """Deserializes the ListResponse from a dictionary.""" @@ -795,6 +1013,12 @@ def as_dict(self) -> dict: if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes] return body + def as_shallow_dict(self) -> dict: + """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scopes: body['scopes'] = self.scopes + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListScopesResponse: """Deserializes the ListScopesResponse from a dictionary.""" @@ -812,6 +1036,12 @@ def as_dict(self) -> dict: if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] return body 
+ def as_shallow_dict(self) -> dict: + """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.secrets: body['secrets'] = self.secrets + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSecretsResponse: """Deserializes the ListSecretsResponse from a dictionary.""" @@ -830,6 +1060,12 @@ def as_dict(self) -> dict: if self.path is not None: body['path'] = self.path return body + def as_shallow_dict(self) -> dict: + """Serializes the Mkdirs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.path is not None: body['path'] = self.path + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> Mkdirs: """Deserializes the Mkdirs from a dictionary.""" @@ -844,6 +1080,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the MkdirsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> MkdirsResponse: """Deserializes the MkdirsResponse from a dictionary.""" @@ -893,6 +1134,19 @@ def as_dict(self) -> dict: if self.size is not None: body['size'] = self.size return body + def as_shallow_dict(self) -> dict: + """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.language is not None: body['language'] = self.language + if self.modified_at is not None: body['modified_at'] = self.modified_at + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + if self.path is not None: body['path'] = self.path + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.size is not None: body['size'] = self.size + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> ObjectInfo: 
"""Deserializes the ObjectInfo from a dictionary.""" @@ -940,6 +1194,14 @@ def as_dict(self) -> dict: if self.scope is not None: body['scope'] = self.scope return body + def as_shallow_dict(self) -> dict: + """Serializes the PutAcl into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: body['permission'] = self.permission + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutAcl: """Deserializes the PutAcl from a dictionary.""" @@ -956,6 +1218,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PutAclResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutAclResponse: """Deserializes the PutAclResponse from a dictionary.""" @@ -985,6 +1252,15 @@ def as_dict(self) -> dict: if self.string_value is not None: body['string_value'] = self.string_value return body + def as_shallow_dict(self) -> dict: + """Serializes the PutSecret into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bytes_value is not None: body['bytes_value'] = self.bytes_value + if self.key is not None: body['key'] = self.key + if self.scope is not None: body['scope'] = self.scope + if self.string_value is not None: body['string_value'] = self.string_value + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutSecret: """Deserializes the PutSecret from a dictionary.""" @@ -1002,6 +1278,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> PutSecretResponse: """Deserializes the PutSecretResponse from a 
dictionary.""" @@ -1032,6 +1313,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlRequest: """Deserializes the RepoAccessControlRequest from a dictionary.""" @@ -1069,6 +1360,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse: """Deserializes the RepoAccessControlResponse from a dictionary.""" @@ -1116,6 +1418,18 @@ def as_dict(self) -> dict: if self.url is not None: body['url'] = self.url return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: 
body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoInfo: """Deserializes the RepoInfo from a dictionary.""" @@ -1145,6 +1459,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoPermission: """Deserializes the RepoPermission from a dictionary.""" @@ -1179,6 +1501,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoPermissions: """Deserializes the RepoPermissions from a dictionary.""" @@ -1201,6 +1531,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + 
"""Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsDescription: """Deserializes the RepoPermissionsDescription from a dictionary.""" @@ -1223,6 +1560,13 @@ def as_dict(self) -> dict: if self.repo_id is not None: body['repo_id'] = self.repo_id return body + def as_shallow_dict(self) -> dict: + """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.repo_id is not None: body['repo_id'] = self.repo_id + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsRequest: """Deserializes the RepoPermissionsRequest from a dictionary.""" @@ -1252,6 +1596,14 @@ def as_dict(self) -> dict: body['last_updated_timestamp'] = self.last_updated_timestamp return body + def as_shallow_dict(self) -> dict: + """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SecretMetadata: """Deserializes the SecretMetadata from a dictionary.""" @@ -1277,6 +1629,14 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name return body + def as_shallow_dict(self) -> dict: + """Serializes the SecretScope into a shallow dictionary of its immediate attributes.""" + body = {} + if self.backend_type is not None: body['backend_type'] = self.backend_type + if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata 
+ if self.name is not None: body['name'] = self.name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SecretScope: """Deserializes the SecretScope from a dictionary.""" @@ -1300,6 +1660,12 @@ def as_dict(self) -> dict: if self.patterns: body['patterns'] = [v for v in self.patterns] return body + def as_shallow_dict(self) -> dict: + """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes.""" + body = {} + if self.patterns: body['patterns'] = self.patterns + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparseCheckout: """Deserializes the SparseCheckout from a dictionary.""" @@ -1321,6 +1687,12 @@ def as_dict(self) -> dict: if self.patterns: body['patterns'] = [v for v in self.patterns] return body + def as_shallow_dict(self) -> dict: + """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.patterns: body['patterns'] = self.patterns + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate: """Deserializes the SparseCheckoutUpdate from a dictionary.""" @@ -1359,6 +1731,15 @@ def as_dict(self) -> dict: if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest: """Deserializes the UpdateCredentialsRequest from a dictionary.""" @@ -1376,6 +1757,11 @@ def as_dict(self) -> 
dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsResponse: """Deserializes the UpdateCredentialsResponse from a dictionary.""" @@ -1408,6 +1794,15 @@ def as_dict(self) -> dict: if self.tag is not None: body['tag'] = self.tag return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch is not None: body['branch'] = self.branch + if self.repo_id is not None: body['repo_id'] = self.repo_id + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.tag is not None: body['tag'] = self.tag + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest: """Deserializes the UpdateRepoRequest from a dictionary.""" @@ -1425,6 +1820,11 @@ def as_dict(self) -> dict: body = {} return body + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRepoResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateRepoResponse: """Deserializes the UpdateRepoResponse from a dictionary.""" @@ -1455,6 +1855,16 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + 
return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlRequest: """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary.""" @@ -1492,6 +1902,17 @@ def as_dict(self) -> dict: if self.user_name is not None: body['user_name'] = self.user_name return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: + body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlResponse: """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary.""" @@ -1519,6 +1940,14 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermission: """Deserializes the WorkspaceObjectPermission from a dictionary.""" @@ -1553,6 +1982,14 @@ def as_dict(self) -> dict: if self.object_type is not None: body['object_type'] = self.object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the 
WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissions: """Deserializes the WorkspaceObjectPermissions from a dictionary.""" @@ -1576,6 +2013,13 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsDescription: """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary.""" @@ -1602,6 +2046,14 @@ def as_dict(self) -> dict: if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type return body + def as_shallow_dict(self) -> dict: + """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id + if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type + return body + @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsRequest: """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary.""" @@ -1799,7 +2251,7 @@ def delete(self, 
repo_id: int): Deletes the specified repo. :param repo_id: int - ID of the Git folder (repo) object in the workspace. + The ID for the corresponding repo to delete. """ @@ -1897,7 +2349,8 @@ def set_permissions( access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions: """Set repo permissions. - Sets permissions on a repo. Repos can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param repo_id: str The repo for which to get or manage permissions. @@ -2527,8 +2980,9 @@ def set_permissions( ) -> WorkspaceObjectPermissions: """Set workspace object permissions. - Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent - objects or root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root + object. :param workspace_object_type: str The workspace object type for which to get or manage permissions. diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py index 5b15d2822..45adfe51d 100644 --- a/databricks/sdk/useragent.py +++ b/databricks/sdk/useragent.py @@ -148,4 +148,58 @@ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None, base.extend(_extra) base.extend(_get_upstream_user_agent_info()) base.extend(_get_runtime_info()) + if cicd_provider() != "": + base.append((CICD_KEY, cicd_provider())) return " ".join(f"{k}/{v}" for k, v in base) + + +# List of CI/CD providers and pairs of envvar/value that are used to detect them. 
+_PROVIDERS = { + "github": [("GITHUB_ACTIONS", "true")], + "gitlab": [("GITLAB_CI", "true")], + "jenkins": [("JENKINS_URL", "")], + "azure-devops": [("TF_BUILD", "True")], + "circle": [("CIRCLECI", "true")], + "travis": [("TRAVIS", "true")], + "bitbucket": [("BITBUCKET_BUILD_NUMBER", "")], + "google-cloud-build": [("PROJECT_ID", ""), ("BUILD_ID", ""), ("PROJECT_NUMBER", ""), ("LOCATION", "")], + "aws-code-build": [("CODEBUILD_BUILD_ARN", "")], + "tf-cloud": [("TFC_RUN_ID", "")], +} + +# Private variable to store the CI/CD provider. This value is computed at +# the first invocation of cicd_provider() and is cached for subsequent calls. +_cicd_provider = None + + +def cicd_provider() -> str: + """Return the CI/CD provider if detected, or an empty string otherwise.""" + + # This function is safe because (i) assignments are atomic, and (ii) + # computing the CI/CD provider is idempotent. + global _cicd_provider + if _cicd_provider is not None: + return _cicd_provider + + providers = [] + for p in _PROVIDERS: + found = True + for envvar, value in _PROVIDERS[p]: + v = os.getenv(envvar) + if v is None or (value != "" and v != value): + found = False + break + + if found: + providers.append(p) + + if len(providers) == 0: + _cicd_provider = "" + else: + # TODO: reconsider what to do if multiple providers are detected. + # The current mechanism has the benefit of being deterministic and + # robust to ordering changes in _PROVIDERS. 
+ providers.sort() + _cicd_provider = providers[0] + + return _cicd_provider diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index aae5aca67..c09c695fd 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.36.0' +__version__ = '0.44.1' diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst new file mode 100644 index 000000000..6f7d7ede1 --- /dev/null +++ b/docs/account/billing/budget_policy.rst @@ -0,0 +1,88 @@ +``a.budget_policy``: Budget Policy +================================== +.. currentmodule:: databricks.sdk.service.billing + +.. py:class:: BudgetPolicyAPI + + A service serves REST API about Budget policies + + .. py:method:: create( [, custom_tags: Optional[List[compute.CustomPolicyTag]], policy_name: Optional[str], request_id: Optional[str]]) -> BudgetPolicy + + Create a budget policy. + + Creates a new policy. + + :param custom_tags: List[:class:`CustomPolicyTag`] (optional) + A list of tags defined by the customer. At most 40 entries are allowed per policy. + :param policy_name: str (optional) + The name of the policy. - Must be unique among active policies. - Can contain only characters of + 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace. + :param request_id: str (optional) + A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is + recommended. This request is only idempotent if a `request_id` is provided. + + :returns: :class:`BudgetPolicy` + + + .. py:method:: delete(policy_id: str) + + Delete a budget policy. + + Deletes a policy + + :param policy_id: str + The Id of the policy. + + + + + .. py:method:: get(policy_id: str) -> BudgetPolicy + + Get a budget policy. + + Retrieves a policy by its ID. + + :param policy_id: str + The Id of the policy. + + :returns: :class:`BudgetPolicy` + + + .. 
py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy] + + List policies. + + Lists all policies. Policies are returned in the alphabetically ascending order of their names. + + :param filter_by: :class:`Filter` (optional) + A filter to apply to the list of policies. + :param page_size: int (optional) + The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be + returned. The maximum value is 1000; values above 1000 will be coerced to 1000. + :param page_token: str (optional) + A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the + subsequent page. If unspecified, the first page will be returned. + + When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the + call that provided the page token. + :param sort_spec: :class:`SortSpec` (optional) + The sort specification. + + :returns: Iterator over :class:`BudgetPolicy` + + + .. py:method:: update(policy_id: str [, limit_config: Optional[LimitConfig], policy: Optional[BudgetPolicy]]) -> BudgetPolicy + + Update a budget policy. + + Updates a policy + + :param policy_id: str + The Id of the policy. This field is generated by Databricks and globally unique. + :param limit_config: :class:`LimitConfig` (optional) + DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy + :param policy: :class:`BudgetPolicy` (optional) + Contains the BudgetPolicy details. + + :returns: :class:`BudgetPolicy` + \ No newline at end of file diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index edba0a733..43c77d00b 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -115,7 +115,7 @@ Gets a budget configuration for an account. Both account and budget configuration are specified by ID. 
:param budget_id: str - The Databricks budget configuration ID. + The budget configuration ID :returns: :class:`GetBudgetConfigurationResponse` diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst index 0e07da594..b8b317616 100644 --- a/docs/account/billing/index.rst +++ b/docs/account/billing/index.rst @@ -8,6 +8,7 @@ Configure different aspects of Databricks billing and usage. :maxdepth: 1 billable_usage + budget_policy budgets log_delivery usage_dashboards \ No newline at end of file diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 0dcc3d8e0..7043a343b 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -7,7 +7,7 @@ These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. - .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput + .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput Create Custom OAuth App Integration. @@ -26,6 +26,9 @@ profile, email. :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy + :param user_authorized_scopes: List[str] (optional) + Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. Must be a subset of scopes. :returns: :class:`CreateCustomAppIntegrationOutput` @@ -49,6 +52,7 @@ Gets the Custom OAuth App Integration for the given integration id. 
:param integration_id: str + The OAuth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` @@ -66,7 +70,7 @@ :returns: Iterator over :class:`GetCustomAppIntegrationOutput` - .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) + .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) Updates Custom OAuth App Integration. @@ -76,8 +80,14 @@ :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration + :param scopes: List[str] (optional) + List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs + this will fully replace the existing values instead of appending :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the custom OAuth app integration + :param user_authorized_scopes: List[str] (optional) + Scopes that will need to be consented by end user to mint the access token. If the user does not + authorize the access token will not be minted. Must be a subset of scopes. \ No newline at end of file diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst new file mode 100644 index 000000000..c95bf563c --- /dev/null +++ b/docs/account/oauth2/federation_policy.rst @@ -0,0 +1,105 @@ +``a.federation_policy``: Account Federation Policies +==================================================== +.. currentmodule:: databricks.sdk.service.oauth2 + +.. py:class:: AccountFederationPolicyAPI + + These APIs manage account federation policies. 
+ + Account federation policies allow users and service principals in your Databricks account to securely + access Databricks APIs using tokens from your trusted identity providers (IdPs). + + With token federation, your users and service principals can exchange tokens from your IdP for Databricks + OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage + Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. + Databricks token federation is typically used in combination with [SCIM], so users in your IdP are + synchronized into your Databricks account. + + Token federation is configured in your Databricks account using an account federation policy. An account + federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * + how to determine which Databricks user, or subject, a token is issued for + + To configure a federation policy, you provide the following: * The required token __issuer__, as specified + in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed + token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to + represent the recipient of the token. As long as the audience in the token matches at least one audience + in the policy, the token is considered a match. If unspecified, the default value is your Databricks + account id. * The __subject claim__, which indicates which token claim contains the Databricks username of + the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the + public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), + Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks + strongly recommends relying on your issuer’s well known endpoint for discovering public keys. 
+ + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] + subject_claim: "sub" ``` + + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user + `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": + "username@mycompany.com" } ``` + + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if + your users do not already have the ability to generate tokens that are compatible with your federation + policy. + + You do not need to configure an OAuth application in Databricks to use token federation. + + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html + + .. py:method:: create( [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy + + Create account federation policy. + + :param policy: :class:`FederationPolicy` (optional) + :param policy_id: str (optional) + The identifier for the federation policy. The identifier must contain only lowercase alphanumeric + characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. + + :returns: :class:`FederationPolicy` + + + .. py:method:: delete(policy_id: str) + + Delete account federation policy. + + :param policy_id: str + The identifier for the federation policy. + + + + + .. py:method:: get(policy_id: str) -> FederationPolicy + + Get account federation policy. + + :param policy_id: str + The identifier for the federation policy. + + :returns: :class:`FederationPolicy` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] + + List account federation policies. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`FederationPolicy` + + + .. 
py:method:: update(policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy + + Update account federation policy. + + :param policy_id: str + The identifier for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'description,oidc_policy.audiences'. + + :returns: :class:`FederationPolicy` + \ No newline at end of file diff --git a/docs/account/oauth2/index.rst b/docs/account/oauth2/index.rst index a4663ef6b..745a3e721 100644 --- a/docs/account/oauth2/index.rst +++ b/docs/account/oauth2/index.rst @@ -8,6 +8,8 @@ Configure OAuth 2.0 application registrations for Databricks :maxdepth: 1 custom_app_integration + federation_policy o_auth_published_apps published_app_integration + service_principal_federation_policy service_principal_secrets \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst new file mode 100644 index 000000000..2e0577ba4 --- /dev/null +++ b/docs/account/oauth2/service_principal_federation_policy.rst @@ -0,0 +1,115 @@ +``a.service_principal_federation_policy``: Service Principal Federation Policies +================================================================================ +.. currentmodule:: databricks.sdk.service.oauth2 + +.. py:class:: ServicePrincipalFederationPolicyAPI + + These APIs manage service principal federation policies. 
+ + Service principal federation, also known as Workload Identity Federation, allows your automated workloads + running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. + With Workload Identity Federation, your application (or workload) authenticates to Databricks as a + Databricks service principal, using tokens provided by the workload runtime. + + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from + automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever + possible. Workload Identity Federation is supported by many popular services, including GitHub Actions, + Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. + + Workload identity federation is configured in your Databricks account using a service principal federation + policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is + allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the + Databricks service principal + + To configure a federation policy, you provide the following: * The required token __issuer__, as specified + in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the + workload identity provider. * The required token __subject__, as specified in the “sub” claim of + workload identity tokens. The subject uniquely identifies the workload in the workload runtime + environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity + tokens. The audience is intended to represent the recipient of the token. As long as the audience in the + token matches at least one audience in the policy, the token is considered a match. If unspecified, the + default value is your Databricks account id. 
* Optionally, the public keys used to validate the signature + of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically + fetches the public keys from the issuer’s well-known endpoint. Databricks strongly recommends relying on + the issuer’s well-known endpoint for discovering public keys. + + An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer: + "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: + "repo:my-github-org/my-repo:environment:prod" ``` + + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` + { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": + "repo:my-github-org/my-repo:environment:prod" } ``` + + You may also need to configure the workload runtime to generate tokens for your workloads. + + You do not need to configure an OAuth application in Databricks to use token federation. + + .. py:method:: create(service_principal_id: int [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy + + Create service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param policy_id: str (optional) + The identifier for the federation policy. The identifier must contain only lowercase alphanumeric + characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. + + :returns: :class:`FederationPolicy` + + + .. py:method:: delete(service_principal_id: int, policy_id: str) + + Delete service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. + + + + + .. 
py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy + + Get service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. + + :returns: :class:`FederationPolicy` + + + .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] + + List service principal federation policies. + + :param service_principal_id: int + The service principal id for the federation policy. + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`FederationPolicy` + + + .. py:method:: update(service_principal_id: int, policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy + + Update service principal federation policy. + + :param service_principal_id: int + The service principal id for the federation policy. + :param policy_id: str + The identifier for the federation policy. + :param policy: :class:`FederationPolicy` (optional) + :param update_mask: str (optional) + The field mask specifies which fields of the policy to update. To specify multiple fields in the + field mask, use comma as the separator (no space). The special value '*' indicates that all fields + should be updated (full replacement). If unspecified, all fields that are set in the policy provided + in the update request will overwrite the corresponding fields in the existing policy. Example value: + 'description,oidc_policy.audiences'. + + :returns: :class:`FederationPolicy` + \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst index 4249b9dea..955d6da53 100644 --- a/docs/account/oauth2/service_principal_secrets.rst +++ b/docs/account/oauth2/service_principal_secrets.rst @@ -42,7 +42,7 @@ - .. 
py:method:: list(service_principal_id: int) -> Iterator[SecretInfo] + .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo] List service principal secrets. @@ -51,6 +51,13 @@ :param service_principal_id: int The service principal ID. + :param page_token: str (optional) + An opaque page token which was the `next_page_token` in the response of the previous request to list + the secrets for this service principal. Provide this token to retrieve the next page of secret + entries. When providing a `page_token`, all other parameters provided to the request must match the + previous request. To list all of the secrets for a service principal, it is necessary to continue + requesting pages of entries until the response contains no `next_page_token`. Note that the number + of entries returned must not be used to determine when the listing is complete. :returns: Iterator over :class:`SecretInfo` \ No newline at end of file diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 98c47cc9b..ad8a75942 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -11,7 +11,7 @@ These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - .. 
py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] + .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] Usage: @@ -116,6 +116,8 @@ [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) The configurations for the GKE cluster of a Databricks workspace. + :param is_no_public_ip_enabled: bool (optional) + Whether no public IP is enabled for the workspace. :param location: str (optional) The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. 
:param managed_services_customer_managed_key_id: str (optional) @@ -148,7 +150,7 @@ See :method:wait_get_workspace_running for more details. - .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace + .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace .. py:method:: delete(workspace_id: int) @@ -227,7 +229,7 @@ :returns: Iterator over :class:`Workspace` - .. 
py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] + .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] Usage: @@ -370,6 +372,9 @@ The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID. + :param private_access_settings_id: str (optional) + The ID of the workspace's private access settings configuration object. This parameter is available + only for updating failed workspaces. :param storage_configuration_id: str (optional) The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces. @@ -382,7 +387,7 @@ See :method:wait_get_workspace_running for more details. - .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace + .. 
py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace .. py:method:: wait_get_workspace_running(workspace_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Workspace], None]]) -> Workspace diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst index b6fec691c..885aae89f 100644 --- a/docs/account/settings/csp_enablement_account.rst +++ b/docs/account/settings/csp_enablement_account.rst @@ -37,9 +37,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:returns: :class:`CspEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst index d7f1db9d3..b10d7e2dc 100644 --- a/docs/account/settings/disable_legacy_features.rst +++ b/docs/account/settings/disable_legacy_features.rst @@ -52,9 +52,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyFeatures` \ No newline at end of file diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst new file mode 100644 index 000000000..9485b7332 --- /dev/null +++ b/docs/account/settings/enable_ip_access_lists.rst @@ -0,0 +1,63 @@ +``a.settings.enable_ip_access_lists``: Enable Account IP Access Lists +===================================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. 
py:class:: EnableIpAccessListsAPI + + Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or + disable restricted access based on IP addresses. + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse + + Delete the account IP access toggle setting. + + Reverts the value of the account IP access toggle setting to default (ON) + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAccountIpAccessEnableResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable + + Get the account IP access toggle setting. + + Gets the value of the account IP access toggle setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`AccountIpAccessEnable` + + + .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable + + Update the account IP access toggle setting. 
+ + Updates the value of the account IP access toggle setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AccountIpAccessEnable` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`AccountIpAccessEnable` + \ No newline at end of file diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst index 59376793b..e9359d907 100644 --- a/docs/account/settings/esm_enablement_account.rst +++ b/docs/account/settings/esm_enablement_account.rst @@ -34,9 +34,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`EsmEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/index.rst b/docs/account/settings/index.rst index abf97c6a0..9ffe7694e 100644 --- a/docs/account/settings/index.rst +++ b/docs/account/settings/index.rst @@ -12,5 +12,6 @@ Manage security settings for Accounts and Workspaces settings csp_enablement_account disable_legacy_features + enable_ip_access_lists esm_enablement_account personal_compute \ No newline at end of file diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst index 00ccf3012..54e958a28 100644 --- a/docs/account/settings/personal_compute.rst +++ b/docs/account/settings/personal_compute.rst @@ -54,9 +54,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. 
+ + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`PersonalComputeSetting` \ No newline at end of file diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst index 3df647279..abf1c0e45 100644 --- a/docs/account/settings/settings.rst +++ b/docs/account/settings/settings.rst @@ -25,6 +25,12 @@ provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS. + .. py:property:: enable_ip_access_lists + :type: EnableIpAccessListsAPI + + Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or + disable restricted access based on IP addresses. + .. py:property:: esm_enablement_account :type: EsmEnablementAccountAPI diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index 2d522c625..2214e2ac9 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -190,14 +190,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateAppDeploymentRequest - :members: - :undoc-members: - -.. autoclass:: CreateAppRequest - :members: - :undoc-members: - .. autoclass:: GetAppPermissionLevelsResponse :members: :undoc-members: @@ -217,7 +209,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: StopAppRequest :members: :undoc-members: - -.. autoclass:: UpdateAppRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 25deb0a18..590fd693e 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -57,6 +57,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: BudgetPolicy + :members: + :undoc-members: + .. autoclass:: CreateBillingUsageDashboardRequest :members: :undoc-members: @@ -85,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateBudgetPolicyRequest + :members: + :undoc-members: + .. autoclass:: CreateLogDeliveryConfigurationParams :members: :undoc-members: @@ -93,6 +101,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteResponse + :members: + :undoc-members: + .. py:class:: DeliveryStatus The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account. @@ -116,6 +128,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Filter + :members: + :undoc-members: + .. autoclass:: GetBillingUsageDashboardResponse :members: :undoc-members: @@ -124,10 +140,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: LimitConfig + :members: + :undoc-members: + .. autoclass:: ListBudgetConfigurationsResponse :members: :undoc-members: +.. autoclass:: ListBudgetPoliciesResponse + :members: + :undoc-members: + .. py:class:: LogDeliveryConfigStatus Status of log delivery configuration. 
Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. @@ -175,6 +199,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SortSpec + :members: + :undoc-members: + +.. py:class:: SortSpecField + + .. py:attribute:: POLICY_NAME + :value: "POLICY_NAME" + .. autoclass:: UpdateBudgetConfigurationBudget :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index cb6399348..17d23b223 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -69,6 +69,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: AwsIamRole + :members: + :undoc-members: + .. autoclass:: AwsIamRoleRequest :members: :undoc-members: @@ -77,6 +81,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: AzureActiveDirectoryToken + :members: + :undoc-members: + +.. autoclass:: AzureManagedIdentity + :members: + :undoc-members: + .. autoclass:: AzureManagedIdentityRequest :members: :undoc-members: @@ -101,49 +113,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: CatalogInfoSecurableKind - - Kind of catalog securable. - - .. py:attribute:: CATALOG_DELTASHARING - :value: "CATALOG_DELTASHARING" - - .. py:attribute:: CATALOG_FOREIGN_BIGQUERY - :value: "CATALOG_FOREIGN_BIGQUERY" - - .. py:attribute:: CATALOG_FOREIGN_DATABRICKS - :value: "CATALOG_FOREIGN_DATABRICKS" - - .. py:attribute:: CATALOG_FOREIGN_MYSQL - :value: "CATALOG_FOREIGN_MYSQL" - - .. 
py:attribute:: CATALOG_FOREIGN_POSTGRESQL - :value: "CATALOG_FOREIGN_POSTGRESQL" - - .. py:attribute:: CATALOG_FOREIGN_REDSHIFT - :value: "CATALOG_FOREIGN_REDSHIFT" - - .. py:attribute:: CATALOG_FOREIGN_SNOWFLAKE - :value: "CATALOG_FOREIGN_SNOWFLAKE" - - .. py:attribute:: CATALOG_FOREIGN_SQLDW - :value: "CATALOG_FOREIGN_SQLDW" - - .. py:attribute:: CATALOG_FOREIGN_SQLSERVER - :value: "CATALOG_FOREIGN_SQLSERVER" - - .. py:attribute:: CATALOG_INTERNAL - :value: "CATALOG_INTERNAL" - - .. py:attribute:: CATALOG_STANDARD - :value: "CATALOG_STANDARD" - - .. py:attribute:: CATALOG_SYSTEM - :value: "CATALOG_SYSTEM" - - .. py:attribute:: CATALOG_SYSTEM_DELTASHARING - :value: "CATALOG_SYSTEM_DELTASHARING" - .. py:class:: CatalogIsolationMode Whether the current securable is accessible from all workspaces or a specific set of workspaces. @@ -181,8 +150,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ColumnTypeName - Name of type (INT, STRUCT, MAP, etc.). - .. py:attribute:: ARRAY :value: "ARRAY" @@ -246,53 +213,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USER_DEFINED_TYPE :value: "USER_DEFINED_TYPE" + .. py:attribute:: VARIANT + :value: "VARIANT" + .. autoclass:: ConnectionInfo :members: :undoc-members: -.. py:class:: ConnectionInfoSecurableKind - - Kind of connection securable. - - .. py:attribute:: CONNECTION_BIGQUERY - :value: "CONNECTION_BIGQUERY" - - .. py:attribute:: CONNECTION_BUILTIN_HIVE_METASTORE - :value: "CONNECTION_BUILTIN_HIVE_METASTORE" - - .. py:attribute:: CONNECTION_DATABRICKS - :value: "CONNECTION_DATABRICKS" - - .. py:attribute:: CONNECTION_EXTERNAL_HIVE_METASTORE - :value: "CONNECTION_EXTERNAL_HIVE_METASTORE" - - .. py:attribute:: CONNECTION_GLUE - :value: "CONNECTION_GLUE" - - .. py:attribute:: CONNECTION_HTTP_BEARER - :value: "CONNECTION_HTTP_BEARER" - - .. py:attribute:: CONNECTION_MYSQL - :value: "CONNECTION_MYSQL" - - .. 
py:attribute:: CONNECTION_ONLINE_CATALOG - :value: "CONNECTION_ONLINE_CATALOG" - - .. py:attribute:: CONNECTION_POSTGRESQL - :value: "CONNECTION_POSTGRESQL" - - .. py:attribute:: CONNECTION_REDSHIFT - :value: "CONNECTION_REDSHIFT" - - .. py:attribute:: CONNECTION_SNOWFLAKE - :value: "CONNECTION_SNOWFLAKE" - - .. py:attribute:: CONNECTION_SQLDW - :value: "CONNECTION_SQLDW" - - .. py:attribute:: CONNECTION_SQLSERVER - :value: "CONNECTION_SQLSERVER" - .. py:class:: ConnectionType The type of connection. @@ -342,6 +269,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateCredentialRequest + :members: + :undoc-members: + .. autoclass:: CreateExternalLocation :members: :undoc-members: @@ -373,7 +304,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CreateFunctionSecurityType - Function security type. + The security type of the function. .. py:attribute:: DEFINER :value: "DEFINER" @@ -403,10 +334,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateOnlineTableRequest - :members: - :undoc-members: - .. autoclass:: CreateRegisteredModelRequest :members: :undoc-members: @@ -431,6 +358,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CredentialInfo + :members: + :undoc-members: + +.. py:class:: CredentialPurpose + + .. py:attribute:: SERVICE + :value: "SERVICE" + + .. py:attribute:: STORAGE + :value: "STORAGE" + .. py:class:: CredentialType The type of credential. @@ -441,6 +380,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USERNAME_PASSWORD :value: "USERNAME_PASSWORD" +.. autoclass:: CredentialValidationResult + :members: + :undoc-members: + .. 
autoclass:: CurrentWorkspaceBindings :members: :undoc-members: @@ -518,6 +461,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WORKDAY_RAAS_FORMAT :value: "WORKDAY_RAAS_FORMAT" +.. autoclass:: DatabricksGcpServiceAccount + :members: + :undoc-members: + .. autoclass:: DatabricksGcpServiceAccountRequest :members: :undoc-members: @@ -530,6 +477,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteCredentialResponse + :members: + :undoc-members: + .. autoclass:: DeleteResponse :members: :undoc-members: @@ -636,7 +587,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: FunctionInfoSecurityType - Function security type. + The security type of the function. .. py:attribute:: DEFINER :value: "DEFINER" @@ -683,6 +634,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenerateTemporaryServiceCredentialAzureOptions + :members: + :undoc-members: + +.. autoclass:: GenerateTemporaryServiceCredentialGcpOptions + :members: + :undoc-members: + +.. autoclass:: GenerateTemporaryServiceCredentialRequest + :members: + :undoc-members: + .. autoclass:: GenerateTemporaryTableCredentialRequest :members: :undoc-members: @@ -696,6 +659,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CATALOG :value: "CATALOG" + .. py:attribute:: CREDENTIAL + :value: "CREDENTIAL" + .. py:attribute:: EXTERNAL_LOCATION :value: "EXTERNAL_LOCATION" @@ -722,8 +688,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: IsolationMode - Whether the current securable is accessible from all workspaces or a specific set of workspaces. - .. 
py:attribute:: ISOLATION_MODE_ISOLATED :value: "ISOLATION_MODE_ISOLATED" @@ -746,6 +710,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListCredentialsResponse + :members: + :undoc-members: + .. autoclass:: ListExternalLocationsResponse :members: :undoc-members: @@ -1070,6 +1038,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CREATE_FOREIGN_CATALOG :value: "CREATE_FOREIGN_CATALOG" + .. py:attribute:: CREATE_FOREIGN_SECURABLE + :value: "CREATE_FOREIGN_SECURABLE" + .. py:attribute:: CREATE_FUNCTION :value: "CREATE_FUNCTION" @@ -1185,6 +1156,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ACTIVE :value: "ACTIVE" + .. py:attribute:: DEGRADED + :value: "DEGRADED" + .. py:attribute:: DELETING :value: "DELETING" @@ -1236,9 +1210,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CATALOG :value: "CATALOG" + .. py:attribute:: CLEAN_ROOM + :value: "CLEAN_ROOM" + .. py:attribute:: CONNECTION :value: "CONNECTION" + .. py:attribute:: CREDENTIAL + :value: "CREDENTIAL" + .. py:attribute:: EXTERNAL_LOCATION :value: "EXTERNAL_LOCATION" @@ -1379,6 +1359,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEW :value: "VIEW" +.. autoclass:: TemporaryCredentials + :members: + :undoc-members: + .. autoclass:: TriggeredUpdateStatus :members: :undoc-members: @@ -1396,6 +1380,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CATALOG :value: "CATALOG" + .. py:attribute:: CREDENTIAL + :value: "CREDENTIAL" + .. py:attribute:: EXTERNAL_LOCATION :value: "EXTERNAL_LOCATION" @@ -1410,6 +1397,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: UpdateCredentialRequest + :members: + :undoc-members: + .. autoclass:: UpdateExternalLocation :members: :undoc-members: @@ -1476,6 +1467,27 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ValidateCredentialRequest + :members: + :undoc-members: + +.. autoclass:: ValidateCredentialResponse + :members: + :undoc-members: + +.. py:class:: ValidateCredentialResult + + An enum representing the result of the file operation + + .. py:attribute:: FAIL + :value: "FAIL" + + .. py:attribute:: PASS + :value: "PASS" + + .. py:attribute:: SKIP + :value: "SKIP" + .. autoclass:: ValidateStorageCredential :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst new file mode 100644 index 000000000..85ec98250 --- /dev/null +++ b/docs/dbdataclasses/cleanrooms.rst @@ -0,0 +1,158 @@ +Clean Rooms +=========== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.cleanrooms`` module. + +.. py:currentmodule:: databricks.sdk.service.cleanrooms +.. autoclass:: CleanRoom + :members: + :undoc-members: + +.. py:class:: CleanRoomAccessRestricted + + .. py:attribute:: CSP_MISMATCH + :value: "CSP_MISMATCH" + + .. py:attribute:: NO_RESTRICTION + :value: "NO_RESTRICTION" + +.. autoclass:: CleanRoomAsset + :members: + :undoc-members: + +.. py:class:: CleanRoomAssetAssetType + + .. py:attribute:: FOREIGN_TABLE + :value: "FOREIGN_TABLE" + + .. py:attribute:: NOTEBOOK_FILE + :value: "NOTEBOOK_FILE" + + .. py:attribute:: TABLE + :value: "TABLE" + + .. py:attribute:: VIEW + :value: "VIEW" + + .. py:attribute:: VOLUME + :value: "VOLUME" + +.. autoclass:: CleanRoomAssetForeignTable + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetForeignTableLocalDetails + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetNotebook + :members: + :undoc-members: + +.. 
py:class:: CleanRoomAssetStatusEnum + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: PERMISSION_DENIED + :value: "PERMISSION_DENIED" + +.. autoclass:: CleanRoomAssetTable + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetTableLocalDetails + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetView + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetViewLocalDetails + :members: + :undoc-members: + +.. autoclass:: CleanRoomAssetVolumeLocalDetails + :members: + :undoc-members: + +.. autoclass:: CleanRoomCollaborator + :members: + :undoc-members: + +.. autoclass:: CleanRoomNotebookTaskRun + :members: + :undoc-members: + +.. autoclass:: CleanRoomOutputCatalog + :members: + :undoc-members: + +.. py:class:: CleanRoomOutputCatalogOutputCatalogStatus + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: NOT_CREATED + :value: "NOT_CREATED" + + .. py:attribute:: NOT_ELIGIBLE + :value: "NOT_ELIGIBLE" + +.. autoclass:: CleanRoomRemoteDetail + :members: + :undoc-members: + +.. py:class:: CleanRoomStatusEnum + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + +.. autoclass:: CollaboratorJobRunInfo + :members: + :undoc-members: + +.. autoclass:: ComplianceSecurityProfile + :members: + :undoc-members: + +.. autoclass:: CreateCleanRoomOutputCatalogResponse + :members: + :undoc-members: + +.. autoclass:: DeleteCleanRoomAssetResponse + :members: + :undoc-members: + +.. autoclass:: DeleteResponse + :members: + :undoc-members: + +.. autoclass:: ListCleanRoomAssetsResponse + :members: + :undoc-members: + +.. autoclass:: ListCleanRoomNotebookTaskRunsResponse + :members: + :undoc-members: + +.. autoclass:: ListCleanRoomsResponse + :members: + :undoc-members: + +.. 
autoclass:: UpdateCleanRoomRequest + :members: + :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 0066f0374..b90ec99f7 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -299,6 +299,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CustomPolicyTag + :members: + :undoc-members: + .. autoclass:: DataPlaneEventDetails :members: :undoc-members: @@ -316,10 +320,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: DataSecurityMode Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. 
Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled. + .. py:attribute:: DATA_SECURITY_MODE_AUTO + :value: "DATA_SECURITY_MODE_AUTO" + + .. py:attribute:: DATA_SECURITY_MODE_DEDICATED + :value: "DATA_SECURITY_MODE_DEDICATED" + + .. py:attribute:: DATA_SECURITY_MODE_STANDARD + :value: "DATA_SECURITY_MODE_STANDARD" + .. py:attribute:: LEGACY_PASSTHROUGH :value: "LEGACY_PASSTHROUGH" @@ -485,6 +499,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EventType + .. py:attribute:: ADD_NODES_FAILED + :value: "ADD_NODES_FAILED" + + .. py:attribute:: AUTOMATIC_CLUSTER_UPDATE + :value: "AUTOMATIC_CLUSTER_UPDATE" + + .. py:attribute:: AUTOSCALING_BACKOFF + :value: "AUTOSCALING_BACKOFF" + + .. py:attribute:: AUTOSCALING_FAILED + :value: "AUTOSCALING_FAILED" + .. py:attribute:: AUTOSCALING_STATS_REPORT :value: "AUTOSCALING_STATS_REPORT" @@ -782,6 +808,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Kind + + The kind of compute described by this compute specification. 
+ Depending on `kind`, different validations and default values will be applied. + The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`. + + .. py:attribute:: CLASSIC_PREVIEW + :value: "CLASSIC_PREVIEW" + .. py:class:: Language .. py:attribute:: PYTHON diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index 91de6ccb2..114bd1f5b 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -4,15 +4,11 @@ Dashboards These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.dashboards`` module. .. py:currentmodule:: databricks.sdk.service.dashboards -.. autoclass:: CreateDashboardRequest +.. autoclass:: CancelQueryExecutionResponse :members: :undoc-members: -.. autoclass:: CreateScheduleRequest - :members: - :undoc-members: - -.. autoclass:: CreateSubscriptionRequest +.. autoclass:: CancelQueryExecutionResponseStatus :members: :undoc-members: @@ -29,6 +25,59 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DASHBOARD_VIEW_BASIC :value: "DASHBOARD_VIEW_BASIC" +.. py:class:: DataType + + .. py:attribute:: DATA_TYPE_ARRAY + :value: "DATA_TYPE_ARRAY" + + .. py:attribute:: DATA_TYPE_BIG_INT + :value: "DATA_TYPE_BIG_INT" + + .. py:attribute:: DATA_TYPE_BINARY + :value: "DATA_TYPE_BINARY" + + .. py:attribute:: DATA_TYPE_BOOLEAN + :value: "DATA_TYPE_BOOLEAN" + + .. py:attribute:: DATA_TYPE_DATE + :value: "DATA_TYPE_DATE" + + .. py:attribute:: DATA_TYPE_DECIMAL + :value: "DATA_TYPE_DECIMAL" + + .. py:attribute:: DATA_TYPE_DOUBLE + :value: "DATA_TYPE_DOUBLE" + + .. py:attribute:: DATA_TYPE_FLOAT + :value: "DATA_TYPE_FLOAT" + + .. py:attribute:: DATA_TYPE_INT + :value: "DATA_TYPE_INT" + + .. py:attribute:: DATA_TYPE_INTERVAL + :value: "DATA_TYPE_INTERVAL" + + .. py:attribute:: DATA_TYPE_MAP + :value: "DATA_TYPE_MAP" + + .. 
py:attribute:: DATA_TYPE_SMALL_INT + :value: "DATA_TYPE_SMALL_INT" + + .. py:attribute:: DATA_TYPE_STRING + :value: "DATA_TYPE_STRING" + + .. py:attribute:: DATA_TYPE_STRUCT + :value: "DATA_TYPE_STRUCT" + + .. py:attribute:: DATA_TYPE_TIMESTAMP + :value: "DATA_TYPE_TIMESTAMP" + + .. py:attribute:: DATA_TYPE_TINY_INT + :value: "DATA_TYPE_TINY_INT" + + .. py:attribute:: DATA_TYPE_VOID + :value: "DATA_TYPE_VOID" + .. autoclass:: DeleteScheduleResponse :members: :undoc-members: @@ -37,6 +86,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Empty + :members: + :undoc-members: + +.. autoclass:: ExecutePublishedDashboardQueryRequest + :members: + :undoc-members: + +.. autoclass:: ExecuteQueryResponse + :members: + :undoc-members: + .. autoclass:: GenieAttachment :members: :undoc-members: @@ -65,6 +126,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetPublishedDashboardEmbeddedResponse + :members: + :undoc-members: + .. py:class:: LifecycleState .. py:attribute:: ACTIVE @@ -166,6 +231,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION" + .. py:attribute:: NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE + :value: "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE" + .. py:attribute:: NO_QUERY_TO_VISUALIZE_EXCEPTION :value: "NO_QUERY_TO_VISUALIZE_EXCEPTION" @@ -187,6 +255,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SQL_EXECUTION_EXCEPTION :value: "SQL_EXECUTION_EXCEPTION" + .. py:attribute:: STOP_PROCESS_DUE_TO_AUTO_REGENERATE + :value: "STOP_PROCESS_DUE_TO_AUTO_REGENERATE" + .. 
py:attribute:: TABLES_MISSING_EXCEPTION :value: "TABLES_MISSING_EXCEPTION" @@ -210,7 +281,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: MessageStatus - MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled. + MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. 
* `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled. .. py:attribute:: ASKING_AI :value: "ASKING_AI" @@ -233,6 +304,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: FILTERING_CONTEXT :value: "FILTERING_CONTEXT" + .. py:attribute:: PENDING_WAREHOUSE + :value: "PENDING_WAREHOUSE" + .. py:attribute:: QUERY_RESULT_EXPIRED :value: "QUERY_RESULT_EXPIRED" @@ -243,6 +317,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PendingStatus + :members: + :undoc-members: + +.. autoclass:: PollQueryStatusResponse + :members: + :undoc-members: + +.. autoclass:: PollQueryStatusResponseData + :members: + :undoc-members: + .. autoclass:: PublishRequest :members: :undoc-members: @@ -255,6 +341,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryResponseStatus + :members: + :undoc-members: + +.. autoclass:: QuerySchema + :members: + :undoc-members: + +.. autoclass:: QuerySchemaColumn + :members: + :undoc-members: + .. autoclass:: Result :members: :undoc-members: @@ -287,22 +385,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TextAttachment +.. autoclass:: SuccessStatus :members: :undoc-members: -.. autoclass:: TrashDashboardResponse - :members: - :undoc-members: - -.. autoclass:: UnpublishDashboardResponse +.. autoclass:: TextAttachment :members: :undoc-members: -.. autoclass:: UpdateDashboardRequest +.. autoclass:: TrashDashboardResponse :members: :undoc-members: -.. 
autoclass:: UpdateScheduleRequest +.. autoclass:: UnpublishDashboardResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index 643da3d47..6df58ae4e 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -12,10 +12,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Actor + :members: + :undoc-members: + +.. autoclass:: CheckPolicyResponse + :members: + :undoc-members: + .. autoclass:: ComplexValue :members: :undoc-members: +.. autoclass:: ConsistencyToken + :members: + :undoc-members: + .. autoclass:: DeleteResponse :members: :undoc-members: @@ -242,6 +254,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RequestAuthzIdentity + + Defines the identity to be used for authZ of the request on the server side. See one pager for more information: http://go/acl/service-identity + + .. py:attribute:: REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY + :value: "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY" + + .. py:attribute:: REQUEST_AUTHZ_IDENTITY_USER_CONTEXT + :value: "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT" + +.. autoclass:: ResourceInfo + :members: + :undoc-members: + .. autoclass:: ResourceMeta :members: :undoc-members: diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst index 987bee7f5..3ecb9c13f 100644 --- a/docs/dbdataclasses/index.rst +++ b/docs/dbdataclasses/index.rst @@ -8,6 +8,7 @@ Dataclasses apps billing catalog + cleanrooms compute dashboards files diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 3aa0db043..e85322a66 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -28,6 +28,95 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: CleanRoomTaskRunLifeCycleState + + Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to remove coupling with jobs API definition + + .. py:attribute:: BLOCKED + :value: "BLOCKED" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: RUN_LIFE_CYCLE_STATE_UNSPECIFIED + :value: "RUN_LIFE_CYCLE_STATE_UNSPECIFIED" + + .. py:attribute:: SKIPPED + :value: "SKIPPED" + + .. py:attribute:: TERMINATED + :value: "TERMINATED" + + .. py:attribute:: TERMINATING + :value: "TERMINATING" + + .. py:attribute:: WAITING_FOR_RETRY + :value: "WAITING_FOR_RETRY" + +.. py:class:: CleanRoomTaskRunResultState + + Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid cyclic dependency. + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: DISABLED + :value: "DISABLED" + + .. py:attribute:: EVICTED + :value: "EVICTED" + + .. py:attribute:: EXCLUDED + :value: "EXCLUDED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: MAXIMUM_CONCURRENT_RUNS_REACHED + :value: "MAXIMUM_CONCURRENT_RUNS_REACHED" + + .. py:attribute:: RUN_RESULT_STATE_UNSPECIFIED + :value: "RUN_RESULT_STATE_UNSPECIFIED" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + + .. py:attribute:: SUCCESS_WITH_FAILURES + :value: "SUCCESS_WITH_FAILURES" + + .. py:attribute:: TIMEDOUT + :value: "TIMEDOUT" + + .. py:attribute:: UPSTREAM_CANCELED + :value: "UPSTREAM_CANCELED" + + .. py:attribute:: UPSTREAM_EVICTED + :value: "UPSTREAM_EVICTED" + + .. py:attribute:: UPSTREAM_FAILED + :value: "UPSTREAM_FAILED" + +.. autoclass:: CleanRoomTaskRunState + :members: + :undoc-members: + +.. autoclass:: CleanRoomsNotebookTask + :members: + :undoc-members: + +.. 
autoclass:: CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput + :members: + :undoc-members: + .. autoclass:: ClusterInstance :members: :undoc-members: @@ -317,7 +406,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: JobsHealthMetric Specifies the health metric that is being evaluated for a particular health rule. - * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Private Preview. + * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview. .. py:attribute:: RUN_DURATION_SECONDS :value: "RUN_DURATION_SECONDS" @@ -369,6 +458,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: OutputSchemaInfo + :members: + :undoc-members: + .. py:class:: PauseStatus .. py:attribute:: PAUSED @@ -377,6 +470,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: UNPAUSED :value: "UNPAUSED" +.. py:class:: PerformanceTarget + + PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be. The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget). + + .. py:attribute:: COST_OPTIMIZED + :value: "COST_OPTIMIZED" + + .. py:attribute:: PERFORMANCE_OPTIMIZED + :value: "PERFORMANCE_OPTIMIZED" + .. autoclass:: PeriodicTriggerConfiguration :members: :undoc-members: @@ -802,6 +905,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was successfully canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. 
* `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now + .. py:attribute:: BUDGET_POLICY_LIMIT_EXCEEDED + :value: "BUDGET_POLICY_LIMIT_EXCEEDED" + .. 
py:attribute:: CANCELED :value: "CANCELED" @@ -900,7 +1006,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: TriggerType The type of trigger that fired this run. - * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. + * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job run. .. py:attribute:: FILE_ARRIVAL :value: "FILE_ARRIVAL" diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index bb48967db..c1029d842 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -29,6 +29,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ASSET_TYPE_NOTEBOOK :value: "ASSET_TYPE_NOTEBOOK" + .. py:attribute:: ASSET_TYPE_PARTNER_INTEGRATION + :value: "ASSET_TYPE_PARTNER_INTEGRATION" + .. 
autoclass:: BatchGetListingsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index 6265f6648..10202e55e 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -24,10 +24,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DataPlaneInfo - :members: - :undoc-members: - .. autoclass:: DeleteCustomAppIntegrationOutput :members: :undoc-members: @@ -40,6 +36,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: FederationPolicy + :members: + :undoc-members: + .. autoclass:: GetCustomAppIntegrationOutput :members: :undoc-members: @@ -60,10 +60,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListFederationPoliciesResponse + :members: + :undoc-members: + .. autoclass:: ListServicePrincipalSecretsResponse :members: :undoc-members: +.. autoclass:: OidcFederationPolicy + :members: + :undoc-members: + .. autoclass:: PublishedAppOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 9f419f160..903cb52ff 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -20,6 +20,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DayOfWeek + + Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified all days of the week will be used. + + .. py:attribute:: FRIDAY + :value: "FRIDAY" + + .. py:attribute:: MONDAY + :value: "MONDAY" + + .. py:attribute:: SATURDAY + :value: "SATURDAY" + + .. py:attribute:: SUNDAY + :value: "SUNDAY" + + .. py:attribute:: THURSDAY + :value: "THURSDAY" + + .. py:attribute:: TUESDAY + :value: "TUESDAY" + + .. 
py:attribute:: WEDNESDAY + :value: "WEDNESDAY" + .. autoclass:: DeletePipelineResponse :members: :undoc-members: @@ -269,6 +294,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RestartWindow + :members: + :undoc-members: + +.. autoclass:: RunAs + :members: + :undoc-members: + .. autoclass:: SchemaSpec :members: :undoc-members: diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 7990eae96..4c909d488 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -106,6 +106,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VPC :value: "VPC" +.. autoclass:: ExternalCustomerInfo + :members: + :undoc-members: + .. autoclass:: GcpKeyInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 3deefc873..abaeb5355 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -22,8 +22,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AiGatewayGuardrailPiiBehaviorBehavior - Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned. - .. py:attribute:: BLOCK :value: "BLOCK" @@ -44,8 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AiGatewayRateLimitKey - Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. - .. 
py:attribute:: ENDPOINT :value: "ENDPOINT" @@ -54,8 +50,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AiGatewayRateLimitRenewalPeriod - Renewal period field for a rate limit. Currently, only 'minute' is supported. - .. py:attribute:: MINUTE :value: "MINUTE" @@ -69,8 +63,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AmazonBedrockConfigBedrockProvider - The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. - .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -128,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DataPlaneInfo + :members: + :undoc-members: + .. autoclass:: DatabricksModelServingConfig :members: :undoc-members: @@ -173,8 +169,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointStateConfigUpdate - The state of an endpoint's config update. This informs the user if the pending_config is in progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails. - .. py:attribute:: IN_PROGRESS :value: "IN_PROGRESS" @@ -189,8 +183,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointStateReady - The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY. - .. py:attribute:: NOT_READY :value: "NOT_READY" @@ -201,18 +193,41 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: EndpointTags + :members: + :undoc-members: + .. autoclass:: ExportMetricsResponse :members: :undoc-members: +.. autoclass:: ExternalFunctionRequest + :members: + :undoc-members: + +.. py:class:: ExternalFunctionRequestHttpMethod + + .. py:attribute:: DELETE + :value: "DELETE" + + .. py:attribute:: GET + :value: "GET" + + .. py:attribute:: PATCH + :value: "PATCH" + + .. py:attribute:: POST + :value: "POST" + + .. py:attribute:: PUT + :value: "PUT" + .. autoclass:: ExternalModel :members: :undoc-members: .. py:class:: ExternalModelProvider - The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", - .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -257,6 +272,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: HttpRequestResponse + :members: + :undoc-members: + .. autoclass:: ListEndpointsResponse :members: :undoc-members: @@ -281,10 +300,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PutAiGatewayRequest + :members: + :undoc-members: + .. autoclass:: PutAiGatewayResponse :members: :undoc-members: +.. autoclass:: PutRequest + :members: + :undoc-members: + .. autoclass:: PutResponse :members: :undoc-members: @@ -316,8 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RateLimitKey - Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. - .. py:attribute:: ENDPOINT :value: "ENDPOINT" @@ -326,8 +351,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RateLimitRenewalPeriod - Renewal period field for a serving endpoint rate limit. 
Currently, only 'minute' is supported. - .. py:attribute:: MINUTE :value: "MINUTE" @@ -353,8 +376,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelInputWorkloadSize - The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0. - .. py:attribute:: LARGE :value: "LARGE" @@ -366,9 +387,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelInputWorkloadType - The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. - [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types - .. py:attribute:: CPU :value: "CPU" @@ -398,8 +416,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelStateDeployment - The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the served entity was previously in a ready state but no longer is and is attempting to recover. DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. 
DEPLOYMENT_FAILED indicates that there was an error trying to bring up the served entity (e.g container image build failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in bringing up another served entity under the same endpoint and config version. - .. py:attribute:: ABORTED :value: "ABORTED" @@ -437,8 +453,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServingEndpointDetailedPermissionLevel - The permission level of the principal making the request. - .. py:attribute:: CAN_MANAGE :value: "CAN_MANAGE" @@ -477,6 +491,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ServingModelWorkloadType + + .. py:attribute:: CPU + :value: "CPU" + + .. py:attribute:: GPU_LARGE + :value: "GPU_LARGE" + + .. py:attribute:: GPU_MEDIUM + :value: "GPU_MEDIUM" + + .. py:attribute:: GPU_SMALL + :value: "GPU_SMALL" + + .. py:attribute:: MULTIGPU_MEDIUM + :value: "MULTIGPU_MEDIUM" + .. autoclass:: TrafficConfig :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 12043e3c5..2325c4023 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -4,6 +4,37 @@ Settings These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.settings`` module. .. py:currentmodule:: databricks.sdk.service.settings +.. autoclass:: AccountIpAccessEnable + :members: + :undoc-members: + +.. autoclass:: AibiDashboardEmbeddingAccessPolicy + :members: + :undoc-members: + +.. py:class:: AibiDashboardEmbeddingAccessPolicyAccessPolicyType + + .. py:attribute:: ALLOW_ALL_DOMAINS + :value: "ALLOW_ALL_DOMAINS" + + .. py:attribute:: ALLOW_APPROVED_DOMAINS + :value: "ALLOW_APPROVED_DOMAINS" + + .. 
py:attribute:: DENY_ALL_DOMAINS + :value: "DENY_ALL_DOMAINS" + +.. autoclass:: AibiDashboardEmbeddingAccessPolicySetting + :members: + :undoc-members: + +.. autoclass:: AibiDashboardEmbeddingApprovedDomains + :members: + :undoc-members: + +.. autoclass:: AibiDashboardEmbeddingApprovedDomainsSetting + :members: + :undoc-members: + .. autoclass:: AutomaticClusterUpdateSetting :members: :undoc-members: @@ -108,9 +139,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: HIPAA :value: "HIPAA" + .. py:attribute:: HITRUST + :value: "HITRUST" + .. py:attribute:: IRAP_PROTECTED :value: "IRAP_PROTECTED" + .. py:attribute:: ISMAP + :value: "ISMAP" + .. py:attribute:: ITAR_EAR :value: "ITAR_EAR" @@ -188,6 +225,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteAccountIpAccessEnableResponse + :members: + :undoc-members: + +.. autoclass:: DeleteAibiDashboardEmbeddingAccessPolicySettingResponse + :members: + :undoc-members: + +.. autoclass:: DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse + :members: + :undoc-members: + .. autoclass:: DeleteDefaultNamespaceSettingResponse :members: :undoc-members: @@ -249,6 +298,83 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EgressNetworkPolicy + :members: + :undoc-members: + +.. autoclass:: EgressNetworkPolicyInternetAccessPolicy + :members: + :undoc-members: + +.. autoclass:: EgressNetworkPolicyInternetAccessPolicyInternetDestination + :members: + :undoc-members: + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + + The filtering protocol used by the DP. For private and public preview, SEG will only support TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. 
In the future, users may be able to select HTTP filtering (i.e. SNI based filtering, filtering by FQDN). + + .. py:attribute:: TCP + :value: "TCP" + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType + + .. py:attribute:: FQDN + :value: "FQDN" + +.. autoclass:: EgressNetworkPolicyInternetAccessPolicyLogOnlyMode + :members: + :undoc-members: + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType + + .. py:attribute:: ALL_SERVICES + :value: "ALL_SERVICES" + + .. py:attribute:: SELECTED_SERVICES + :value: "SELECTED_SERVICES" + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType + + The values should match the list of workloads used in networkconfig.proto + + .. py:attribute:: DBSQL + :value: "DBSQL" + + .. py:attribute:: ML_SERVING + :value: "ML_SERVING" + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyRestrictionMode + + At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link. + + .. py:attribute:: FULL_ACCESS + :value: "FULL_ACCESS" + + .. py:attribute:: PRIVATE_ACCESS_ONLY + :value: "PRIVATE_ACCESS_ONLY" + + .. py:attribute:: RESTRICTED_ACCESS + :value: "RESTRICTED_ACCESS" + +.. autoclass:: EgressNetworkPolicyInternetAccessPolicyStorageDestination + :members: + :undoc-members: + +.. py:class:: EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType + + .. py:attribute:: AWS_S3 + :value: "AWS_S3" + + .. py:attribute:: AZURE_STORAGE + :value: "AZURE_STORAGE" + + .. py:attribute:: CLOUDFLARE_R2 + :value: "CLOUDFLARE_R2" + + .. py:attribute:: GOOGLE_CLOUD_STORAGE + :value: "GOOGLE_CLOUD_STORAGE" + .. 
autoclass:: EmailConfig :members: :undoc-members: @@ -540,9 +666,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN" + .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY + :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" + .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN :value: "AZURE_ACTIVE_DIRECTORY_TOKEN" +.. autoclass:: UpdateAccountIpAccessEnableRequest + :members: + :undoc-members: + +.. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest + :members: + :undoc-members: + +.. autoclass:: UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest + :members: + :undoc-members: + .. autoclass:: UpdateAutomaticClusterUpdateSettingRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index ded587fe5..ed4a4c006 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -14,117 +14,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TOKEN :value: "TOKEN" -.. autoclass:: CentralCleanRoomInfo - :members: - :undoc-members: - -.. autoclass:: CleanRoomAssetInfo - :members: - :undoc-members: - -.. autoclass:: CleanRoomCatalog - :members: - :undoc-members: - -.. autoclass:: CleanRoomCatalogUpdate - :members: - :undoc-members: - -.. autoclass:: CleanRoomCollaboratorInfo - :members: - :undoc-members: - -.. autoclass:: CleanRoomInfo - :members: - :undoc-members: - -.. autoclass:: CleanRoomNotebookInfo - :members: - :undoc-members: - -.. autoclass:: CleanRoomTableInfo - :members: - :undoc-members: - -.. autoclass:: ColumnInfo - :members: - :undoc-members: - -.. autoclass:: ColumnMask - :members: - :undoc-members: - -.. py:class:: ColumnTypeName - - Name of type (INT, STRUCT, MAP, etc.). - - .. py:attribute:: ARRAY - :value: "ARRAY" - - .. 
py:attribute:: BINARY - :value: "BINARY" - - .. py:attribute:: BOOLEAN - :value: "BOOLEAN" - - .. py:attribute:: BYTE - :value: "BYTE" - - .. py:attribute:: CHAR - :value: "CHAR" - - .. py:attribute:: DATE - :value: "DATE" - - .. py:attribute:: DECIMAL - :value: "DECIMAL" - - .. py:attribute:: DOUBLE - :value: "DOUBLE" - - .. py:attribute:: FLOAT - :value: "FLOAT" - - .. py:attribute:: INT - :value: "INT" - - .. py:attribute:: INTERVAL - :value: "INTERVAL" - - .. py:attribute:: LONG - :value: "LONG" - - .. py:attribute:: MAP - :value: "MAP" - - .. py:attribute:: NULL - :value: "NULL" - - .. py:attribute:: SHORT - :value: "SHORT" - - .. py:attribute:: STRING - :value: "STRING" - - .. py:attribute:: STRUCT - :value: "STRUCT" - - .. py:attribute:: TABLE_TYPE - :value: "TABLE_TYPE" - - .. py:attribute:: TIMESTAMP - :value: "TIMESTAMP" - - .. py:attribute:: TIMESTAMP_NTZ - :value: "TIMESTAMP_NTZ" - - .. py:attribute:: USER_DEFINED_TYPE - :value: "USER_DEFINED_TYPE" - -.. autoclass:: CreateCleanRoom - :members: - :undoc-members: - .. autoclass:: CreateProvider :members: :undoc-members: @@ -153,10 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListCleanRoomsResponse - :members: - :undoc-members: - .. autoclass:: ListProviderSharesResponse :members: :undoc-members: @@ -177,14 +62,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PartitionSpecificationPartition + :members: + :undoc-members: + .. autoclass:: PartitionValue :members: :undoc-members: .. py:class:: PartitionValueOp - The operator to apply for the value. - .. py:attribute:: EQUAL :value: "EQUAL" @@ -223,6 +110,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CREATE_FOREIGN_CATALOG :value: "CREATE_FOREIGN_CATALOG" + .. 
py:attribute:: CREATE_FOREIGN_SECURABLE + :value: "CREATE_FOREIGN_SECURABLE" + .. py:attribute:: CREATE_FUNCTION :value: "CREATE_FUNCTION" @@ -377,6 +267,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo The type of the data object. + .. py:attribute:: FEATURE_SPEC + :value: "FEATURE_SPEC" + + .. py:attribute:: FUNCTION + :value: "FUNCTION" + .. py:attribute:: MATERIALIZED_VIEW :value: "MATERIALIZED_VIEW" @@ -435,10 +331,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UPDATE :value: "UPDATE" -.. autoclass:: UpdateCleanRoom - :members: - :undoc-members: - .. autoclass:: UpdatePermissionsResponse :members: :undoc-members: @@ -451,10 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateResponse - :members: - :undoc-members: - .. autoclass:: UpdateShare :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 1657146c3..c63fe7cd2 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -114,8 +114,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CHANNEL_NAME_PREVIEW :value: "CHANNEL_NAME_PREVIEW" - .. py:attribute:: CHANNEL_NAME_UNSPECIFIED - :value: "CHANNEL_NAME_UNSPECIFIED" + .. py:attribute:: CHANNEL_NAME_PREVIOUS + :value: "CHANNEL_NAME_PREVIOUS" + +.. autoclass:: ClientConfig + :members: + :undoc-members: .. autoclass:: ColumnInfo :members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index 9ff3eb66b..bd0785db4 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -157,6 +157,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: JUPYTER :value: "JUPYTER" + .. py:attribute:: RAW + :value: "RAW" + .. 
py:attribute:: R_MARKDOWN :value: "R_MARKDOWN" diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index 5c32beffe..6ebfa7bab 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -248,6 +248,7 @@ class Generator: Package("dashboards", "Dashboards", "Manage Lakeview dashboards"), Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"), Package("apps", "Apps", "Build custom applications on Databricks"), + Package("cleanrooms", "Clean Rooms", "Manage clean rooms and their assets and task runs"), ] def __init__(self): @@ -266,11 +267,22 @@ def _load_mapping(self) -> dict[str, Tag]: pkgs = {p.name: p for p in self.packages} spec = json.loads(self._openapi_spec()) for tag in spec['tags']: + is_account = tag.get('x-databricks-is-accounts', False) + # Unique identifier for the tag. Note that the service name may not be unique + key = 'a' if is_account else 'w' + parent_service = tag.get('x-databricks-parent-service') + if parent_service: + # SDK generation removes the "account" prefix from account services + clean_parent_service = parent_service.lower().removeprefix("account") + key = f"{key}.{clean_parent_service}" + + key = f"{key}.{tag['x-databricks-service']}".lower() + t = Tag(name=tag['name'], service=tag['x-databricks-service'], is_account=is_account, package=pkgs[tag['x-databricks-package']]) - mapping[tag['name']] = t + mapping[key] = t return mapping @staticmethod @@ -359,7 +371,7 @@ def service_docs(self, client_inst, client_prefix: str) -> list[ServiceDoc]: service_name=service_name, class_name=class_name, doc=class_doc, - tag=self._get_tag_name(service_inst.__class__.__name__, service_name), + tag=self._get_tag_name(service_inst.__class__.__name__, client_prefix, service_name), methods=self.class_methods(service_inst), property=self.class_properties(service_inst))) return all @@ -375,13 +387,19 @@ def 
_make_folder_if_not_exists(folder): def write_dataclass_docs(self): self._make_folder_if_not_exists(f'{__dir__}/dbdataclasses') + all_packages = [] for pkg in self.packages: - module = importlib.import_module(f'databricks.sdk.service.{pkg.name}') + try: + module = importlib.import_module(f'databricks.sdk.service.{pkg.name}') + except ModuleNotFoundError: + print(f'No module found for {pkg.name}, continuing') + continue + all_packages.append(pkg.name) all_members = [name for name, _ in inspect.getmembers(module, predicate=self._should_document)] doc = DataclassesDoc(package=pkg, dataclasses=sorted(all_members)) with open(f'{__dir__}/dbdataclasses/{pkg.name}.rst', 'w') as f: f.write(doc.as_rst()) - all = "\n ".join(sorted([p.name for p in self.packages])) + all = "\n ".join(sorted(all_packages)) with open(f'{__dir__}/dbdataclasses/index.rst', 'w') as f: f.write(f''' Dataclasses @@ -392,13 +410,13 @@ def write_dataclass_docs(self): {all}''') - def _get_tag_name(self, class_name, service_name) -> Tag: + def _get_tag_name(self, class_name, client_prefix, service_name) -> Tag: if class_name[-3:] == 'Ext': # ClustersExt, DbfsExt, WorkspaceExt, but not ExternalLocations class_name = class_name.replace('Ext', 'API') class_name = class_name[:-3] - for tag_name, t in self.mapping.items(): - if t.service.lower() == str(class_name).lower(): + for key, t in self.mapping.items(): + if key == f'{client_prefix}.{str(class_name).lower()}': return t raise KeyError(f'Cannot find {class_name} / {service_name} tag') diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst index 774e75b8b..af7229f34 100644 --- a/docs/workspace/apps/apps.rst +++ b/docs/workspace/apps/apps.rst @@ -7,26 +7,22 @@ Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on. - .. 
py:method:: create(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> Wait[App] + .. py:method:: create( [, app: Optional[App], no_compute: Optional[bool]]) -> Wait[App] Create an app. Creates a new app. - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - :param resources: List[:class:`AppResource`] (optional) - Resources for the app. + :param app: :class:`App` (optional) + :param no_compute: bool (optional) + If true, the app will not be started after creation. :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. - .. py:method:: create_and_wait(name: str [, description: Optional[str], resources: Optional[List[AppResource]], timeout: datetime.timedelta = 0:20:00]) -> App + .. py:method:: create_and_wait( [, app: Optional[App], no_compute: Optional[bool], timeout: datetime.timedelta = 0:20:00]) -> App .. py:method:: delete(name: str) -> App @@ -41,7 +37,7 @@ :returns: :class:`App` - .. py:method:: deploy(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str]]) -> Wait[AppDeployment] + .. py:method:: deploy(app_name: str [, app_deployment: Optional[AppDeployment]]) -> Wait[AppDeployment] Create an app deployment. @@ -49,23 +45,14 @@ :param app_name: str The name of the app. - :param deployment_id: str (optional) - The unique id of the deployment. - :param mode: :class:`AppDeploymentMode` (optional) - The mode of which the deployment will manage the source code. - :param source_code_path: str (optional) - The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. 
- The former refers to the original source code location of the app in the workspace during deployment - creation, whereas the latter provides a system generated stable snapshotted source code path used by - the deployment. + :param app_deployment: :class:`AppDeployment` (optional) :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. - .. py:method:: deploy_and_wait(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment + .. py:method:: deploy_and_wait(app_name: str [, app_deployment: Optional[AppDeployment], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment .. py:method:: get(name: str) -> App @@ -152,7 +139,8 @@ Set app permissions. - Sets permissions on an app. Apps can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param app_name: str The app for which to get or manage permissions. @@ -195,7 +183,7 @@ .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App - .. py:method:: update(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> App + .. py:method:: update(name: str [, app: Optional[App]]) -> App Update an app. @@ -204,10 +192,7 @@ :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. - :param description: str (optional) - The description of the app. - :param resources: List[:class:`AppResource`] (optional) - Resources for the app. 
+ :param app: :class:`App` (optional) :returns: :class:`App` diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 200168ee6..1d6b6dc2a 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -143,7 +143,7 @@ :returns: Iterator over :class:`CatalogInfo` - .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo + .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo Usage: @@ -178,6 +178,8 @@ Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the catalog. + :param options: Dict[str,str] (optional) + A map of key-value properties attached to the securable. :param owner: str (optional) Username of current owner of catalog. :param properties: Dict[str,str] (optional) diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst new file mode 100644 index 000000000..3927e6351 --- /dev/null +++ b/docs/workspace/catalog/credentials.rst @@ -0,0 +1,193 @@ +``w.credentials``: Credentials +============================== +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: CredentialsAPI + + A credential represents an authentication and authorization mechanism for accessing services on your cloud + tenant. Each credential is subject to Unity Catalog access-control policies that control which users and + groups can access the credential. 
+ + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` + privilege. The user who creates the credential can delegate ownership to another user or group to manage + permissions on it. + + .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo + + Create a credential. + + Creates a new credential. The type of credential to be created is determined by the **purpose** field, + which should be either **SERVICE** or **STORAGE**. + + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for + storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. + + :param name: str + The credential name. The name must be unique among storage and service credentials within the + metastore. + :param aws_iam_role: :class:`AwsIamRole` (optional) + The AWS IAM role configuration + :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) + The Azure managed identity configuration. + :param azure_service_principal: :class:`AzureServicePrincipal` (optional) + The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + :param comment: str (optional) + Comment associated with the credential. + :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) + GCP long-lived credential. Databricks-created Google Cloud Storage service account. + :param purpose: :class:`CredentialPurpose` (optional) + Indicates the purpose of the credential. + :param read_only: bool (optional) + Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**. + :param skip_validation: bool (optional) + Optional. Supplying true to this argument skips validation of the created set of credentials. + + :returns: :class:`CredentialInfo` + + + .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]]) + + Delete a credential. + + Deletes a service or storage credential from the metastore. The caller must be an owner of the + credential. + + :param name_arg: str + Name of the credential. + :param force: bool (optional) + Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**). + + + + + .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials + + Generate a temporary service credential. + + Returns a set of temporary credentials generated using the specified service credential. The caller + must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. + + :param credential_name: str + The name of the service credential used to generate a temporary credential + :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) + The Azure cloud options to customize the requested temporary credential + :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) + The GCP cloud options to customize the requested temporary credential + + :returns: :class:`TemporaryCredentials` + + + .. py:method:: get_credential(name_arg: str) -> CredentialInfo + + Get a credential. + + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the + owner of the credential, or have any permission on the credential. + + :param name_arg: str + Name of the credential. 
+ + :returns: :class:`CredentialInfo` + + + .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo] + + List credentials. + + Gets an array of credentials (as __CredentialInfo__ objects). + + The array is limited to only the credentials that the caller has permission to access. If the caller + is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific + ordering of the elements in the array. + + :param max_results: int (optional) + Maximum number of credentials to return. - If not set, the default max page size is used. - When set + to a value greater than 0, the page length is the minimum of this value and a server-configured + value. - When set to 0, the page length is set to a server-configured value (recommended). - When + set to a value less than 0, an invalid parameter error is returned. + :param page_token: str (optional) + Opaque token to retrieve the next page of results. + :param purpose: :class:`CredentialPurpose` (optional) + Return only credentials for the specified purpose. + + :returns: Iterator over :class:`CredentialInfo` + + + .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo + + Update a credential. + + Updates a service or storage credential on the metastore. + + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. + If the caller is a metastore admin, only the __owner__ field can be changed. 
+ + :param name_arg: str + Name of the credential. + :param aws_iam_role: :class:`AwsIamRole` (optional) + The AWS IAM role configuration + :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) + The Azure managed identity configuration. + :param azure_service_principal: :class:`AzureServicePrincipal` (optional) + The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + :param comment: str (optional) + Comment associated with the credential. + :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) + GCP long-lived credential. Databricks-created Google Cloud Storage service account. + :param force: bool (optional) + Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**). + :param isolation_mode: :class:`IsolationMode` (optional) + Whether the current securable is accessible from all workspaces or a specific set of workspaces. + :param new_name: str (optional) + New name of credential. + :param owner: str (optional) + Username of current owner of credential. + :param read_only: bool (optional) + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. + :param skip_validation: bool (optional) + Supply true to this argument to skip validation of the updated credential. + + :returns: :class:`CredentialInfo` + + + .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse + + Validate a credential. + + Validates a credential. + + For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific + credential must be provided. 
+         +        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and +        __url__ needs to be provided. If only one of them is provided, it will be used for validation. And if +        both are provided, the __url__ will be used for validation, and __external_location_name__ will be +        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific +        credential must be provided. +         +        The caller must be a metastore admin or the credential owner or have the required permission on the +        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). +         +        :param aws_iam_role: :class:`AwsIamRole` (optional) +          The AWS IAM role configuration +        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) +          The Azure managed identity configuration. +        :param credential_name: str (optional) +          The name of an existing credential or long-lived cloud credential to validate. Required if no +          cloud-specific credential is provided. +        :param external_location_name: str (optional) +          The name of an existing external location to validate. Only applicable for storage credentials +          (purpose is **STORAGE**). +        :param purpose: :class:`CredentialPurpose` (optional) +          The purpose of the credential. This should only be used when the credential is specified. +        :param read_only: bool (optional) +          Whether the credential is only usable for read operations. Only applicable for storage credentials +          (purpose is **STORAGE**). +        :param url: str (optional) +          The external location url to validate. Only applicable when purpose is **STORAGE**.
+ + :returns: :class:`ValidateCredentialResponse` + \ No newline at end of file diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 365007b09..fc60b18f6 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -221,7 +221,6 @@ :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) - Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the external location. :param owner: str (optional) diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst index 1372ca5a1..471804098 100644 --- a/docs/workspace/catalog/index.rst +++ b/docs/workspace/catalog/index.rst @@ -10,6 +10,7 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas, artifact_allowlists catalogs connections + credentials external_locations functions grants diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst index 164832b0f..d0119657f 100644 --- a/docs/workspace/catalog/online_tables.rst +++ b/docs/workspace/catalog/online_tables.rst @@ -6,20 +6,23 @@ Online tables provide lower latency and higher QPS access to data from Delta tables. - .. py:method:: create( [, name: Optional[str], spec: Optional[OnlineTableSpec]]) -> OnlineTable + .. py:method:: create( [, table: Optional[OnlineTable]]) -> Wait[OnlineTable] Create an Online Table. Create a new Online Table. - :param name: str (optional) - Full three-part (catalog, schema, table) name of the table. - :param spec: :class:`OnlineTableSpec` (optional) - Specification of the online table. + :param table: :class:`OnlineTable` (optional) + Online Table information. 
- :returns: :class:`OnlineTable` + :returns: + Long-running operation waiter for :class:`OnlineTable`. + See :method:wait_get_online_table_active for more details. + .. py:method:: create_and_wait( [, table: Optional[OnlineTable], timeout: datetime.timedelta = 0:20:00]) -> OnlineTable + + .. py:method:: delete(name: str) Delete an Online Table. @@ -44,4 +47,6 @@ Full three-part (catalog, schema, table) name of the table. :returns: :class:`OnlineTable` - \ No newline at end of file + + + .. py:method:: wait_get_online_table_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[OnlineTable], None]]) -> OnlineTable diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 30b04654c..cac70a944 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -193,7 +193,6 @@ :param force: bool (optional) Force update even if there are dependent external locations or external tables. :param isolation_mode: :class:`IsolationMode` (optional) - Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the storage credential. :param owner: str (optional) diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 4cb458b46..15cfb1cac 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -100,7 +100,7 @@ :returns: :class:`TableInfo` - .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo] + .. 
py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo] Usage: @@ -151,6 +151,9 @@ Whether to omit the columns of the table from the response or not. :param omit_properties: bool (optional) Whether to omit the properties of the table from the response or not. + :param omit_username: bool (optional) + Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or + not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst new file mode 100644 index 000000000..fe282543a --- /dev/null +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -0,0 +1,94 @@ +``w.clean_room_assets``: Assets +=============================== +.. currentmodule:: databricks.sdk.service.cleanrooms + +.. py:class:: CleanRoomAssetsAPI + + Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the + clean room. + + .. py:method:: create(clean_room_name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset + + Create an asset. + + Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC + asset that is added through this method, the clean room owner must also have enough privilege on the + asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to + access the asset. Typically, you should use a group as the clean room owner. + + :param clean_room_name: str + Name of the clean room. 
+      :param asset: :class:`CleanRoomAsset` (optional) +        Metadata of the clean room asset +       +      :returns: :class:`CleanRoomAsset` +       + +    .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) + +        Delete an asset. +         +        Delete a clean room asset - unshare/remove the asset from the clean room. +         +        :param clean_room_name: str +          Name of the clean room. +        :param asset_type: :class:`CleanRoomAssetAssetType` +          The type of the asset. +        :param asset_full_name: str +          The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. +         +         +         + +    .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset + +        Get an asset. +         +        Get the details of a clean room asset by its type and full name. +         +        :param clean_room_name: str +          Name of the clean room. +        :param asset_type: :class:`CleanRoomAssetAssetType` +          The type of the asset. +        :param asset_full_name: str +          The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. +         +        :returns: :class:`CleanRoomAsset` +        + +    .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset] + +        List assets. +         +        :param clean_room_name: str +          Name of the clean room. +        :param page_token: str (optional) +          Opaque pagination token to go to next page based on previous query. +         +        :returns: Iterator over :class:`CleanRoomAsset` +        + +    .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset + +        Update an asset. +         +        Update a clean room asset. For example, updating the content of a notebook; changing the shared +        partitions of a table; etc. +         +        :param clean_room_name: str +          Name of the clean room. +        :param asset_type: :class:`CleanRoomAssetAssetType` +          The type of the asset. +        :param name: str +          A fully qualified name that uniquely identifies the asset within the clean room.
This is also the + name displayed in the clean room UI. + + For UC securable assets (tables, volumes, etc.), the format is + *shared_catalog*.*shared_schema*.*asset_name* + + For notebooks, the name is the notebook file name. + :param asset: :class:`CleanRoomAsset` (optional) + Metadata of the clean room asset + + :returns: :class:`CleanRoomAsset` + \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst new file mode 100644 index 000000000..dcf59037c --- /dev/null +++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst @@ -0,0 +1,25 @@ +``w.clean_room_task_runs``: Task Runs +===================================== +.. currentmodule:: databricks.sdk.service.cleanrooms + +.. py:class:: CleanRoomTaskRunsAPI + + Clean room task runs are the executions of notebooks in a clean room. + + .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun] + + List notebook task runs. + + List all the historical notebook task runs in a clean room. + + :param clean_room_name: str + Name of the clean room. + :param notebook_name: str (optional) + Notebook name + :param page_size: int (optional) + The maximum number of task runs to return + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` + \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst new file mode 100644 index 000000000..8ef5d8827 --- /dev/null +++ b/docs/workspace/cleanrooms/clean_rooms.rst @@ -0,0 +1,95 @@ +``w.clean_rooms``: Clean Rooms +============================== +.. currentmodule:: databricks.sdk.service.cleanrooms + +.. 
py:class:: CleanRoomsAPI + + A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting + environment where multiple parties can work together on sensitive enterprise data without direct access to + each other’s data. + + .. py:method:: create( [, clean_room: Optional[CleanRoom]]) -> CleanRoom + + Create a clean room. + + Create a new clean room with the specified collaborators. This method is asynchronous; the returned + name field inside the clean_room field can be used to poll the clean room status, using the + :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING + state, with only name, owner, comment, created_at and status populated. The clean room will be usable + once it enters an ACTIVE state. + + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. + + :param clean_room: :class:`CleanRoom` (optional) + + :returns: :class:`CleanRoom` + + + .. py:method:: create_output_catalog(clean_room_name: str [, output_catalog: Optional[CleanRoomOutputCatalog]]) -> CreateCleanRoomOutputCatalogResponse + + Create an output catalog. + + Create the output catalog of the clean room. + + :param clean_room_name: str + Name of the clean room. + :param output_catalog: :class:`CleanRoomOutputCatalog` (optional) + + :returns: :class:`CreateCleanRoomOutputCatalogResponse` + + + .. py:method:: delete(name: str) + + Delete a clean room. + + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other + collaborators have not deleted the clean room, they will still have the clean room in their metastore, + but it will be in a DELETED state and no operations other than deletion can be performed on it. + + :param name: str + Name of the clean room. + + + + + .. py:method:: get(name: str) -> CleanRoom + + Get a clean room. + + Get the details of a clean room given its name. 
+ + :param name: str + + :returns: :class:`CleanRoom` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom] + + List clean rooms. + + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are + returned. + + :param page_size: int (optional) + Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + + :returns: Iterator over :class:`CleanRoom` + + + .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom + + Update a clean room. + + Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** + privilege, or be metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. + + :param name: str + Name of the clean room. + :param clean_room: :class:`CleanRoom` (optional) + + :returns: :class:`CleanRoom` + \ No newline at end of file diff --git a/docs/workspace/cleanrooms/index.rst b/docs/workspace/cleanrooms/index.rst new file mode 100644 index 000000000..a979ac201 --- /dev/null +++ b/docs/workspace/cleanrooms/index.rst @@ -0,0 +1,12 @@ + +Clean Rooms +=========== + +Manage clean rooms and their assets and task runs + +.. toctree:: + :maxdepth: 1 + + clean_room_assets + clean_room_task_runs + clean_rooms \ No newline at end of file diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index 1cefc8ca6..65066964c 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -267,7 +267,8 @@ Set cluster policy permissions. - Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. 
Objects can inherit permissions from their root object. :param cluster_policy_id: str The cluster policy for which to get or manage permissions. diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index ac52edecb..4e97857eb 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -71,7 +71,7 @@ - .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] + .. 
py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] Usage: @@ -134,11 +134,11 @@ :param clone_from: :class:`CloneCluster` (optional) When specified, this clones libraries from a source cluster during the creation of a new cluster. :param cluster_log_conf: :class:`ClusterLogConf` (optional) - The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If - the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of - driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is - `$destination/$clusterId/executor`. + The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination every + `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination + of executor logs is `$destination/$clusterId/executor`. :param cluster_name: str (optional) Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -152,13 +152,19 @@ :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are - not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a - single user specified in `single_user_name`. Most programming languages, cluster features and data - governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be - shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data - and credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias + for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple + users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: + A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. 
+ Most programming languages, cluster features and data governance features are available in this + mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are + fully isolated so that they cannot see each other's data and credentials. Most data governance + features are supported in this mode. But programming languages and cluster features might be + limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -190,6 +196,17 @@ logs are sent to `//init_scripts`. :param instance_pool_id: str (optional) The optional ID of the instance pool to which the cluster belongs. + :param is_single_node: bool (optional) + This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, + and `num_workers` + :param kind: :class:`Kind` (optional) + The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`. :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -236,6 +253,11 @@ SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified. + :param use_ml_runtime: bool (optional) + This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not. 
:param workload_type: :class:`WorkloadType` (optional) :returns: @@ -243,7 +265,7 @@ See :method:wait_get_cluster_running for more details. - .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails + .. 
py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails .. py:method:: delete(cluster_id: str) -> Wait[ClusterDetails] @@ -292,7 +314,7 @@ .. py:method:: delete_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails - .. 
py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] + .. 
py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] Usage: @@ -341,7 +363,7 @@ Clusters created by the Databricks Jobs service cannot be edited. :param cluster_id: str - ID of the cluser + ID of the cluster :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -362,11 +384,11 @@ Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used. :param cluster_log_conf: :class:`ClusterLogConf` (optional) - The configuration for delivering spark logs to a long-term storage destination. Two kinds of - destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. 
If - the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of - driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is - `$destination/$clusterId/executor`. + The configuration for delivering spark logs to a long-term storage destination. Three kinds of + destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be + specified for one cluster. If the conf is given, the logs will be delivered to the destination every + `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination + of executor logs is `$destination/$clusterId/executor`. :param cluster_name: str (optional) Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -380,13 +402,19 @@ :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are - not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a - single user specified in `single_user_name`. Most programming languages, cluster features and data - governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be - shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data - and credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. + The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will + choose the most appropriate access mode depending on your compute configuration. * + `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. 
* `DATA_SECURITY_MODE_DEDICATED`: Alias + for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple + users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: + A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. + Most programming languages, cluster features and data governance features are available in this + mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are + fully isolated so that they cannot see each other's data and credentials. Most data governance + features are supported in this mode. But programming languages and cluster features might be + limited. The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: @@ -418,6 +446,17 @@ logs are sent to `//init_scripts`. :param instance_pool_id: str (optional) The optional ID of the instance pool to which the cluster belongs. + :param is_single_node: bool (optional) + This field can only be used with `kind`. + + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, + and `num_workers` + :param kind: :class:`Kind` (optional) + The kind of compute described by this compute specification. + + Depending on `kind`, different validations and default values will be applied. + + The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`. :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -464,6 +503,11 @@ SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. 
Up to 10 keys can be specified. + :param use_ml_runtime: bool (optional) + This field can only be used with `kind`. + + `effective_spark_version` is determined by `spark_version` (DBR release), this field + `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) :returns: @@ -471,7 +515,7 @@ See :method:wait_get_cluster_running for more details. - .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails + .. 
py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails .. py:method:: ensure_cluster_is_running(cluster_id: str) @@ -906,7 +950,8 @@ Set cluster permissions. - Sets permissions on a cluster. Clusters can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param cluster_id: str The cluster for which to get or manage permissions. diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst index 277844170..333c44938 100644 --- a/docs/workspace/compute/instance_pools.rst +++ b/docs/workspace/compute/instance_pools.rst @@ -245,7 +245,8 @@ Set instance pool permissions. - Sets permissions on an instance pool. 
Instance pools can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param instance_pool_id: str The instance pool for which to get or manage permissions. diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index 5581870b9..3908c6472 100644 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -80,6 +80,25 @@ :returns: :class:`GenieGetMessageQueryResultResponse` + .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse + + Get conversation message SQL query result by attachment id. + + Get the result of SQL query by attachment id. This is only available if a message has a query + attachment and the message status is `EXECUTING_QUERY`. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + + .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage] Start conversation. 
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst index 6d1565bb6..acea442bb 100644 --- a/docs/workspace/dashboards/index.rst +++ b/docs/workspace/dashboards/index.rst @@ -8,4 +8,6 @@ Manage Lakeview dashboards :maxdepth: 1 genie - lakeview \ No newline at end of file + lakeview + lakeview_embedded + query_execution diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index fe358063c..c37479dcb 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -7,47 +7,29 @@ These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete). - .. py:method:: create(display_name: str [, parent_path: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard + .. py:method:: create( [, dashboard: Optional[Dashboard]]) -> Dashboard Create dashboard. Create a draft dashboard. - :param display_name: str - The display name of the dashboard. - :param parent_path: str (optional) - The workspace path of the folder containing the dashboard. Includes leading slash and no trailing - slash. This field is excluded in List Dashboards responses. - :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. This field is excluded in List Dashboards - responses. Use the [get dashboard API] to retrieve an example response, which includes the - `serialized_dashboard` field. This field provides the structure of the JSON string that represents - the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - :param warehouse_id: str (optional) - The warehouse ID used to run the dashboard. + :param dashboard: :class:`Dashboard` (optional) :returns: :class:`Dashboard` - .. 
py:method:: create_schedule(dashboard_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule + .. py:method:: create_schedule(dashboard_id: str [, schedule: Optional[Schedule]]) -> Schedule Create dashboard schedule. :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. - :param cron_schedule: :class:`CronSchedule` - The cron expression describing the frequency of the periodic refresh for this schedule. - :param display_name: str (optional) - The display name for schedule. - :param pause_status: :class:`SchedulePauseStatus` (optional) - The status indicates whether this schedule is paused or not. + :param schedule: :class:`Schedule` (optional) :returns: :class:`Schedule` - .. py:method:: create_subscription(dashboard_id: str, schedule_id: str, subscriber: Subscriber) -> Subscription + .. py:method:: create_subscription(dashboard_id: str, schedule_id: str [, subscription: Optional[Subscription]]) -> Subscription Create schedule subscription. @@ -55,8 +37,7 @@ UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. - :param subscriber: :class:`Subscriber` - Subscriber details for users and destinations to be added as subscribers to the schedule. + :param subscription: :class:`Subscription` (optional) :returns: :class:`Subscription` @@ -112,7 +93,7 @@ Get the current published dashboard. :param dashboard_id: str - UUID identifying the dashboard to be published. + UUID identifying the published dashboard. :returns: :class:`PublishedDashboard` @@ -166,7 +147,7 @@ List dashboard schedules. :param dashboard_id: str - UUID identifying the dashboard to which the schedule belongs. + UUID identifying the dashboard to which the schedules belongs. :param page_size: int (optional) The number of schedules to return per page. 
:param page_token: str (optional) @@ -181,9 +162,9 @@ List schedule subscriptions. :param dashboard_id: str - UUID identifying the dashboard to which the subscription belongs. + UUID identifying the dashboard which the subscriptions belongs. :param schedule_id: str - UUID identifying the schedule to which the subscription belongs. + UUID identifying the schedule which the subscriptions belongs. :param page_size: int (optional) The number of subscriptions to return per page. :param page_token: str (optional) @@ -193,7 +174,7 @@ :returns: Iterator over :class:`Subscription` - .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str]]) -> Dashboard + .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard Migrate dashboard. @@ -205,6 +186,9 @@ Display name for the new Lakeview dashboard. :param parent_path: str (optional) The workspace path of the folder to contain the migrated Lakeview dashboard. + :param update_parameter_syntax: bool (optional) + Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax + (:param) when converting datasets in the dashboard. :returns: :class:`Dashboard` @@ -245,12 +229,12 @@ Unpublish the dashboard. :param dashboard_id: str - UUID identifying the dashboard to be published. + UUID identifying the published dashboard. - .. py:method:: update(dashboard_id: str [, display_name: Optional[str], etag: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard + .. py:method:: update(dashboard_id: str [, dashboard: Optional[Dashboard]]) -> Dashboard Update dashboard. @@ -258,25 +242,12 @@ :param dashboard_id: str UUID identifying the dashboard. - :param display_name: str (optional) - The display name of the dashboard. - :param etag: str (optional) - The etag for the dashboard. 
Can be optionally provided on updates to ensure that the dashboard has - not been modified since the last read. This field is excluded in List Dashboards responses. - :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. This field is excluded in List Dashboards - responses. Use the [get dashboard API] to retrieve an example response, which includes the - `serialized_dashboard` field. This field provides the structure of the JSON string that represents - the dashboard's layout and components. - - [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - :param warehouse_id: str (optional) - The warehouse ID used to run the dashboard. + :param dashboard: :class:`Dashboard` (optional) :returns: :class:`Dashboard` - .. py:method:: update_schedule(dashboard_id: str, schedule_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], etag: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule + .. py:method:: update_schedule(dashboard_id: str, schedule_id: str [, schedule: Optional[Schedule]]) -> Schedule Update dashboard schedule. @@ -284,15 +255,7 @@ UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. - :param cron_schedule: :class:`CronSchedule` - The cron expression describing the frequency of the periodic refresh for this schedule. - :param display_name: str (optional) - The display name for schedule. - :param etag: str (optional) - The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that - the schedule has not been modified since the last read, and can be optionally provided on delete. - :param pause_status: :class:`SchedulePauseStatus` (optional) - The status indicates whether this schedule is paused or not. 
+ :param schedule: :class:`Schedule` (optional) :returns: :class:`Schedule` \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst new file mode 100644 index 000000000..4c06031f5 --- /dev/null +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -0,0 +1,19 @@ +``w.lakeview_embedded``: Lakeview Embedded +========================================== +.. currentmodule:: databricks.sdk.service.dashboards + +.. py:class:: LakeviewEmbeddedAPI + + Token-based Lakeview APIs for embedding dashboards in external applications. + + .. py:method:: get_published_dashboard_embedded(dashboard_id: str) + + Read a published dashboard in an embedded ui. + + Get the current published dashboard within an embedded context. + + :param dashboard_id: str + UUID identifying the published dashboard. + + + \ No newline at end of file diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst new file mode 100644 index 000000000..5672183d9 --- /dev/null +++ b/docs/workspace/dashboards/query_execution.rst @@ -0,0 +1,46 @@ +``w.query_execution``: Query Execution +====================================== +.. currentmodule:: databricks.sdk.service.dashboards + +.. py:class:: QueryExecutionAPI + + Query execution APIs for AI / BI Dashboards + + .. py:method:: cancel_published_query_execution(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> CancelQueryExecutionResponse + + Cancel the results for a query for a published, embedded dashboard. + + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`CancelQueryExecutionResponse` + + + .. 
py:method:: execute_published_dashboard_query(dashboard_name: str, dashboard_revision_id: str [, override_warehouse_id: Optional[str]]) + + Execute a query for a published dashboard. + + :param dashboard_name: str + Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the + list of datasets, warehouse_id, and embedded_credentials + :param dashboard_revision_id: str + :param override_warehouse_id: str (optional) + A dashboard schedule can override the warehouse used as compute for processing the published + dashboard queries + + + + + .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse + + Poll the results for a query for a published, embedded dashboard. + + :param dashboard_name: str + :param dashboard_revision_id: str + :param tokens: List[str] (optional) + Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ + + :returns: :class:`PollQueryStatusResponse` + \ No newline at end of file diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst index db20b2192..0151fcce2 100644 --- a/docs/workspace/files/files.rst +++ b/docs/workspace/files/files.rst @@ -13,9 +13,12 @@ /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for - working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD, - PUT, and DELETE to manage files and directories specified using their URI path. The path is always - absolute. + working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, + and DELETE to manage files and directories specified using their URI path. The path is always absolute. + + Some Files API client features are currently experimental. 
To enable them, set + `enable_experimental_files_api_client = True` in your configuration profile or use the environment + variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html @@ -64,8 +67,8 @@ Download a file. - Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard HTTP file - download, not a JSON RPC. + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not + a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. :param file_path: str The absolute path of the file. diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst new file mode 100644 index 000000000..a5f1feeda --- /dev/null +++ b/docs/workspace/iam/access_control.rst @@ -0,0 +1,23 @@ +``w.access_control``: RbacService +================================= +.. currentmodule:: databricks.sdk.service.iam + +.. py:class:: AccessControlAPI + + Rule based Access Control for Databricks Resources. + + .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse + + Check access policy to a resource. 
+ + :param actor: :class:`Actor` + :param permission: str + :param resource: str + Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex: + (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default) + :param consistency_token: :class:`ConsistencyToken` + :param authz_identity: :class:`RequestAuthzIdentity` + :param resource_info: :class:`ResourceInfo` (optional) + + :returns: :class:`CheckPolicyResponse` + \ No newline at end of file diff --git a/docs/workspace/iam/index.rst b/docs/workspace/iam/index.rst index 2a98cc9ae..00a7f1fe7 100644 --- a/docs/workspace/iam/index.rst +++ b/docs/workspace/iam/index.rst @@ -7,6 +7,7 @@ Manage users, service principals, groups and their permissions in Accounts and W .. toctree:: :maxdepth: 1 + access_control account_access_control_proxy current_user groups diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 1f2fd2851..bf8f8e77f 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -153,7 +153,8 @@ Set object permissions. - Sets permissions on an object. Objects can inherit permissions from their parent objects or root + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root object. :param request_object_type: str diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst index 2eae834a2..616ef7b86 100644 --- a/docs/workspace/iam/users.rst +++ b/docs/workspace/iam/users.rst @@ -239,7 +239,8 @@ Set password permissions. - Sets permissions on all passwords. Passwords can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. 
:param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index 1b6c5708c..dc86a0e78 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -9,6 +9,7 @@ These APIs are available from WorkspaceClient apps/index catalog/index + cleanrooms/index compute/index dashboards/index files/index diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index b097c94c8..36f7d7d39 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -1,8 +1,8 @@ -``w.jobs``: Jobs -================ +``w.jobs``: Jobs (2.2) +====================== .. currentmodule:: databricks.sdk.service.jobs -.. py:class:: JobsAPI +.. py:class:: JobsExt The Jobs API allows you to create, edit, and delete jobs. @@ -120,7 +120,7 @@ .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run - .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse + .. 
py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], performance_target: Optional[PerformanceTarget], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse Usage: @@ -199,6 +199,7 @@ :param job_clusters: List[:class:`JobCluster`] (optional) A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. + If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. :param max_concurrent_runs: int (optional) An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful for example if you @@ -215,14 +216,16 @@ `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param performance_target: :class:`PerformanceTarget` (optional) + PerformanceTarget defines how performant or cost efficient the execution of run on serverless should + be. 
:param queue: :class:`QueueSettings` (optional) The queue settings of the job. :param run_as: :class:`JobRunAs` (optional) - Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If - not specified, the job/pipeline runs as the user who created the job/pipeline. + Write-only setting. Specifies the user or service principal that the job runs as. If not specified, + the job runs as the user who created the job. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an - error is thrown. + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -231,7 +234,9 @@ clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job. :param tasks: List[:class:`Task`] (optional) - A list of task specifications to be executed by this job. + A list of task specifications to be executed by this job. If more than 100 tasks are available, you + can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root + to determine if more results are available. :param timeout_seconds: int (optional) An optional timeout applied to each run of this job. A value of `0` means no timeout. :param trigger: :class:`TriggerSettings` (optional) @@ -316,7 +321,7 @@ :returns: :class:`ExportRunOutput` - .. py:method:: get(job_id: int) -> Job + .. py:method:: get(job_id: int [, page_token: Optional[str]]) -> Job Usage: @@ -352,8 +357,16 @@ Retrieves the details for a single job. + In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when + either exceeds 100 elements. 
Use the `next_page_token` field to check for more results and pass its + value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will + be empty on later pages. + :param job_id: int The canonical identifier of the job to retrieve information about. This field is required. + :param page_token: str (optional) + Use `next_page_token` returned from the previous GetJob to request the next page of the job's + sub-resources. :returns: :class:`Job` @@ -382,7 +395,7 @@ :returns: :class:`JobPermissions` - .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run + .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run Usage: @@ -415,9 +428,9 @@ w.jobs.delete_run(run_id=run.run_id) Get a single job run. - - Retrieve the metadata of a run. - + + Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all pages of tasks, iterations, job_clusters, job_parameters, and repair history. + :param run_id: int The canonical identifier of the run for which to retrieve the metadata. This field is required. :param include_history: bool (optional) @@ -425,9 +438,9 @@ :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. :param page_token: str (optional) - To list the next page or the previous page of job tasks, set this field to the value of the - `next_page_token` or `prev_page_token` returned in the GetJob response. - + To list the next page of job tasks, set this field to the value of the `next_page_token` returned in + the GetJob response. + :returns: :class:`Run` @@ -519,7 +532,8 @@ Retrieves a list of jobs. :param expand_tasks: bool (optional) - Whether to include task and cluster details in the response. + Whether to include task and cluster details in the response. 
Note that in API 2.2, only the first + 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters. :param limit: int (optional) The number of jobs to return. This value must be greater than 0 and less or equal to 100. The default value is 20. @@ -581,7 +595,8 @@ If completed_only is `true`, only completed runs are included in the results; otherwise, lists both active and completed runs. This field cannot be `true` when active_only is `true`. :param expand_tasks: bool (optional) - Whether to include task and cluster details in the response. + Whether to include task and cluster details in the response. Note that in API 2.2, only the first + 100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters. :param job_id: int (optional) The job for which to list runs. If omitted, the Jobs service lists runs from all jobs. :param limit: int (optional) @@ -661,8 +676,9 @@ in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing - information about job runs. + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` :param latest_repair_id: int (optional) @@ -791,7 +807,7 @@ - .. 
py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run] + .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run] Usage: @@ -854,8 +870,9 @@ in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing - information about job runs. + Use [Task parameter variables] to set parameters containing information about job runs. + + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` :param notebook_params: Dict[str,str] (optional) @@ -874,6 +891,13 @@ [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html + :param only: List[str] (optional) + A list of task keys to run inside of the job. 
If this field is not provided, all tasks in the job + will be run. :param performance_target: :class:`PerformanceTarget` (optional) + PerformanceTarget defines how performant or cost efficient the execution of the run on serverless + compute should be. For a RunNow request, the run will execute with these settings instead of the ones + defined in the job. :param pipeline_params: :class:`PipelineParams` (optional) Controls whether the pipeline should perform a full refresh :param python_named_params: Dict[str,str] (optional) @@ -919,14 +943,15 @@ See :method:wait_get_run_job_terminated_or_skipped for more details. - .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run + .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions Set job permissions. - Sets permissions on a job. Jobs can inherit permissions from their root object.
+ Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param job_id: str The job for which to get or manage permissions. diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index c09cfe353..44ceeef8c 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -578,7 +578,8 @@ Set experiment permissions. - Sets permissions on an experiment. Experiments can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param experiment_id: str The experiment for which to get or manage permissions. diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 8ac52916f..d08a85415 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -658,8 +658,8 @@ Set registered model permissions. - Sets permissions on a registered model. Registered models can inherit permissions from their root - object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param registered_model_id: str The registered model for which to get or manage permissions. diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 9801a200e..ec31991ef 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -79,7 +79,7 @@ :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. 
:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -93,6 +93,14 @@ List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. + :param run_as: :class:`RunAs` (optional) + Write-only setting, available only in Create/Update calls. Specifies the user or service principal + that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is + thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. @@ -324,7 +332,8 @@ Set pipeline permissions. - Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param pipeline_id: str The pipeline for which to get or manage permissions. @@ -376,7 +385,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -456,7 +465,7 @@ :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. 
:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -470,6 +479,14 @@ List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. + :param run_as: :class:`RunAs` (optional) + Write-only setting, available only in Create/Update calls. Specifies the user or service principal + that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. + + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is + thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. diff --git a/docs/workspace/provisioning/credentials.rst b/docs/workspace/provisioning/credentials.rst new file mode 100644 index 000000000..8f38d13c4 --- /dev/null +++ b/docs/workspace/provisioning/credentials.rst @@ -0,0 +1,123 @@ +``w.credentials``: Credential configurations +============================================ +.. currentmodule:: databricks.sdk.service.provisioning + +.. py:class:: CredentialsAPI + + These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account + service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the + new workspace. A credential configuration encapsulates this role information, and its ID is used when + creating a new workspace. + + .. 
py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential + + + Usage: + + .. code-block:: + + import os + import time + + from databricks.sdk import AccountClient + from databricks.sdk.service import provisioning + + a = AccountClient() + + role = a.credentials.create( + credentials_name=f'sdk-{time.time_ns()}', + aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( + role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) + + # cleanup + a.credentials.delete(credentials_id=role.credentials_id) + + Create credential configuration. + + Creates a Databricks credential configuration that represents cloud cross-account credentials for a + specified account. Databricks uses this to set up network infrastructure properly to host Databricks + clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account + ID) in the returned credential object, and configure the required access policy. + + Save the response's `credentials_id` field, which is the ID for your new credential configuration + object. + + For information about how to create a new workspace with this API, see [Create a new workspace using + the Account API] + + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html + + :param credentials_name: str + The human-readable name of the credential configuration object. + :param aws_credentials: :class:`CreateCredentialAwsCredentials` + + :returns: :class:`Credential` + + + .. py:method:: delete(credentials_id: str) + + Delete credential configuration. + + Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot + delete a credential that is associated with any workspace. + + :param credentials_id: str + Databricks Account API credential configuration ID + + + + + .. 
py:method:: get(credentials_id: str) -> Credential + + + Usage: + + .. code-block:: + + import os + import time + + from databricks.sdk import AccountClient + from databricks.sdk.service import provisioning + + a = AccountClient() + + role = a.credentials.create( + credentials_name=f'sdk-{time.time_ns()}', + aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( + role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) + + by_id = a.credentials.get(credentials_id=role.credentials_id) + + # cleanup + a.credentials.delete(credentials_id=role.credentials_id) + + Get credential configuration. + + Gets a Databricks credential configuration object for an account, both specified by ID. + + :param credentials_id: str + Databricks Account API credential configuration ID + + :returns: :class:`Credential` + + + .. py:method:: list() -> Iterator[Credential] + + + Usage: + + .. code-block:: + + from databricks.sdk import AccountClient + + a = AccountClient() + + configs = a.credentials.list() + + Get all credential configurations. + + Gets all Databricks credential configurations associated with an account specified by ID. + + :returns: Iterator over :class:`Credential` + \ No newline at end of file diff --git a/docs/workspace/provisioning/index.rst b/docs/workspace/provisioning/index.rst new file mode 100644 index 000000000..efe541424 --- /dev/null +++ b/docs/workspace/provisioning/index.rst @@ -0,0 +1,10 @@ + +Provisioning +============ + +Resource management for secure Databricks Workspace deployment, cross-account IAM roles, storage, encryption, networking and private access. + +.. 
toctree:: + :maxdepth: 1 + + credentials \ No newline at end of file diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index cbcbca964..f6bfe82f4 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -29,14 +29,17 @@ :returns: :class:`BuildLogsResponse` - .. py:method:: create(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] + .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new serving endpoint. :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. - :param config: :class:`EndpointCoreConfigInput` + :param ai_gateway: :class:`AiGatewayConfig` (optional) + The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported. + :param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. :param ai_gateway: :class:`AiGatewayConfig` (optional) The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are @@ -54,7 +57,7 @@ See :method:wait_get_serving_endpoint_not_updating for more details. - .. 
py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed + .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed .. py:method:: delete(name: str) @@ -62,7 +65,6 @@ Delete a serving endpoint. :param name: str - The name of the serving endpoint. This field is required. @@ -98,7 +100,7 @@ .. py:method:: get_open_ai_client() - .. py:method:: get_open_api(name: str) + .. py:method:: get_open_api(name: str) -> GetOpenApiResponse Get the schema for a serving endpoint. @@ -108,7 +110,7 @@ :param name: str The name of the serving endpoint that the served model belongs to. This field is required. - + :returns: :class:`GetOpenApiResponse` .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse @@ -136,6 +138,26 @@ :returns: :class:`ServingEndpointPermissions` + .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> Response + + Make external services call using the credentials stored in UC Connection. + **NOTE:** Experimental: This API may change or be removed in a future release without warning. + :param conn: str + The connection name to use. This is required to identify the external connection. + :param method: :class:`ExternalFunctionRequestHttpMethod` + The HTTP method to use (e.g., 'GET', 'POST'). This is required. + :param path: str + The relative path for the API endpoint. This is required. 
+ :param headers: Dict[str,str] (optional) + Additional headers for the request. If not provided, only auth headers from connections would be + passed. + :param json: Dict[str,str] (optional) + JSON payload for the request. + :param params: Dict[str,str] (optional) + Query parameters for the request. + :returns: :class:`Response` + + .. py:method:: list() -> Iterator[ServingEndpoint] Get all serving endpoints. @@ -157,7 +179,7 @@ :returns: :class:`ServerLogsResponse` - .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> Iterator[EndpointTag] + .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags Update tags of a serving endpoint. @@ -170,7 +192,7 @@ :param delete_tags: List[str] (optional) List of tag keys to delete - :returns: Iterator over :class:`EndpointTag` + :returns: :class:`EndpointTags` .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse @@ -192,8 +214,8 @@ Update AI Gateway of a serving endpoint. - Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently - supported. + Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned + throughput endpoints are currently supported. :param name: str The name of the serving endpoint whose AI Gateway is being updated. This field is required. @@ -266,8 +288,8 @@ Set serving endpoint permissions. - Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root - object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. @@ -288,14 +310,16 @@ The name of the serving endpoint to update. This field is required. 
:param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. + Note: this field is deprecated for creating new provisioned throughput endpoints, or updating + existing provisioned throughput endpoints that never have inference table configured; in these cases + please use AI Gateway to manage inference tables. :param served_entities: List[:class:`ServedEntityInput`] (optional) - A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served - entities. + The list of served entities under the serving endpoint config. :param served_models: List[:class:`ServedModelInput`] (optional) - (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A - serving endpoint can have up to 15 served models. + (Deprecated, use served_entities instead) The list of served models under the serving endpoint + config. :param traffic_config: :class:`TrafficConfig` (optional) - The traffic config defining how invocations to the serving endpoint should be routed. + The traffic configuration associated with the serving endpoint config. :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst new file mode 100644 index 000000000..66c621997 --- /dev/null +++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst @@ -0,0 +1,64 @@ +``w.settings.aibi_dashboard_embedding_access_policy``: AI/BI Dashboard Embedding Access Policy +============================================================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. 
py:class:: AibiDashboardEmbeddingAccessPolicyAPI + + Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the + workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS). + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse + + Delete the AI/BI dashboard embedding access policy. + + Delete the AI/BI dashboard embedding access policy, reverting back to the default. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting + + Retrieve the AI/BI dashboard embedding access policy. + + Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, + permitting AI/BI dashboards to be embedded on approved domains. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` + + + .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting + + Update the AI/BI dashboard embedding access policy. + + Updates the AI/BI dashboard embedding access policy at the workspace level. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` + \ No newline at end of file diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst new file mode 100644 index 000000000..0c9294130 --- /dev/null +++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst @@ -0,0 +1,65 @@ +``w.settings.aibi_dashboard_embedding_approved_domains``: AI/BI Dashboard Embedding Approved Domains +==================================================================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: AibiDashboardEmbeddingApprovedDomainsAPI + + Controls the list of domains approved to host the embedded AI/BI dashboards. 
The approved domains list + can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse + + Delete AI/BI dashboard embedding approved domains. + + Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default + empty list. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting + + Retrieve the list of domains approved to host embedded AI/BI dashboards. + + Retrieves the list of domains approved to host embedded AI/BI dashboards. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + + + .. 
py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting + + Update the list of domains approved to host embedded AI/BI dashboards. + + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the + current workspace access policy is not ALLOW_APPROVED_DOMAINS. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` + \ No newline at end of file diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst index 2219e1130..350e0e713 100644 --- a/docs/workspace/settings/automatic_cluster_update.rst +++ b/docs/workspace/settings/automatic_cluster_update.rst @@ -36,9 +36,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AutomaticClusterUpdateSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. 
To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`AutomaticClusterUpdateSetting` \ No newline at end of file diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst index f503830bc..855451b82 100644 --- a/docs/workspace/settings/compliance_security_profile.rst +++ b/docs/workspace/settings/compliance_security_profile.rst @@ -38,9 +38,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`ComplianceSecurityProfileSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`ComplianceSecurityProfileSetting` \ No newline at end of file diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst index 061a0e34e..960949930 100644 --- a/docs/workspace/settings/default_namespace.rst +++ b/docs/workspace/settings/default_namespace.rst @@ -72,9 +72,15 @@ restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DefaultNamespaceSetting` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst index c8baba3a7..a015e777f 100644 --- a/docs/workspace/settings/disable_legacy_access.rst +++ b/docs/workspace/settings/disable_legacy_access.rst @@ -53,9 +53,15 @@ This should always be set to true for Settings API. 
Added for AIP compliance. :param setting: :class:`DisableLegacyAccess` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyAccess` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst index ad11fa606..502111fe4 100644 --- a/docs/workspace/settings/disable_legacy_dbfs.rst +++ b/docs/workspace/settings/disable_legacy_dbfs.rst @@ -49,9 +49,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`DisableLegacyDbfs` \ No newline at end of file diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst index fe7668973..c9dfb547d 100644 --- a/docs/workspace/settings/enhanced_security_monitoring.rst +++ b/docs/workspace/settings/enhanced_security_monitoring.rst @@ -40,9 +40,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:returns: :class:`EnhancedSecurityMonitoringSetting` \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index 22655853b..c9e4f335d 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -11,6 +11,8 @@ Manage security settings for Accounts and Workspaces ip_access_lists notification_destinations settings + aibi_dashboard_embedding_access_policy + aibi_dashboard_embedding_approved_domains automatic_cluster_update compliance_security_profile default_namespace diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst index 29d947f55..8fb2d0c3c 100644 --- a/docs/workspace/settings/notification_destinations.rst +++ b/docs/workspace/settings/notification_destinations.rst @@ -65,6 +65,7 @@ required in the request body. :param id: str + UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst index 47660fda4..b025112cc 100644 --- a/docs/workspace/settings/restrict_workspace_admins.rst +++ b/docs/workspace/settings/restrict_workspace_admins.rst @@ -62,9 +62,15 @@ This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). 
The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :returns: :class:`RestrictWorkspaceAdminsSetting` \ No newline at end of file diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst index 588031926..aa806280e 100644 --- a/docs/workspace/settings/settings.rst +++ b/docs/workspace/settings/settings.rst @@ -6,6 +6,18 @@ Workspace Settings API allows users to manage settings at the workspace level. + .. py:property:: aibi_dashboard_embedding_access_policy + :type: AibiDashboardEmbeddingAccessPolicyAPI + + Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the + workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS). + + .. py:property:: aibi_dashboard_embedding_approved_domains + :type: AibiDashboardEmbeddingApprovedDomainsAPI + + Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list + can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + .. py:property:: automatic_cluster_update :type: AutomaticClusterUpdateAPI diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst index d030a432f..50dbe1328 100644 --- a/docs/workspace/settings/token_management.rst +++ b/docs/workspace/settings/token_management.rst @@ -53,7 +53,7 @@ Deletes a token, specified by its ID. :param token_id: str - The ID of the token to get. + The ID of the token to revoke. 
@@ -143,7 +143,8 @@ Set token permissions. - Sets permissions on all tokens. Tokens can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) diff --git a/docs/workspace/sharing/index.rst b/docs/workspace/sharing/index.rst index e012eb548..09452b490 100644 --- a/docs/workspace/sharing/index.rst +++ b/docs/workspace/sharing/index.rst @@ -7,7 +7,6 @@ Configure data sharing with Unity Catalog for providers, recipients, and shares .. toctree:: :maxdepth: 1 - clean_rooms providers recipient_activation recipients diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 7cf398ac0..7d27acc3d 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -44,7 +44,8 @@ :param comment: str (optional) Description about the provider. :param recipient_profile_str: str (optional) - This field is required when the __authentication_type__ is **TOKEN** or not provided. + This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** + or not provided. :returns: :class:`ProviderInfo` @@ -228,7 +229,8 @@ :param owner: str (optional) Username of Provider owner. :param recipient_profile_str: str (optional) - This field is required when the __authentication_type__ is **TOKEN** or not provided. + This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** + or not provided. :returns: :class:`ProviderInfo` \ No newline at end of file diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index 44f2042bb..76e1da171 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -39,7 +39,7 @@ Create a share recipient. 
Creates a new recipient with the delta sharing authentication type in the metastore. The caller must - be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore. + be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. :param name: str Name of Recipient. @@ -48,8 +48,8 @@ :param comment: str (optional) Description about the recipient. :param data_recipient_global_metastore_id: str (optional) - The global Unity Catalog metastore id provided by the data recipient. This field is required when - the __authentication_type__ is **DATABRICKS**. The identifier is of format + The global Unity Catalog metastore id provided by the data recipient. This field is only present + when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. :param expiration_time: int (optional) Expiration timestamp of the token, in epoch milliseconds. @@ -58,9 +58,11 @@ :param owner: str (optional) Username of the recipient owner. :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional) - Recipient properties as map of string key-value pairs. + Recipient properties as map of string key-value pairs. When provided in update request, the + specified properties will override the existing properties. To add and remove properties, one would + need to perform a read-modify-write. :param sharing_code: str (optional) - The one-time sharing code provided by the data recipient. This field is required when the + The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. :returns: :class:`RecipientInfo` @@ -174,7 +176,7 @@ The caller must be the owner of the recipient. :param name: str - The name of the recipient. + The name of the Recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. 
This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire @@ -224,7 +226,7 @@ :returns: :class:`GetRecipientSharePermissionsResponse` - .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) + .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) -> RecipientInfo Usage: @@ -259,7 +261,7 @@ :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) - New name for the recipient. + New name for the recipient. . :param owner: str (optional) Username of the recipient owner. :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional) @@ -267,5 +269,5 @@ specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. - + :returns: :class:`RecipientInfo` \ No newline at end of file diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index c552d5f80..c8d9c31ab 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -182,9 +182,15 @@ :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :param alert: :class:`UpdateAlertRequestAlert` (optional) :returns: :class:`Alert` diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index 97ea1014d..f22c7c96b 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -1,5 +1,5 @@ -``w.dashboards``: Dashboards -============================ +``w.dashboards``: Dashboards (legacy) +===================================== .. currentmodule:: databricks.sdk.service.sql .. py:class:: DashboardsAPI diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst index 728730209..bddb6a827 100644 --- a/docs/workspace/sql/index.rst +++ b/docs/workspace/sql/index.rst @@ -18,5 +18,6 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer query_history query_visualizations query_visualizations_legacy + redash_config statement_execution warehouses \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index 1f01c2f1d..959552850 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -151,9 +151,15 @@ :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). 
The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. :param query: :class:`UpdateQueryRequestQuery` (optional) :returns: :class:`Query` diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index 95095fb20..ac3d6c565 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -37,9 +37,15 @@ :param id: str :param update_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst new file mode 100644 index 000000000..9b4382dd5 --- /dev/null +++ b/docs/workspace/sql/redash_config.rst @@ -0,0 +1,14 @@ +``w.redash_config``: Redash Config +================================== +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: RedashConfigAPI + + Redash V2 service for workspace configurations (internal) + + .. py:method:: get_config() -> ClientConfig + + Read workspace configuration for Redash-v2. + + :returns: :class:`ClientConfig` + \ No newline at end of file diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 716fa4fdc..44f64b512 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -80,11 +80,10 @@ outstanding statement might have already completed execution when the cancel request arrives. Polling for status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur server-side, and cannot account for things such as caller delays and network - latency from caller to service. - The system will auto-close a statement after one hour if the client - stops polling and thus you must poll at least once an hour. - The results are only available for one hour - after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle - of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL - Execution API to cancel it. + latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least + once every 15 minutes. - The results are only available for one hour after success; polling does not + extend this. 
- The SQL Execution API must be used for the entire lifecycle of the statement. For example, + you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 58b8a3fc0..fd55d5b0c 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -315,7 +315,8 @@ Set SQL warehouse permissions. - Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param warehouse_id: str The SQL warehouse for which to get or manage permissions. diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst index 01b1c875f..5f3e3e290 100644 --- a/docs/workspace/workspace/repos.rst +++ b/docs/workspace/workspace/repos.rst @@ -62,7 +62,7 @@ Deletes the specified repo. :param repo_id: int - ID of the Git folder (repo) object in the workspace. + The ID for the corresponding repo to delete. @@ -157,7 +157,8 @@ Set repo permissions. - Sets permissions on a repo. Repos can inherit permissions from their root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. :param repo_id: str The repo for which to get or manage permissions. 
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 4aee0a2b6..595872deb 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -272,8 +272,9 @@ Set workspace object permissions. - Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent - objects or root object. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root + object. :param workspace_object_type: str The workspace object type for which to get or manage permissions. diff --git a/setup.py b/setup.py index 021f4e430..bc7327e31 100644 --- a/setup.py +++ b/setup.py @@ -8,55 +8,36 @@ with version_file.open('r') as f: exec(f.read(), version_data) -setup( - name="sync-databricks-sdk", - version=version_data["__version__"], - packages=find_packages(exclude=["tests", "*tests.*", "*tests"]), - package_data={"databricks.sdk": ["py.typed"]}, - python_requires=">=3.7", - install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"], - extras_require={ - "dev": [ - "pytest", - "pytest-cov", - "pytest-xdist", - "pytest-mock", - "yapf", - "pycodestyle", - "autoflake", - "isort", - "wheel", - "ipython", - "ipywidgets", - "requests-mock", - "pyfakefs", - "databricks-connect", - "pytest-rerunfailures", - "openai", - 'langchain-openai; python_version > "3.7"', - "httpx", - ], - "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], - }, - author="Sync Computing", - author_email="info@synccomputing.com", - description="Sync Fork Databricks SDK for Python (Beta)", - long_description=io.open("README.md", encoding="utf-8").read(), - long_description_content_type="text/markdown", - url="https://databricks-sdk-py.readthedocs.io", - keywords="databricks sdk", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended 
Audience :: Science/Research", - "Intended Audience :: System Administrators", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - ], -) +setup(name="sync-databricks-sdk", + version=version_data['__version__'], + packages=find_packages(exclude=["tests", "*tests.*", "*tests"]), + package_data = {"databricks.sdk": ["py.typed"]}, + python_requires=">=3.7", + install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"], + extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock", + "yapf", "pycodestyle", "autoflake", "isort", "wheel", + "ipython", "ipywidgets", "requests-mock", "pyfakefs", + "databricks-connect", "pytest-rerunfailures", "openai", + 'langchain-openai; python_version > "3.7"', "httpx"], + "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], + "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]}, + author="Sync Computing", + author_email="info@synccomputing.com", + description="Sync Fork Databricks SDK for Python (Beta)", + long_description=io.open("README.md", encoding="utf-8").read(), + long_description_content_type='text/markdown', + url="https://databricks-sdk-py.readthedocs.io", + keywords="databricks sdk", + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating 
System :: OS Independent"]) diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py index 0bf7f951d..3ee271778 100644 --- a/tests/integration/test_auth.py +++ b/tests/integration/test_auth.py @@ -133,15 +133,16 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib tasks = [] for v in dbr_versions: - t = Task(task_key=f'test_{v.key.replace(".", "_")}', - notebook_task=NotebookTask(notebook_path=notebook_path), - new_cluster=ClusterSpec( - spark_version=v.key, - num_workers=1, - instance_pool_id=instance_pool_id, - # GCP uses "custom" data security mode by default, which does not support UC. - data_security_mode=DataSecurityMode.SINGLE_USER), - libraries=[library]) + t = Task( + task_key=f'test_{v.key.replace(".", "_")}', + notebook_task=NotebookTask(notebook_path=notebook_path), + new_cluster=ClusterSpec( + spark_version=v.key, + num_workers=1, + instance_pool_id=instance_pool_id, + # GCP uses "custom" data security mode by default, which does not support UC. 
+ data_security_mode=DataSecurityMode.SINGLE_USER), + libraries=[library]) tasks.append(t) waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks) diff --git a/tests/integration/test_clusters.py b/tests/integration/test_clusters.py index 930989943..f3a9c6c89 100644 --- a/tests/integration/test_clusters.py +++ b/tests/integration/test_clusters.py @@ -41,7 +41,7 @@ def test_create_cluster(w, env_or_skip, random): def test_error_unmarshall(w, random): with pytest.raises(DatabricksError) as exc_info: - w.clusters.get('__non_existing__') + w.clusters.get('123__non_existing__') err = exc_info.value - assert 'Cluster __non_existing__ does not exist' in str(err) + assert 'Cluster 123__non_existing__ does not exist' in str(err) assert 'INVALID_PARAMETER_VALUE' == err.error_code diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py index e6e2a8668..e486f2282 100644 --- a/tests/integration/test_dbutils.py +++ b/tests/integration/test_dbutils.py @@ -192,20 +192,21 @@ def test_secrets(w, random): from databricks.sdk.runtime import dbutils + all_scopes = dbutils.secrets.listScopes() + assert random_scope in [scope.getName() for scope in all_scopes] + all_secrets = {} - for secret_scope in dbutils.secrets.listScopes(): - scope = secret_scope.name - for secret_metadata in dbutils.secrets.list(scope): - key = secret_metadata.key - try: - all_secrets[f'{scope}.{key}'] = dbutils.secrets.get(scope, key) - except DatabricksError as e: - if e.error_code == 'BAD_REQUEST': - pytest.skip('dbconnect is not enabled on this workspace') - raise e + for secret_metadata in dbutils.secrets.list(random_scope): + key = secret_metadata.key + try: + all_secrets[key] = dbutils.secrets.get(random_scope, key) + except DatabricksError as e: + if e.error_code == 'BAD_REQUEST': + pytest.skip('dbconnect is not enabled on this workspace') + raise e logger.info(f'After loading secret: {random_value}') logging.getLogger('databricks.sdk').info(f'After 
loading secret: {random_value}') - assert all_secrets[f'{random_scope}.{key_for_string}'] == random_value - assert all_secrets[f'{random_scope}.{key_for_bytes}'] == random_value + assert all_secrets[key_for_string] == random_value + assert all_secrets[key_for_bytes] == random_value diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py index 8fd5f8820..768752a75 100644 --- a/tests/integration/test_jobs.py +++ b/tests/integration/test_jobs.py @@ -17,18 +17,19 @@ def test_submitting_jobs(w, random, env_or_skip): with w.dbfs.open(py_on_dbfs, write=True, overwrite=True) as f: f.write(b'import time; time.sleep(10); print("Hello, World!")') - waiter = w.jobs.submit(run_name=f'py-sdk-{random(8)}', - tasks=[ - jobs.SubmitTask( - task_key='pi', - new_cluster=compute.ClusterSpec( - spark_version=w.clusters.select_spark_version(long_term_support=True), - # node_type_id=w.clusters.select_node_type(local_disk=True), - instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'), - num_workers=1), - spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'), - ) - ]) + waiter = w.jobs.submit( + run_name=f'py-sdk-{random(8)}', + tasks=[ + jobs.SubmitTask( + task_key='pi', + new_cluster=compute.ClusterSpec( + spark_version=w.clusters.select_spark_version(long_term_support=True), + # node_type_id=w.clusters.select_node_type(local_disk=True), + instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'), + num_workers=1), + spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'), + ) + ]) logging.info(f'starting to poll: {waiter.run_id}') diff --git a/tests/test_base_client.py b/tests/test_base_client.py index e9e7324a9..16a8ecfc4 100644 --- a/tests/test_base_client.py +++ b/tests/test_base_client.py @@ -1,18 +1,22 @@ +import io +import random from http.server import BaseHTTPRequestHandler -from typing import Iterator, List +from typing import Callable, Iterator, List, Optional, Tuple, Type +from unittest.mock import Mock import pytest -import 
requests +from requests import PreparedRequest, Response, Timeout from databricks.sdk import errors, useragent -from databricks.sdk._base_client import _BaseClient, _StreamingResponse +from databricks.sdk._base_client import (_BaseClient, _RawResponse, + _StreamingResponse) from databricks.sdk.core import DatabricksError from .clock import FakeClock from .fixture_server import http_fixture_server -class DummyResponse(requests.Response): +class DummyResponse(_RawResponse): _content: Iterator[bytes] _closed: bool = False @@ -276,3 +280,215 @@ def inner(h: BaseHTTPRequestHandler): assert 'foo' in res assert len(requests) == 2 + + +@pytest.mark.parametrize( + 'chunk_size,expected_chunks,data_size', + [ + (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks + (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks + (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk + ]) +def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size): + rng = random.Random(42) + test_data = bytes(rng.getrandbits(8) for _ in range(data_size)) + + content_chunks = [] + mock_response = Mock(spec=_RawResponse) + + def mock_iter_content(chunk_size: int, decode_unicode: bool): + # Simulate how requests would chunk the data. + for i in range(0, len(test_data), chunk_size): + chunk = test_data[i:i + chunk_size] + content_chunks.append(chunk) # track chunks for verification + yield chunk + + mock_response.iter_content = mock_iter_content + stream = _StreamingResponse(mock_response) + stream.set_chunk_size(chunk_size) + + # Read all data one byte at a time. 
+ received_data = b"" + while True: + chunk = stream.read(1) + if not chunk: + break + received_data += chunk + + assert received_data == test_data # all data was received correctly + assert len(content_chunks) == expected_chunks # correct number of chunks + assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size + + +def test_is_seekable_stream(): + client = _BaseClient() + + # Test various input types that are not streams. + assert not client._is_seekable_stream(None) # None + assert not client._is_seekable_stream("string data") # str + assert not client._is_seekable_stream(b"binary data") # bytes + assert not client._is_seekable_stream(["list", "data"]) # list + assert not client._is_seekable_stream(42) # int + + # Test non-seekable stream. + non_seekable = io.BytesIO(b"test data") + non_seekable.seekable = lambda: False + assert not client._is_seekable_stream(non_seekable) + + # Test seekable streams. + assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO + assert client._is_seekable_stream(io.StringIO("test data")) # StringIO + + # Test file objects. + with open(__file__, 'rb') as f: + assert client._is_seekable_stream(f) # File object + + # Test custom seekable stream. 
+ class CustomSeekableStream(io.IOBase): + + def seekable(self): + return True + + def seek(self, offset, whence=0): + return 0 + + def tell(self): + return 0 + + assert client._is_seekable_stream(CustomSeekableStream()) + + +class RetryTestCase: + + def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool, + expected_result: bytes): + self._data_provider = data_provider + self._offset = offset + self._expected_result = expected_result + self._expected_failure = expected_failure + + def get_data(self): + data = self._data_provider() + if self._offset is not None: + data.seek(self._offset) + return data + + @classmethod + def create_non_seekable_stream(cls, data: bytes): + result = io.BytesIO(data) + result.seekable = lambda: False # makes the stream appear non-seekable + return result + + +class MockSession: + + def __init__(self, failure_count: int, failure_provider: Callable[[], Response]): + self._failure_count = failure_count + self._received_requests: List[bytes] = [] + self._failure_provider = failure_provider + + @classmethod + def raise_timeout_exception(cls): + raise Timeout("Fake timeout") + + @classmethod + def return_retryable_response(cls): + # fill response fields so that logging does not fail + response = Response() + response._content = b'' + response.status_code = 429 + response.headers = {'Retry-After': '1'} + response.url = 'http://test.com/' + + response.request = PreparedRequest() + response.request.url = response.url + response.request.method = 'POST' + response.request.headers = None + response.request.body = b'' + return response + + # following the signature of Session.request() + def request(self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None): + request_body = data.read() + + if isinstance(request_body, str): + request_body = 
request_body.encode('utf-8') # to be able to compare with expected bytes + + self._received_requests.append(request_body) + if self._failure_count > 0: + self._failure_count -= 1 + return self._failure_provider() + # + else: + # fill response fields so that logging does not fail + response = Response() + response._content = b'' + response.status_code = 200 + response.reason = 'OK' + response.url = url + + response.request = PreparedRequest() + response.request.url = url + response.request.method = method + response.request.headers = headers + response.request.body = data + return response + + +@pytest.mark.parametrize( + 'test_case', + [ + # bytes -> BytesIO + RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"), + # str -> BytesIO + RetryTestCase(lambda: "0123456789", None, False, b"0123456789"), + # BytesIO directly + RetryTestCase(lambda: io.BytesIO(b"0123456789"), None, False, b"0123456789"), + # BytesIO directly with offset + RetryTestCase(lambda: io.BytesIO(b"0123456789"), 4, False, b"456789"), + # StringIO + RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"), + # Non-seekable + RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True, + b"0123456789") + ]) +@pytest.mark.parametrize('failure', [[MockSession.raise_timeout_exception, Timeout], + [MockSession.return_retryable_response, errors.TooManyRequests]]) +def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]): + failure_count = 2 + + data = test_case.get_data() + + session = MockSession(failure_count, failure[0]) + client = _BaseClient() + client._session = session + + def do(): + client.do('POST', f'test.com/foo', data=data) + + if test_case._expected_failure: + expected_attempts_made = 1 + exception_class = failure[1] + with pytest.raises(exception_class): + do() + else: + expected_attempts_made = failure_count + 1 + do() + + assert session._received_requests == 
[test_case._expected_result for _ in range(expected_attempts_made)] diff --git a/tests/test_config.py b/tests/test_config.py index 2eac6d2f8..ebc8d683a 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -42,6 +42,11 @@ class MockUname: def system(self): return 'TestOS' + # Clear all environment variables and cached CICD provider. + for k in os.environ: + monkeypatch.delenv(k, raising=False) + useragent._cicd_provider = None + monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0') monkeypatch.setattr(platform, 'uname', MockUname) monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product") diff --git a/tests/test_core.py b/tests/test_core.py index 16a4c2ad6..32431172b 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -8,7 +8,7 @@ import pytest -from databricks.sdk import WorkspaceClient, errors +from databricks.sdk import WorkspaceClient, errors, useragent from databricks.sdk.core import ApiClient, Config, DatabricksError from databricks.sdk.credentials_provider import (CliTokenSource, CredentialsProvider, @@ -178,6 +178,11 @@ class MockUname: def system(self): return 'TestOS' + # Clear all environment variables and cached CICD provider. 
+ for k in os.environ: + monkeypatch.delenv(k, raising=False) + useragent._cicd_provider = None + monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0') monkeypatch.setattr(platform, 'uname', MockUname) monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product") @@ -370,14 +375,20 @@ def inner(h: BaseHTTPRequestHandler): assert {'Authorization': 'Taker this-is-it'} == headers -@pytest.mark.parametrize(['azure_environment', 'expected'], - [('PUBLIC', ENVIRONMENTS['PUBLIC']), ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']), - ('CHINA', ENVIRONMENTS['CHINA']), ('public', ENVIRONMENTS['PUBLIC']), - ('usgovernment', ENVIRONMENTS['USGOVERNMENT']), ('china', ENVIRONMENTS['CHINA']), - # Kept for historical compatibility - ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']), - ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']), - ('AzureChinaCloud', ENVIRONMENTS['CHINA']), ]) +@pytest.mark.parametrize( + ['azure_environment', 'expected'], + [ + ('PUBLIC', ENVIRONMENTS['PUBLIC']), + ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']), + ('CHINA', ENVIRONMENTS['CHINA']), + ('public', ENVIRONMENTS['PUBLIC']), + ('usgovernment', ENVIRONMENTS['USGOVERNMENT']), + ('china', ENVIRONMENTS['CHINA']), + # Kept for historical compatibility + ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']), + ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']), + ('AzureChinaCloud', ENVIRONMENTS['CHINA']), + ]) def test_azure_environment(azure_environment, expected): c = Config(credentials_strategy=noop_credentials, azure_workspace_resource_id='...', diff --git a/tests/test_credentials_provider.py b/tests/test_credentials_provider.py new file mode 100644 index 000000000..67e6f5b35 --- /dev/null +++ b/tests/test_credentials_provider.py @@ -0,0 +1,145 @@ +from unittest.mock import Mock + +from databricks.sdk.credentials_provider import external_browser + + +def test_external_browser_refresh_success(mocker): + """Tests successful refresh of existing credentials.""" + + # Mock Config. 
+ mock_cfg = Mock() + mock_cfg.auth_type = 'external-browser' + mock_cfg.host = 'test-host' + mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'} + mock_cfg.client_id = 'test-client-id' # Or use azure_client_id + mock_cfg.client_secret = 'test-client-secret' # Or use azure_client_secret + + # Mock TokenCache. + mock_token_cache = Mock() + mock_session_credentials = Mock() + mock_session_credentials.token.return_value = "valid_token" # Simulate successful refresh + mock_token_cache.load.return_value = mock_session_credentials + + # Mock SessionCredentials. + want_credentials_provider = lambda c: "new_credentials" + mock_session_credentials.return_value = want_credentials_provider + + # Inject the mock implementations. + mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache) + + got_credentials_provider = external_browser(mock_cfg) + + mock_token_cache.load.assert_called_once() + mock_session_credentials.token.assert_called_once() # Verify token refresh was attempted + assert got_credentials_provider == want_credentials_provider + + +def test_external_browser_refresh_failure_new_oauth_flow(mocker): + """Tests failed refresh, triggering a new OAuth flow.""" + + # Mock Config. + mock_cfg = Mock() + mock_cfg.auth_type = 'external-browser' + mock_cfg.host = 'test-host' + mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'} + mock_cfg.client_id = 'test-client-id' + mock_cfg.client_secret = 'test-client-secret' + + # Mock TokenCache. + mock_token_cache = Mock() + mock_session_credentials = Mock() + mock_session_credentials.token.side_effect = Exception( + "Simulated refresh error") # Simulate a failed refresh + mock_token_cache.load.return_value = mock_session_credentials + + # Mock SessionCredentials. + want_credentials_provider = lambda c: "new_credentials" + mock_session_credentials.return_value = want_credentials_provider + + # Mock OAuthClient. 
+ mock_oauth_client = Mock() + mock_consent = Mock() + mock_consent.launch_external_browser.return_value = mock_session_credentials + mock_oauth_client.initiate_consent.return_value = mock_consent + + # Inject the mock implementations. + mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache) + mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client) + + got_credentials_provider = external_browser(mock_cfg) + + mock_token_cache.load.assert_called_once() + mock_session_credentials.token.assert_called_once() # Refresh attempt + mock_oauth_client.initiate_consent.assert_called_once() + mock_consent.launch_external_browser.assert_called_once() + mock_token_cache.save.assert_called_once_with(mock_session_credentials) + assert got_credentials_provider == want_credentials_provider + + +def test_external_browser_no_cached_credentials(mocker): + """Tests the case where there are no cached credentials, initiating a new OAuth flow.""" + + # Mock Config. + mock_cfg = Mock() + mock_cfg.auth_type = 'external-browser' + mock_cfg.host = 'test-host' + mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'} + mock_cfg.client_id = 'test-client-id' + mock_cfg.client_secret = 'test-client-secret' + + # Mock TokenCache. + mock_token_cache = Mock() + mock_token_cache.load.return_value = None # No cached credentials + + # Mock SessionCredentials. + mock_session_credentials = Mock() + want_credentials_provider = lambda c: "new_credentials" + mock_session_credentials.return_value = want_credentials_provider + + # Mock OAuthClient. + mock_consent = Mock() + mock_consent.launch_external_browser.return_value = mock_session_credentials + mock_oauth_client = Mock() + mock_oauth_client.initiate_consent.return_value = mock_consent + + # Inject the mock implementations. 
+ mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache) + mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client) + + got_credentials_provider = external_browser(mock_cfg) + + mock_token_cache.load.assert_called_once() + mock_oauth_client.initiate_consent.assert_called_once() + mock_consent.launch_external_browser.assert_called_once() + mock_token_cache.save.assert_called_once_with(mock_session_credentials) + assert got_credentials_provider == want_credentials_provider + + +def test_external_browser_consent_fails(mocker): + """Tests the case where OAuth consent initiation fails.""" + + # Mock Config. + mock_cfg = Mock() + mock_cfg.auth_type = 'external-browser' + mock_cfg.host = 'test-host' + mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'} + mock_cfg.client_id = 'test-client-id' + mock_cfg.client_secret = 'test-client-secret' + + # Mock TokenCache. + mock_token_cache = Mock() + mock_token_cache.load.return_value = None # No cached credentials + + # Mock OAuthClient. + mock_oauth_client = Mock() + mock_oauth_client.initiate_consent.return_value = None # Simulate consent failure + + # Inject the mock implementations. 
+ mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache) + mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client) + + got_credentials_provider = external_browser(mock_cfg) + + mock_token_cache.load.assert_called_once() + mock_oauth_client.initiate_consent.assert_called_once() + assert got_credentials_provider is None diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py index a74658964..1eac92382 100644 --- a/tests/test_data_plane.py +++ b/tests/test_data_plane.py @@ -2,7 +2,7 @@ from databricks.sdk.data_plane import DataPlaneService from databricks.sdk.oauth import Token -from databricks.sdk.service.oauth2 import DataPlaneInfo +from databricks.sdk.service.serving import DataPlaneInfo info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url") diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 000000000..f4d916f6f --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,340 @@ +import logging +import os +import re +from dataclasses import dataclass +from typing import List, Union + +import pytest +from requests import RequestException + +from databricks.sdk import WorkspaceClient +from databricks.sdk.core import Config + +logger = logging.getLogger(__name__) + + +@dataclass +class RequestData: + + def __init__(self, offset: int): + self._offset: int = offset + + +class DownloadTestCase: + + def __init__(self, name: str, enable_new_client: bool, file_size: int, + failure_at_absolute_offset: List[int], max_recovers_total: Union[int, None], + max_recovers_without_progressing: Union[int, None], expected_success: bool, + expected_requested_offsets: List[int]): + self.name = name + self.enable_new_client = enable_new_client + self.file_size = file_size + self.failure_at_absolute_offset = failure_at_absolute_offset + self.max_recovers_total = max_recovers_total + self.max_recovers_without_progressing = max_recovers_without_progressing + 
self.expected_success = expected_success + self.expected_requested_offsets = expected_requested_offsets + + @staticmethod + def to_string(test_case): + return test_case.name + + def run(self, config: Config): + config = config.copy() + config.enable_experimental_files_api_client = self.enable_new_client + config.files_api_client_download_max_total_recovers = self.max_recovers_total + config.files_api_client_download_max_total_recovers_without_progressing = self.max_recovers_without_progressing + + w = WorkspaceClient(config=config) + + session = MockSession(self) + w.files._api._api_client._session = session + + response = w.files.download("/test").contents + if self.expected_success: + actual_content = response.read() + assert (len(actual_content) == len(session.content)) + assert (actual_content == session.content) + else: + with pytest.raises(RequestException): + response.read() + + received_requests = session.received_requests + + assert (len(self.expected_requested_offsets) == len(received_requests)) + for idx, requested_offset in enumerate(self.expected_requested_offsets): + assert (requested_offset == received_requests[idx]._offset) + + +class MockSession: + + def __init__(self, test_case: DownloadTestCase): + self.test_case: DownloadTestCase = test_case + self.received_requests: List[RequestData] = [] + self.content: bytes = os.urandom(self.test_case.file_size) + self.failure_pointer = 0 + self.last_modified = 'Thu, 28 Nov 2024 16:39:14 GMT' + + # following the signature of Session.request() + def request(self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None): + assert method == 'GET' + assert stream == True + + offset = 0 + if "Range" in headers: + range = headers["Range"] + match = re.search("^bytes=(\\d+)-$", range) + if match: + offset = int(match.group(1)) + else: + raise 
Exception("Unexpected range header: " + range) + + if "If-Unmodified-Since" in headers: + assert (headers["If-Unmodified-Since"] == self.last_modified) + else: + raise Exception("If-Unmodified-Since header should be passed along with Range") + + logger.info("Client requested offset: %s", offset) + + if offset > len(self.content): + raise Exception("Offset %s exceeds file length %s", offset, len(self.content)) + + self.received_requests.append(RequestData(offset)) + return MockResponse(self, offset, MockRequest(url)) + + +# required only for correct logging +class MockRequest: + + def __init__(self, url: str): + self.url = url + self.method = 'GET' + self.headers = dict() + self.body = None + + +class MockResponse: + + def __init__(self, session: MockSession, offset: int, request: MockRequest): + self.session = session + self.offset = offset + self.request = request + self.status_code = 200 + self.reason = 'OK' + self.headers = dict() + self.headers['Content-Length'] = len(session.content) - offset + self.headers['Content-Type'] = 'application/octet-stream' + self.headers['Last-Modified'] = session.last_modified + self.ok = True + self.url = request.url + + def iter_content(self, chunk_size: int, decode_unicode: bool): + assert decode_unicode == False + return MockIterator(self, chunk_size) + + +class MockIterator: + + def __init__(self, response: MockResponse, chunk_size: int): + self.response = response + self.chunk_size = chunk_size + self.offset = 0 + + def __next__(self): + start_offset = self.response.offset + self.offset + if start_offset == len(self.response.session.content): + raise StopIteration + + end_offset = start_offset + self.chunk_size # exclusive, might be out of range + + if self.response.session.failure_pointer < len( + self.response.session.test_case.failure_at_absolute_offset): + failure_after_byte = self.response.session.test_case.failure_at_absolute_offset[ + self.response.session.failure_pointer] + if failure_after_byte < end_offset: + 
self.response.session.failure_pointer += 1 + raise RequestException("Fake error") + + result = self.response.session.content[start_offset:end_offset] + self.offset += len(result) + return result + + def close(self): + pass + + +class _Constants: + underlying_chunk_size = 1024 * 1024 # see ticket #832 + + +@pytest.mark.parametrize( + "test_case", + [ + DownloadTestCase(name="Old client: no failures, file of 5 bytes", + enable_new_client=False, + file_size=5, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase(name="Old client: no failures, file of 1.5 chunks", + enable_new_client=False, + file_size=int(1.5 * _Constants.underlying_chunk_size), + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase( + name="Old client: failure", + enable_new_client=False, + file_size=1024, + failure_at_absolute_offset=[100], + max_recovers_total=None, # unlimited but ignored + max_recovers_without_progressing=None, # unlimited but ignored + expected_success=False, + expected_requested_offsets=[0]), + DownloadTestCase(name="New client: no failures, file of 5 bytes", + enable_new_client=True, + file_size=5, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase(name="New client: no failures, file of 1 Kb", + enable_new_client=True, + file_size=1024, + max_recovers_total=None, + max_recovers_without_progressing=None, + failure_at_absolute_offset=[], + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase(name="New client: no failures, file of 1.5 chunks", + enable_new_client=True, + file_size=int(1.5 * _Constants.underlying_chunk_size), + failure_at_absolute_offset=[], + max_recovers_total=0, + 
max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase(name="New client: no failures, file of 10 chunks", + enable_new_client=True, + file_size=10 * _Constants.underlying_chunk_size, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0]), + DownloadTestCase(name="New client: recovers are disabled, first failure leads to download abort", + enable_new_client=True, + file_size=10000, + failure_at_absolute_offset=[5], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=False, + expected_requested_offsets=[0]), + DownloadTestCase( + name="New client: unlimited recovers allowed", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 5, + # causes errors on requesting the third chunk + failure_at_absolute_offset=[ + _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size + 1, + _Constants.underlying_chunk_size * 3, + ], + max_recovers_total=None, + max_recovers_without_progressing=None, + expected_success=True, + expected_requested_offsets=[ + 0, 0, 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 3 + ]), + DownloadTestCase( + name="New client: we respect limit on total recovers when progressing", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[ + 1, + _Constants.underlying_chunk_size + 1, # progressing + _Constants.underlying_chunk_size * 2 + 1, # progressing + _Constants.underlying_chunk_size * 3 + 1 # progressing + ], + max_recovers_total=3, + max_recovers_without_progressing=None, + expected_success=False, + expected_requested_offsets=[ + 0, 0, _Constants.underlying_chunk_size * 1, _Constants.underlying_chunk_size * 2 + ]), + DownloadTestCase(name="New client: we respect limit on total 
recovers when not progressing", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[1, 1, 1, 1], + max_recovers_total=3, + max_recovers_without_progressing=None, + expected_success=False, + expected_requested_offsets=[0, 0, 0, 0]), + DownloadTestCase(name="New client: we respect limit on non-progressing recovers", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 2, + failure_at_absolute_offset=[ + _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1 + ], + max_recovers_total=None, + max_recovers_without_progressing=3, + expected_success=False, + expected_requested_offsets=[0, 0, 0, 0]), + DownloadTestCase( + name="New client: non-progressing recovers count is reset when progressing", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[ + _Constants.underlying_chunk_size + 1, # this recover is after progressing + _Constants.underlying_chunk_size + 1, # this is not + _Constants.underlying_chunk_size * 2 + 1, # this recover is after progressing + _Constants.underlying_chunk_size * 2 + 1, # this is not + _Constants.underlying_chunk_size * 2 + 1, # this is not, we abort here + ], + max_recovers_total=None, + max_recovers_without_progressing=2, + expected_success=False, + expected_requested_offsets=[ + 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size, + _Constants.underlying_chunk_size * 2, _Constants.underlying_chunk_size * 2 + ]), + DownloadTestCase(name="New client: non-progressing recovers count is reset when progressing - 2", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[ + 1, _Constants.underlying_chunk_size + 1, _Constants.underlying_chunk_size * 2 + + 1, _Constants.underlying_chunk_size * 3 + 1 + ], + max_recovers_total=None, + 
max_recovers_without_progressing=1, + expected_success=True, + expected_requested_offsets=[ + 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 2, + _Constants.underlying_chunk_size * 3 + ]), + ], + ids=DownloadTestCase.to_string) +def test_download_recover(config: Config, test_case: DownloadTestCase): + test_case.run(config) diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py new file mode 100644 index 000000000..2c39d41d9 --- /dev/null +++ b/tests/test_jobs_mixin.py @@ -0,0 +1,263 @@ +import json +import re +from typing import Pattern + +from databricks.sdk import WorkspaceClient + + +def make_getrun_path_pattern(run_id: int, page_token: str) -> Pattern[str]: + return re.compile( + rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?page_token={page_token}&run_id={run_id}")}' + ) + + +def make_getjob_path_pattern(job_id: int, page_token: str) -> Pattern[str]: + return re.compile( + rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}&page_token={page_token}")}' + ) + + +def test_get_run_with_no_pagination(config, requests_mock): + run1 = {"tasks": [{"run_id": 0}, {"run_id": 1}], } + requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) + w = WorkspaceClient(config=config) + + run = w.jobs.get_run(1337, page_token="initialToken") + + assert run.as_dict() == {"tasks": [{'run_id': 0}, {'run_id': 1}], } + + +def test_get_run_pagination_with_tasks(config, requests_mock): + from databricks.sdk.service import compute, jobs + cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ) + cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec) + cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec) + cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec) + cluster4 = 
jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec) + run1 = { + "tasks": [{ + "run_id": 0 + }, { + "run_id": 1 + }], + "job_clusters": [cluster1.as_dict(), cluster2.as_dict(), ], + "job_parameters": [{ + "name": "param1", + "value": "value1" + }], + "next_page_token": "tokenToSecondPage", + } + run2 = { + "tasks": [{ + "run_id": 2 + }, { + "run_id": 3 + }], + "job_clusters": [cluster3.as_dict(), cluster4.as_dict(), ], + "job_parameters": [{ + "name": "param2", + "value": "value2" + }], + "next_page_token": "tokenToThirdPage", + } + run3 = {"tasks": [{"run_id": 4}]} + requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) + requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2)) + requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3)) + w = WorkspaceClient(config=config) + + run = w.jobs.get_run(1337, page_token="initialToken") + + assert run.as_dict() == { + "tasks": [{ + 'run_id': 0 + }, { + 'run_id': 1 + }, { + 'run_id': 2 + }, { + 'run_id': 3 + }, { + 'run_id': 4 + }], + "job_clusters": [cluster1.as_dict(), + cluster2.as_dict(), + cluster3.as_dict(), + cluster4.as_dict()], + "job_parameters": [{ + "name": "param1", + "value": "value1" + }, { + "name": "param2", + "value": "value2" + }], + } + + +def test_get_run_pagination_with_iterations(config, requests_mock): + run1 = { + "tasks": [{ + "run_id": 1337 + }], + "iterations": [{ + "run_id": 0 + }, { + "run_id": 1 + }], + "next_page_token": "tokenToSecondPage", + } + run2 = { + "tasks": [{ + "run_id": 1337 + }], + "iterations": [{ + "run_id": 2 + }, { + "run_id": 3 + }], + "next_page_token": "tokenToThirdPage", + } + run3 = {"tasks": [{"run_id": 1337}], "iterations": [{"run_id": 4}], } + requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) + requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2)) + 
requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3)) + w = WorkspaceClient(config=config) + + run = w.jobs.get_run(1337, page_token="initialToken") + + assert run.as_dict() == { + "tasks": [{ + 'run_id': 1337 + }], + "iterations": [{ + 'run_id': 0 + }, { + 'run_id': 1 + }, { + 'run_id': 2 + }, { + 'run_id': 3 + }, { + 'run_id': 4 + }], + } + + +def test_get_job_with_no_pagination(config, requests_mock): + job1 = {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }} + requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1)) + w = WorkspaceClient(config=config) + + job = w.jobs.get(1337, page_token="initialToken") + + assert job.as_dict() == {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }} + + +def test_get_job_pagination_with_tasks(config, requests_mock): + from databricks.sdk.service import compute, jobs + cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ) + cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec) + cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec) + cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec) + cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec) + job1 = { + "settings": { + "tasks": [{ + "task_key": "taskKey1" + }, { + "task_key": "taskKey2" + }], + "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], + "parameters": [{ + "name": "param1", + "default": "default1" + }], + "environments": [{ + "environment_key": "key1" + }, { + "environment_key": "key2" + }] + }, + "next_page_token": "tokenToSecondPage" + } + job2 = { + "settings": { + "tasks": [{ + "task_key": "taskKey3" + }, { + "task_key": "taskKey4" + }], + "job_clusters": [cluster3.as_dict(), cluster4.as_dict()], + 
"parameters": [{ + "name": "param2", + "default": "default2" + }], + "environments": [{ + "environment_key": "key3" + }] + }, + "next_page_token": "tokenToThirdPage" + } + job3 = { + "settings": { + "tasks": [{ + "task_key": "taskKey5" + }], + "parameters": [{ + "name": "param3", + "default": "default3" + }] + }, + } + + requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1)) + requests_mock.get(make_getjob_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(job2)) + requests_mock.get(make_getjob_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(job3)) + w = WorkspaceClient(config=config) + + job = w.jobs.get(1337, page_token="initialToken") + + assert job.as_dict() == { + "settings": { + "tasks": [{ + "task_key": "taskKey1" + }, { + "task_key": "taskKey2" + }, { + "task_key": "taskKey3" + }, { + "task_key": "taskKey4" + }, { + "task_key": "taskKey5" + }], + "job_clusters": [cluster1.as_dict(), + cluster2.as_dict(), + cluster3.as_dict(), + cluster4.as_dict()], + "parameters": [{ + "name": "param1", + "default": "default1" + }, { + "name": "param2", + "default": "default2" + }, { + "name": "param3", + "default": "default3" + }], + "environments": [{ + "environment_key": "key1" + }, { + "environment_key": "key2" + }, { + "environment_key": "key3" + }] + } + } diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py index 092a3bf16..49aed33a5 100644 --- a/tests/test_model_serving_auth.py +++ b/tests/test_model_serving_auth.py @@ -1,8 +1,10 @@ +import threading import time import pytest from databricks.sdk.core import Config +from databricks.sdk.credentials_provider import ModelServingUserCredentials from .conftest import raises @@ -39,7 +41,6 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp mocker.patch('databricks.sdk.config.Config._known_file_config_loader') cfg = Config() - assert cfg.auth_type == 'model-serving' headers = cfg.authenticate() assert (cfg.host == 
'x') @@ -47,15 +48,24 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token' -@pytest.mark.parametrize("env_values, oauth_file_name", [ - ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name - ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name - ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true') - ], "invalid_file_name"), # In Model Serving and Invalid File Name - ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name -]) +@pytest.mark.parametrize( + "env_values, oauth_file_name", + [ + ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name + ([('IS_IN_DB_MODEL_SERVING_ENV', 'true') + ], "invalid_file_name"), # In Model Serving and Invalid File Name + ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true') + ], "invalid_file_name"), # In Model Serving and Invalid File Name + ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name + ]) @raises(default_auth_base_error_message) def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch): + # Guarantee that the tests defaults to env variables rather than config file. + # + # TODO: this is hacky and we should find a better way to tell the config + # that it should not read from the config file. 
+ monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x') + for (env_name, env_value) in env_values: monkeypatch.setenv(env_name, env_value) monkeypatch.setattr( @@ -84,7 +94,6 @@ def test_model_serving_auth_refresh(monkeypatch, mocker): assert (cfg.host == 'x') assert headers.get( "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file - # Simulate refreshing the token by patching to to a new file monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", @@ -104,3 +113,49 @@ def test_model_serving_auth_refresh(monkeypatch, mocker): assert (cfg.host == 'x') # Read V2 now assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2' + + +def test_agent_user_credentials(monkeypatch, mocker): + monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true') + monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x') + monkeypatch.setattr( + "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", + "tests/testdata/model-serving-test-token") + + invokers_token_val = "databricks_invokers_token" + current_thread = threading.current_thread() + thread_data = current_thread.__dict__ + thread_data["invokers_token"] = invokers_token_val + + cfg = Config(credentials_strategy=ModelServingUserCredentials()) + assert cfg.auth_type == 'model_serving_user_credentials' + + headers = cfg.authenticate() + + assert (cfg.host == 'x') + assert headers.get("Authorization") == f'Bearer {invokers_token_val}' + + # Test updates of invokers token + invokers_token_val = "databricks_invokers_token_v2" + current_thread = threading.current_thread() + thread_data = current_thread.__dict__ + thread_data["invokers_token"] = invokers_token_val + + headers = cfg.authenticate() + assert (cfg.host == 'x') + assert headers.get("Authorization") == f'Bearer {invokers_token_val}' + + +# If this credential strategy is being used in a non model serving 
environments then use default credential strategy instead +def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch): + + monkeypatch.setenv('DATABRICKS_HOST', 'x') + monkeypatch.setenv('DATABRICKS_TOKEN', 'token') + + cfg = Config(credentials_strategy=ModelServingUserCredentials()) + assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment + + headers = cfg.authenticate() + + assert (cfg.host == 'https://x') + assert headers.get("Authorization") == f'Bearer token' diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 1858c66cb..72e1e9a60 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -1,8 +1,10 @@ import sys +from io import BytesIO import pytest from databricks.sdk.core import Config +from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod def test_open_ai_client(monkeypatch): @@ -28,3 +30,22 @@ def test_langchain_open_ai_client(monkeypatch): assert client.openai_api_base == "https://test_host/serving-endpoints" assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" + + +def test_http_request(w, requests_mock): + headers = {"Accept": "text/plain", "Content-Type": "application/json", } + mocked_url = "http://localhost/api/2.0/external-function" + blob_response = BytesIO(b"The request was successful") + + requests_mock.post(mocked_url, + request_headers=headers, + content=blob_response.getvalue(), + status_code=200, + ) + response = w.serving_endpoints.http_request(conn="test_conn", + method=ExternalFunctionRequestHttpMethod.GET, + path="test_path") + assert requests_mock.call_count == 1 + assert requests_mock.called + assert response.status_code == 200 # Verify the response status + assert (response.text == "The request was successful") # Ensure the response body matches the mocked data diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py index 5083d9908..ba6f694f5 100644 --- a/tests/test_user_agent.py 
+++ b/tests/test_user_agent.py @@ -1,3 +1,5 @@ +import os + import pytest from databricks.sdk.version import __version__ @@ -40,3 +42,45 @@ def test_user_agent_with_partner(user_agent): user_agent.with_partner('differenttest') assert 'partner/test' in user_agent.to_string() assert 'partner/differenttest' in user_agent.to_string() + + +@pytest.fixture(scope="function") +def clear_cicd(): + # Save and clear env vars. + original_env = os.environ.copy() + os.environ.clear() + + # Clear cached CICD provider. + from databricks.sdk import useragent + useragent._cicd_provider = None + + yield + + # Restore env vars. + os.environ = original_env + + +def test_user_agent_cicd_no_provider(clear_cicd): + from databricks.sdk import useragent + user_agent = useragent.to_string() + + assert 'cicd' not in user_agent + + +def test_user_agent_cicd_one_provider(clear_cicd): + os.environ['GITHUB_ACTIONS'] = 'true' + + from databricks.sdk import useragent + user_agent = useragent.to_string() + + assert 'cicd/github' in user_agent + + +def test_user_agent_cicd_two_provider(clear_cicd): + os.environ['GITHUB_ACTIONS'] = 'true' + os.environ['GITLAB_CI'] = 'true' + + from databricks.sdk import useragent + user_agent = useragent.to_string() + + assert 'cicd/github' in user_agent From 2814adf1aa999f69838db9a44449dd969a646604 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 16:11:58 -0700 Subject: [PATCH 02/12] fix files --- databricks/sdk/config.py | 12 ---- databricks/sdk/credentials_provider.py | 82 -------------------------- 2 files changed, 94 deletions(-) diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index a556b5988..490c6ba4e 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -92,17 +92,6 @@ class Config: max_connections_per_pool: int = ConfigAttribute() databricks_environment: Optional[DatabricksEnvironment] = None -<<<<<<< HEAD - def __init__(self, - *, - # Deprecated. Use credentials_strategy instead. 
- credentials_provider: Optional[CredentialsStrategy] = None, - credentials_strategy: Optional[CredentialsStrategy] = None, - product=None, - product_version=None, - clock: Optional[Clock] = None, - **kwargs): -======= enable_experimental_files_api_client: bool = ConfigAttribute( env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT') files_api_client_download_max_total_recovers = None @@ -118,7 +107,6 @@ def __init__( product_version=None, clock: Optional[Clock] = None, **kwargs): ->>>>>>> upstream/main self._header_factory = None self._inner = {} self._user_agent_other_info = [] diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index 9a5b0748f..07fb48c5a 100644 --- a/databricks/sdk/credentials_provider.py +++ b/databricks/sdk/credentials_provider.py @@ -9,10 +9,7 @@ import platform import subprocess import sys -<<<<<<< HEAD -======= import threading ->>>>>>> upstream/main import time from datetime import datetime from typing import Callable, Dict, List, Optional, Tuple, Union @@ -192,10 +189,6 @@ def token() -> Token: def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: if cfg.auth_type != 'external-browser': return None -<<<<<<< HEAD -======= - ->>>>>>> upstream/main client_id, client_secret = None, None if cfg.client_id: client_id = cfg.client_id @@ -203,20 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: elif cfg.azure_client_id: client_id = cfg.azure_client client_secret = cfg.azure_client_secret -<<<<<<< HEAD - - if not client_id: - client_id = 'databricks-cli' - - # Load cached credentials from disk if they exist. - # Note that these are local to the Python SDK and not reused by other SDKs. -======= if not client_id: client_id = 'databricks-cli' # Load cached credentials from disk if they exist. Note that these are # local to the Python SDK and not reused by other SDKs. 
->>>>>>> upstream/main oidc_endpoints = cfg.oidc_endpoints redirect_url = 'http://localhost:8020' token_cache = TokenCache(host=cfg.host, @@ -226,19 +210,6 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: redirect_url=redirect_url) credentials = token_cache.load() if credentials: -<<<<<<< HEAD - # Force a refresh in case the loaded credentials are expired. - credentials.token() - else: - oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints, - client_id=client_id, - redirect_url=redirect_url, - client_secret=client_secret) - consent = oauth_client.initiate_consent() - if not consent: - return None - credentials = consent.launch_external_browser() -======= try: # Pro-actively refresh the loaded credentials. This is done # to detect if the token is expired and needs to be refreshed @@ -258,7 +229,6 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: return None credentials = consent.launch_external_browser() ->>>>>>> upstream/main token_cache.save(credentials) return credentials(cfg) @@ -753,16 +723,6 @@ def inner() -> Dict[str, str]: # This Code is derived from Mlflow DatabricksModelServingConfigProvider # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332 class ModelServingAuthProvider(): -<<<<<<< HEAD - _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token" - - def __init__(self): - self.expiry_time = -1 - self.current_token = None - self.refresh_duration = 300 # 300 Seconds - - def should_fetch_model_serving_environment_oauth(self) -> bool: -======= USER_CREDENTIALS = "user_credentials" _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token" @@ -774,7 +734,6 @@ def __init__(self, credential_type: Optional[str]): self.credential_type = credential_type def should_fetch_model_serving_environment_oauth() -> bool: ->>>>>>> upstream/main """ Check 
whether this is the model serving environment Additionally check if the oauth token file path exists @@ -783,25 +742,15 @@ def should_fetch_model_serving_environment_oauth() -> bool: is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV") or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false") return (is_in_model_serving_env == "true" -<<<<<<< HEAD - and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH)) - - def get_model_dependency_oauth_token(self, should_retry=True) -> str: -======= and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH)) def _get_model_dependency_oauth_token(self, should_retry=True) -> str: ->>>>>>> upstream/main # Use Cached value if it is valid if self.current_token is not None and self.expiry_time > time.time(): return self.current_token try: -<<<<<<< HEAD - with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f: -======= with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f: ->>>>>>> upstream/main oauth_dict = json.load(f) self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"] self.expiry_time = time.time() + self.refresh_duration @@ -811,21 +760,13 @@ def _get_model_dependency_oauth_token(self, should_retry=True) -> str: logger.warning("Unable to read oauth token on first attmept in Model Serving Environment", exc_info=e) time.sleep(0.5) -<<<<<<< HEAD - return self.get_model_dependency_oauth_token(should_retry=False) -======= return self._get_model_dependency_oauth_token(should_retry=False) ->>>>>>> upstream/main else: raise RuntimeError( "Unable to read OAuth credentials from the file mounted in Databricks Model Serving" ) from e return self.current_token -<<<<<<< HEAD - def get_databricks_host_token(self) -> Optional[Tuple[str, str]]: - if not self.should_fetch_model_serving_environment_oauth(): -======= def _get_invokers_token(self): current_thread = threading.current_thread() thread_data = current_thread.__dict__ @@ 
-840,26 +781,11 @@ def _get_invokers_token(self): def get_databricks_host_token(self) -> Optional[Tuple[str, str]]: if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): ->>>>>>> upstream/main return None # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get( "DB_MODEL_SERVING_HOST_URL") -<<<<<<< HEAD - token = self.get_model_dependency_oauth_token() - - return (host, token) - - -@credentials_strategy('model-serving', []) -def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: - try: - model_serving_auth_provider = ModelServingAuthProvider() - if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth(): - logger.debug("model-serving: Not in Databricks Model Serving, skipping") - return None -======= if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS: return (host, self._get_invokers_token()) @@ -871,7 +797,6 @@ def model_serving_auth_visitor(cfg: 'Config', credential_type: Optional[str] = None) -> Optional[CredentialsProvider]: try: model_serving_auth_provider = ModelServingAuthProvider(credential_type) ->>>>>>> upstream/main host, token = model_serving_auth_provider.get_databricks_host_token() if token is None: raise ValueError( @@ -882,10 +807,6 @@ def model_serving_auth_visitor(cfg: 'Config', except Exception as e: logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e) return None -<<<<<<< HEAD - -======= ->>>>>>> upstream/main logger.info("Using Databricks Model Serving Authentication") def inner() -> Dict[str, str]: @@ -896,8 +817,6 @@ def inner() -> Dict[str, str]: return inner -<<<<<<< HEAD -======= @credentials_strategy('model-serving', []) def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): @@ -907,7 +826,6 @@ def 
model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: return model_serving_auth_visitor(cfg) ->>>>>>> upstream/main class DefaultCredentials: """ Select the first applicable credential provider from the chain """ From f0e962c64bd097aa8bfca7bbc0af8b77d901a596 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 17:08:14 -0700 Subject: [PATCH 03/12] missing termination reason code --- databricks/sdk/service/compute.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 63a971b73..d4596e63e 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -7616,6 +7616,8 @@ class TerminationReasonCode(Enum): INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE' INTERNAL_ERROR = 'INTERNAL_ERROR' INVALID_ARGUMENT = 'INVALID_ARGUMENT' + # [PROD-2800] Add missing termination reason code + INVALID_INSTANCE_PLACEMENT_PROTOCOL = 'INVALID_INSTANCE_PLACEMENT_PROTOCOL' INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE' IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE' JOB_FINISHED = 'JOB_FINISHED' From ee90a155ec674ac56a27cdad25b7f6255743a34f Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 17:18:53 -0700 Subject: [PATCH 04/12] delete tests --- .github/workflows/integration-tests.yml | 90 ------------------------- .github/workflows/message.yml | 32 --------- 2 files changed, 122 deletions(-) delete mode 100644 .github/workflows/integration-tests.yml delete mode 100644 .github/workflows/message.yml diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index c308cc03c..000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,90 +0,0 @@ -name: Integration Tests - -on: - - pull_request: - types: [opened, synchronize] - - merge_group: - - -jobs: - check-token: - name: Check secrets access - - runs-on: - group: databricks-deco-testing-runner-group - labels: 
ubuntu-latest-deco - - environment: "test-trigger-is" - outputs: - has_token: ${{ steps.set-token-status.outputs.has_token }} - steps: - - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set - id: set-token-status - run: | - if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then - echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets." - echo "::set-output name=has_token::false" - else - echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets." - echo "::set-output name=has_token::true" - fi - - trigger-tests: - name: Trigger Tests - - runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco - - needs: check-token - if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true' - environment: "test-trigger-is" - - steps: - - uses: actions/checkout@v3 - - - name: Generate GitHub App Token - id: generate-token - uses: actions/create-github-app-token@v1 - with: - app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }} - private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }} - owner: ${{ secrets.ORG_NAME }} - repositories: ${{secrets.REPO_NAME}} - - - name: Trigger Workflow in Another Repo - env: - GH_TOKEN: ${{ steps.generate-token.outputs.token }} - run: | - gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \ - --ref main \ - -f pull_request_number=${{ github.event.pull_request.number }} \ - -f commit_sha=${{ github.event.pull_request.head.sha }} - - # Statuses and checks apply to specific commits (by hash). - # Enforcement of required checks is done both at the PR level and the merge queue level. - # In case of multiple commits in a single PR, the hash of the squashed commit - # will not match the one for the latest (approved) commit in the PR. - # We auto approve the check for the merge queue for two reasons: - # * Queue times out due to duration of tests. 
- # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing. - auto-approve: - if: github.event_name == 'merge_group' - - runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco - - steps: - - name: Mark Check - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - shell: bash - run: | - gh api -X POST -H "Accept: application/vnd.github+json" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - /repos/${{ github.repository }}/statuses/${{ github.sha }} \ - -f 'state=success' \ - -f 'context=Integration Tests Check' diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml deleted file mode 100644 index 057556895..000000000 --- a/.github/workflows/message.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Validate Commit Message - -on: - pull_request: - types: [opened, synchronize, edited] - merge_group: - types: [checks_requested] - -jobs: - validate: - runs-on: ubuntu-latest - # GitHub required checks are shared between PRs and the Merge Queue. - # Since there is no PR title on Merge Queue, we need to trigger and - # skip this test for Merge Queue to succeed. 
- if: github.event_name == 'pull_request' - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Validate Tag - env: - TITLE: ${{ github.event.pull_request.title }} - run: | - TAG=$(echo "$TITLE" | sed -ne 's/\[\(.*\)\].*/\1/p') - if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then - echo "Valid tag found: [$TAG]" - else - echo "Invalid or missing tag in commit message: [$TAG]" - exit 1 - fi \ No newline at end of file From cb5925c489a12e59c1c8e40e380c6b3da6434e51 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 18:13:18 -0700 Subject: [PATCH 05/12] fix merge issues --- databricks/sdk/_base_client.py | 1 + databricks/sdk/credentials_provider.py | 1 + databricks/sdk/service/billing.py | 6 +- databricks/sdk/service/dashboards.py | 187 ------------------- databricks/sdk/service/iam.py | 4 +- databricks/sdk/service/jobs.py | 5 - databricks/sdk/service/serving.py | 21 --- databricks/sdk/service/sql.py | 138 +++++++++----- docs/account/iam/workspace_assignment.rst | 2 +- docs/workspace/dashboards/index.rst | 2 +- docs/workspace/serving/serving_endpoints.rst | 3 - tests/test_open_ai_mixin.py | 2 +- 12 files changed, 97 insertions(+), 275 deletions(-) diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py index 58fcb10a5..f0950f656 100644 --- a/databricks/sdk/_base_client.py +++ b/databricks/sdk/_base_client.py @@ -276,6 +276,7 @@ def _perform(self, error = self._error_parser.get_api_error(response) if error is not None: raise error from None + return response def _record_request_log(self, response: requests.Response, raw: bool = False) -> None: diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index 07fb48c5a..24d01f678 100644 --- a/databricks/sdk/credentials_provider.py +++ b/databricks/sdk/credentials_provider.py @@ -189,6 +189,7 @@ def token() -> Token: def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: if 
cfg.auth_type != 'external-browser': return None + client_id, client_secret = None, None if cfg.client_id: client_id = cfg.client_id diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index e23e676fe..dd2579921 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -753,7 +753,7 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: class DeleteResponse: def as_dict(self) -> dict: - """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @@ -763,8 +763,8 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: - """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" return cls() diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index c81159cca..ba01ba41d 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1804,193 +1804,6 @@ def start_conversation_and_wait(self, space_id: str, content: str, return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) -class GenieAPI: - """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that - business users can use to ask questions using natural language. Genie uses data registered to Unity - Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. 
Also, Databricks - Assistant must be enabled.""" - - def __init__(self, api_client): - self._api = api_client - - def wait_get_message_genie_completed( - self, - conversation_id: str, - message_id: str, - space_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: - deadline = time.time() + timeout.total_seconds() - target_states = (MessageStatus.COMPLETED, ) - failure_states = (MessageStatus.FAILED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach COMPLETED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: - """Create conversation message. - - Create new message in [conversation](:method:genie/startconversation). The AI response uses all - previously created messages in the conversation to respond. - - :param space_id: str - The ID associated with the Genie space where the conversation is started. - :param conversation_id: str - The ID associated with the conversation. - :param content: str - User message content. - - :returns: - Long-running operation waiter for :class:`GenieMessage`. - See :method:wait_get_message_genie_completed for more details. 
- """ - body = {} - if content is not None: body['content'] = content - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do( - 'POST', - f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages', - body=body, - headers=headers) - return Wait(self.wait_get_message_genie_completed, - response=GenieMessage.from_dict(op_response), - conversation_id=conversation_id, - message_id=op_response['id'], - space_id=space_id) - - def create_message_and_wait(self, - space_id: str, - conversation_id: str, - content: str, - timeout=timedelta(minutes=20)) -> GenieMessage: - return self.create_message(content=content, conversation_id=conversation_id, - space_id=space_id).result(timeout=timeout) - - def execute_message_query(self, space_id: str, conversation_id: str, - message_id: str) -> GenieGetMessageQueryResultResponse: - """Execute SQL query in a conversation message. - - Execute the SQL query in the message. - - :param space_id: str - Genie space ID - :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - - :returns: :class:`GenieGetMessageQueryResultResponse` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do( - 'POST', - f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query', - headers=headers) - return GenieGetMessageQueryResultResponse.from_dict(res) - - def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: - """Get conversation message. - - Get message from conversation. - - :param space_id: str - The ID associated with the Genie space where the target conversation is located. - :param conversation_id: str - The ID associated with the target conversation. - :param message_id: str - The ID associated with the target message from the identified conversation. 
- - :returns: :class:`GenieMessage` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do( - 'GET', - f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}', - headers=headers) - return GenieMessage.from_dict(res) - - def get_message_query_result(self, space_id: str, conversation_id: str, - message_id: str) -> GenieGetMessageQueryResultResponse: - """Get conversation message SQL query result. - - Get the result of SQL query if the message has a query attachment. This is only available if a message - has a query attachment and the message status is `EXECUTING_QUERY`. - - :param space_id: str - Genie space ID - :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - - :returns: :class:`GenieGetMessageQueryResultResponse` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do( - 'GET', - f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result', - headers=headers) - return GenieGetMessageQueryResultResponse.from_dict(res) - - def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: - """Start conversation. - - Start a new conversation. - - :param space_id: str - The ID associated with the Genie space where you want to start a conversation. - :param content: str - The text of the message that starts the conversation. - - :returns: - Long-running operation waiter for :class:`GenieMessage`. - See :method:wait_get_message_genie_completed for more details. 
- """ - body = {} - if content is not None: body['content'] = content - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do('POST', - f'/api/2.0/genie/spaces/{space_id}/start-conversation', - body=body, - headers=headers) - return Wait(self.wait_get_message_genie_completed, - response=GenieStartConversationResponse.from_dict(op_response), - conversation_id=op_response['conversation_id'], - message_id=op_response['message_id'], - space_id=space_id) - - def start_conversation_and_wait(self, space_id: str, content: str, - timeout=timedelta(minutes=20)) -> GenieMessage: - return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) - - class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete).""" diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index b841bec8b..2f752d06c 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -1588,7 +1588,7 @@ class UpdateWorkspaceAssignments: """The ID of the user, service principal, or group.""" workspace_id: Optional[int] = None - """The workspace ID for the account.""" + """The workspace ID.""" def as_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" @@ -3894,7 +3894,7 @@ def update(self, specified principal. :param workspace_id: int - The workspace ID for the account. + The workspace ID. :param principal_id: int The ID of the user, service principal, or group. 
:param permissions: List[:class:`WorkspacePermission`] (optional) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 8220a0715..c0d4240bf 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -3861,9 +3861,6 @@ class Run: overriding_parameters: Optional[RunParameters] = None """The parameters used for this run.""" - prev_page_token: Optional[str] = None - """A token that can be used to list the previous page of sub-resources.""" - queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" @@ -3956,7 +3953,6 @@ def as_dict(self) -> dict: if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token if self.queue_duration is not None: body['queue_duration'] = self.queue_duration if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] if self.run_duration is not None: body['run_duration'] = self.run_duration @@ -4039,7 +4035,6 @@ def from_dict(cls, d: Dict[str, any]) -> Run: number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), - prev_page_token=d.get('prev_page_token', None), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index c10e43572..938445863 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -657,10 +657,6 @@ class CreateServingEndpoint: config: Optional[EndpointCoreConfigInput] = None """The core config of the serving endpoint.""" - 
ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are - supported as of now.""" - rate_limits: Optional[List[RateLimit]] = None """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.""" @@ -2554,12 +2550,6 @@ class ServedModelInput: min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - max_provisioned_throughput: Optional[int] = None - """The maximum tokens per second that the endpoint can scale up to.""" - - min_provisioned_throughput: Optional[int] = None - """The minimum tokens per second that the endpoint can scale down to.""" - name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external @@ -2574,14 +2564,6 @@ class ServedModelInput: scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.""" - workload_size: Optional[ServedModelInputWorkloadSize] = None - """The workload size of the served model. The workload size corresponds to a range of provisioned - concurrency that the compute will autoscale between. A single unit of provisioned concurrency - can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned - concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned - concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for - each workload size will be 0.""" - workload_type: Optional[ServedModelInputWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". 
For deep learning workloads, GPU @@ -3431,9 +3413,6 @@ def create(self, throughput endpoints are currently supported. :param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. - :param ai_gateway: :class:`AiGatewayConfig` (optional) - The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are - supported as of now. :param rate_limits: List[:class:`RateLimit`] (optional) Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 059b744ef..bc3c03d31 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -3078,49 +3078,74 @@ class LegacyQuery: can_edit: Optional[bool] = None """Describes whether the authenticated user is allowed to edit the definition of this query.""" - catalog: Optional[str] = None - """Name of the catalog where this query will be executed.""" + created_at: Optional[str] = None + """The timestamp when this query was created.""" - create_time: Optional[str] = None - """Timestamp when this query was created.""" + data_source_id: Optional[str] = None + """Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - display_name: Optional[str] = None - """Display name of the query that appears in list views, widget headings, and on the query page.""" - id: Optional[str] = None - """UUID identifying the query.""" + """Query ID.""" - last_modifier_user_name: Optional[str] = None - """Username of the user who last saved changes to this query.""" + is_archived: Optional[bool] = None + """Indicates whether the query is trashed. 
Trashed queries can't be used in dashboards, or appear + in search results. If this boolean is `true`, the `options` property for this query includes a + `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" - lifecycle_state: Optional[LifecycleState] = None - """Indicates whether the query is trashed.""" + is_draft: Optional[bool] = None + """Whether the query is a draft. Draft queries only appear in list views for their owners. + Visualizations from draft queries cannot appear on dashboards.""" - owner_user_name: Optional[str] = None - """Username of the user that owns the query.""" + is_favorite: Optional[bool] = None + """Whether this query object appears in the current user's favorites list. This flag determines + whether the star icon for favorites is selected.""" - parameters: Optional[List[QueryParameter]] = None - """List of query parameter definitions.""" + is_safe: Optional[bool] = None + """Text parameter types are not safe from SQL injection for all types of data source. Set this + Boolean parameter to `true` if a query either does not use any text type parameters or uses a + data source type where text type parameters are handled safely.""" - parent_path: Optional[str] = None - """Workspace path of the workspace folder containing the object.""" + last_modified_by: Optional[User] = None - query_text: Optional[str] = None - """Text of the query to be run.""" + last_modified_by_id: Optional[int] = None + """The ID of the user who last saved changes to this query.""" - run_as_mode: Optional[RunAsMode] = None - """Sets the "Run as" role for the object.""" + latest_query_data_id: Optional[str] = None + """If there is a cached result for this query and user, this field includes the query result ID. 
If + this query uses parameters, this field is always null.""" - schema: Optional[str] = None - """Name of the schema where this query will be executed.""" + name: Optional[str] = None + """The title of this query that appears in list views, widget headings, and on the query page.""" + + options: Optional[QueryOptions] = None + + parent: Optional[str] = None + """The identifier of the workspace folder containing the object.""" + + permission_tier: Optional[PermissionLevel] = None + """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query + * `CAN_MANAGE`: Can manage the query""" + + query: Optional[str] = None + """The text of the query to be run.""" + + query_hash: Optional[str] = None + """A SHA-256 hash of the query text along with the authenticated user ID.""" + + run_as_role: Optional[RunAsRole] = None + """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" tags: Optional[List[str]] = None - update_time: Optional[str] = None - """Timestamp when this query was last updated.""" + updated_at: Optional[str] = None + """The timestamp at which this query was last updated.""" user: Optional[User] = None @@ -3132,24 +3157,30 @@ class LegacyQuery: def as_dict(self) -> dict: """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.create_time is not None: body['create_time'] = self.create_time + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id if self.description is not None: body['description'] = self.description - if self.display_name is not 
None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.last_modifier_user_name is not None: - body['last_modifier_user_name'] = self.last_modifier_user_name - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value - if self.schema is not None: body['schema'] = self.schema + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() + if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id + if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.query is not None: body['query'] = self.query + if self.query_hash is not None: body['query_hash'] = self.query_hash + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value if self.tags: body['tags'] = [v for v in self.tags] - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if 
self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + if self.user_id is not None: body['user_id'] = self.user_id + if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] return body def as_shallow_dict(self) -> dict: @@ -3188,16 +3219,21 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery: created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), - display_name=d.get('display_name', None), id=d.get('id', None), - last_modifier_user_name=d.get('last_modifier_user_name', None), - lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), - owner_user_name=d.get('owner_user_name', None), - parameters=_repeated_dict(d, 'parameters', QueryParameter), - parent_path=d.get('parent_path', None), - query_text=d.get('query_text', None), - run_as_mode=_enum(d, 'run_as_mode', RunAsMode), - schema=d.get('schema', None), + is_archived=d.get('is_archived', None), + is_draft=d.get('is_draft', None), + is_favorite=d.get('is_favorite', None), + is_safe=d.get('is_safe', None), + last_modified_by=_from_dict(d, 'last_modified_by', User), + last_modified_by_id=d.get('last_modified_by_id', None), + latest_query_data_id=d.get('latest_query_data_id', None), + name=d.get('name', None), + options=_from_dict(d, 'options', QueryOptions), + parent=d.get('parent', None), + permission_tier=_enum(d, 'permission_tier', PermissionLevel), + query=d.get('query', None), + query_hash=d.get('query_hash', None), + run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), @@ -8666,4 +8702,4 @@ def update_permissions(self, f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body, headers=headers) - return WarehousePermissions.from_dict(res) + return WarehousePermissions.from_dict(res) \ No newline at end of file diff --git 
a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 6230b8199..697f0a5da 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -92,7 +92,7 @@ specified principal. :param workspace_id: int - The workspace ID for the account. + The workspace ID. :param principal_id: int The ID of the user, service principal, or group. :param permissions: List[:class:`WorkspacePermission`] (optional) diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst index acea442bb..940efa5dd 100644 --- a/docs/workspace/dashboards/index.rst +++ b/docs/workspace/dashboards/index.rst @@ -10,4 +10,4 @@ Manage Lakeview dashboards genie lakeview lakeview_embedded - query_execution + query_execution \ No newline at end of file diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index f6bfe82f4..687976f5d 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -41,9 +41,6 @@ throughput endpoints are currently supported. :param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. - :param ai_gateway: :class:`AiGatewayConfig` (optional) - The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are - supported as of now. :param rate_limits: List[:class:`RateLimit`] (optional) Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. 
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 72e1e9a60..e503da073 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -48,4 +48,4 @@ def test_http_request(w, requests_mock): assert requests_mock.call_count == 1 assert requests_mock.called assert response.status_code == 200 # Verify the response status - assert (response.text == "The request was successful") # Ensure the response body matches the mocked data + assert (response.text == "The request was successful") # Ensure the response body matches the mocked data \ No newline at end of file From f268fa33276c15a9d94333e257643823fd592a39 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Fri, 14 Feb 2025 18:17:25 -0700 Subject: [PATCH 06/12] more diff --- .codegen/_openapi_sha | 2 +- databricks/sdk/service/sql.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 2a9a021e0..562b72fcc 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -99f644e72261ef5ecf8d74db20f4b7a1e09723cc +99f644e72261ef5ecf8d74db20f4b7a1e09723cc \ No newline at end of file diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index bc3c03d31..cfa94aaa7 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -8702,4 +8702,4 @@ def update_permissions(self, f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body, headers=headers) - return WarehousePermissions.from_dict(res) \ No newline at end of file + return WarehousePermissions.from_dict(res) From c19c4a5c60c40cdf2eeeee30ce755fc70d0095c7 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:01:31 -0700 Subject: [PATCH 07/12] update push workflow --- .github/workflows/push.yml | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index a839096c0..4f8881465 100644 --- a/.github/workflows/push.yml +++ 
b/.github/workflows/push.yml @@ -10,7 +10,7 @@ on: - main jobs: - tests-ubuntu: + tests: uses: ./.github/workflows/test.yml strategy: fail-fast: false @@ -19,16 +19,6 @@ jobs: with: os: ubuntu-latest pyVersion: ${{ matrix.pyVersion }} - - tests-windows: - uses: ./.github/workflows/test.yml - strategy: - fail-fast: false - matrix: - pyVersion: [ '3.9', '3.10', '3.11', '3.12' ] - with: - os: windows-latest - pyVersion: ${{ matrix.pyVersion }} fmt: runs-on: ubuntu-latest From 2402560cc3708944b23b3af5a4b3f8a8c1f6295c Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:03:00 -0700 Subject: [PATCH 08/12] update push workflow again --- .github/workflows/push.yml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 4f8881465..80dc449a1 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -16,9 +16,20 @@ jobs: fail-fast: false matrix: pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ] - with: - os: ubuntu-latest - pyVersion: ${{ matrix.pyVersion }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Unshallow + run: git fetch --prune --unshallow + + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.pyVersion }} + + - name: Run tests + run: make dev install test fmt: runs-on: ubuntu-latest From 2a6e44b3849c391d3fb33c4e976a0f86b0a4e9b3 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:04:55 -0700 Subject: [PATCH 09/12] test change --- .github/workflows/push.yml | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 80dc449a1..f524bdcd3 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ] + pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', 
'3.12' ] runs-on: ubuntu-latest steps: - name: Checkout @@ -30,16 +30,3 @@ jobs: - name: Run tests run: make dev install test - - fmt: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Format all files - run: make dev fmt - - - name: Fail on differences - run: git diff --exit-code From d36c2de38867f19f6850db8c8d9c8f528dad2e85 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:06:03 -0700 Subject: [PATCH 10/12] fix tests --- .github/workflows/push.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index f524bdcd3..1c71fcd9e 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -11,7 +11,6 @@ on: jobs: tests: - uses: ./.github/workflows/test.yml strategy: fail-fast: false matrix: From 5fe847c3e166a0cc87f1338a1e1f4fc0d984cf30 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:07:04 -0700 Subject: [PATCH 11/12] remove test version --- .github/workflows/push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 1c71fcd9e..f76e696f2 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -14,7 +14,7 @@ jobs: strategy: fail-fast: false matrix: - pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] + pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ] runs-on: ubuntu-latest steps: - name: Checkout From e10818e163529fa6e737a544a23305b42f110407 Mon Sep 17 00:00:00 2001 From: CaymanWilliams Date: Sat, 15 Feb 2025 13:11:09 -0700 Subject: [PATCH 12/12] retry --- .github/workflows/push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index f76e696f2..bef41718f 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -14,8 +14,8 @@ jobs: strategy: fail-fast: false matrix: - pyVersion: [ '3.8', '3.9', '3.10', '3.11', 
'3.12' ] - runs-on: ubuntu-latest + pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] + runs-on: ubuntu-22.04 steps: - name: Checkout uses: actions/checkout@v4