diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 00e5d84f9..a2ba58aa5 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -5285ce76f81314f342c1702d5c2ad4ef42488781 \ No newline at end of file +f2385add116e3716c8a90a0b68e204deb40f996c \ No newline at end of file diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 4123ea08c..a08a7e66d 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -52,6 +52,8 @@ class App: resources: Optional[List[AppResource]] = None """Resources for the app.""" + service_principal_client_id: Optional[str] = None + service_principal_id: Optional[int] = None service_principal_name: Optional[str] = None @@ -79,6 +81,8 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() if self.resources: body['resources'] = [v.as_dict() for v in self.resources] + if self.service_principal_client_id is not None: + body['service_principal_client_id'] = self.service_principal_client_id if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name @@ -100,6 +104,7 @@ def from_dict(cls, d: Dict[str, any]) -> App: name=d.get('name', None), pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), resources=_repeated_dict(d, 'resources', AppResource), + service_principal_client_id=d.get('service_principal_client_id', None), service_principal_id=d.get('service_principal_id', None), service_principal_name=d.get('service_principal_name', None), update_time=d.get('update_time', None), @@ -798,7 +803,7 @@ def create(self, *, app: Optional[App] = None) -> Wait[App]: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. """ - body = app + body = app.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers) @@ -836,7 +841,7 @@ def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = Non Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. """ - body = app_deployment + body = app_deployment.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } op_response = self._api.do('POST', @@ -1053,12 +1058,13 @@ def update(self, name: str, *, app: Optional[App] = None) -> App: Updates the app with the supplied name. :param name: str - The name of the app. + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. :param app: :class:`App` (optional) :returns: :class:`App` """ - body = app + body = app.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers) diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index cfb7ba0b4..8375a2629 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1121,7 +1121,7 @@ def get(self, budget_id: str) -> GetBudgetConfigurationResponse: Gets a budget configuration for an account. Both account and budget configuration are specified by ID. 
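The apps changes above add service principal identifiers to App and switch the create/deploy/update requests to app.as_dict() serialization. A minimal sketch of reading the new fields, assuming a WorkspaceClient configured from the environment; the app name is illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.apps import App

    w = WorkspaceClient()

    # create() now serializes the App dataclass via as_dict() before sending it.
    app = w.apps.create_and_wait(app=App(name="my-app"))

    # New server-populated fields on App:
    print(app.service_principal_id)         # numeric service principal ID
    print(app.service_principal_client_id)  # OAuth client ID of the app's service principal
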
:param budget_id: str - The Databricks budget configuration ID. + The budget configuration ID :returns: :class:`GetBudgetConfigurationResponse` """ diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 3943608ef..d24ad54e6 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -415,7 +415,7 @@ def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken: class AzureManagedIdentity: """The Azure managed identity configuration.""" - access_connector_id: Optional[str] = None + access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" @@ -508,6 +508,8 @@ def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse: @dataclass class AzureServicePrincipal: + """The Azure service principal configuration.""" + directory_id: str """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.""" @@ -1161,22 +1163,31 @@ def from_dict(cls, d: Dict[str, any]) -> CreateConnection: @dataclass class CreateCredentialRequest: + name: str + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + aws_iam_role: Optional[AwsIamRole] = None """The AWS IAM role configuration""" azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" - name: Optional[str] = None - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" + gcp_service_account_key: Optional[GcpServiceAccountKey] = None purpose: Optional[CredentialPurpose] = None """Indicates the purpose of the credential.""" + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" + skip_validation: Optional[bool] = None """Optional. 
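Two of the catalog changes above tighten construction: AzureManagedIdentity.access_connector_id is now required, and name is the only required field on CreateCredentialRequest. A sketch under those constraints; every resource name and GUID below is a placeholder:

    from databricks.sdk.service.catalog import (AzureManagedIdentity, CreateCredentialRequest,
                                                CredentialPurpose)

    # access_connector_id is now a required field (previously Optional[str]).
    identity = AzureManagedIdentity(
        access_connector_id="/subscriptions/00000000-0000-0000-0000-000000000000"
                            "/resourceGroups/my-rg/providers/Microsoft.Databricks"
                            "/accessConnectors/my-connector")

    # A read-only storage credential request; STORAGE is new alongside SERVICE.
    request = CreateCredentialRequest(name="my-storage-cred",
                                      azure_managed_identity=identity,
                                      purpose=CredentialPurpose.STORAGE,
                                      read_only=True)
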
Supplying true to this argument skips validation of the created set of credentials.""" @@ -1185,9 +1196,14 @@ def as_dict(self) -> dict: body = {} if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body['azure_service_principal'] = self.azure_service_principal.as_dict() if self.comment is not None: body['comment'] = self.comment + if self.gcp_service_account_key: + body['gcp_service_account_key'] = self.gcp_service_account_key.as_dict() if self.name is not None: body['name'] = self.name if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body @@ -1196,9 +1212,12 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest: """Deserializes the CreateCredentialRequest from a dictionary.""" return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), + gcp_service_account_key=_from_dict(d, 'gcp_service_account_key', GcpServiceAccountKey), name=d.get('name', None), purpose=_enum(d, 'purpose', CredentialPurpose), + read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) @@ -1796,6 +1815,9 @@ class CredentialInfo: azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" @@ -1827,17 +1849,27 @@ class CredentialInfo: purpose: Optional[CredentialPurpose] = None """Indicates the purpose of the credential.""" + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" + updated_at: Optional[int] = None """Time at which this credential was last modified, in epoch milliseconds.""" updated_by: Optional[str] = None """Username of user who last modified the credential.""" + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. 
Only applicable when + purpose is **STORAGE**.""" + def as_dict(self) -> dict: """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body['azure_service_principal'] = self.azure_service_principal.as_dict() if self.comment is not None: body['comment'] = self.comment if self.created_at is not None: body['created_at'] = self.created_at if self.created_by is not None: body['created_by'] = self.created_by @@ -1848,8 +1880,11 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.owner is not None: body['owner'] = self.owner if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only if self.updated_at is not None: body['updated_at'] = self.updated_at if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not None: + body['used_for_managed_storage'] = self.used_for_managed_storage return body @classmethod @@ -1857,6 +1892,7 @@ def from_dict(cls, d: Dict[str, any]) -> CredentialInfo: """Deserializes the CredentialInfo from a dictionary.""" return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), @@ -1867,13 +1903,16 @@ def from_dict(cls, d: Dict[str, any]) -> CredentialInfo: name=d.get('name', None), owner=d.get('owner', None), purpose=_enum(d, 'purpose', CredentialPurpose), + read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) + updated_by=d.get('updated_by', None), + used_for_managed_storage=d.get('used_for_managed_storage', None)) class CredentialPurpose(Enum): SERVICE = 'SERVICE' + STORAGE = 'STORAGE' class CredentialType(Enum): @@ -2751,6 +2790,35 @@ def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken: return cls(oauth_token=d.get('oauth_token', None)) +@dataclass +class GcpServiceAccountKey: + """GCP long-lived credential. 
GCP Service Account.""" + + email: Optional[str] = None + """The email of the service account.""" + + private_key: Optional[str] = None + """The service account's RSA private key.""" + + private_key_id: Optional[str] = None + """The ID of the service account's private key.""" + + def as_dict(self) -> dict: + """Serializes the GcpServiceAccountKey into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email is not None: body['email'] = self.email + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_id is not None: body['private_key_id'] = self.private_key_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GcpServiceAccountKey: + """Deserializes the GcpServiceAccountKey from a dictionary.""" + return cls(email=d.get('email', None), + private_key=d.get('private_key', None), + private_key_id=d.get('private_key_id', None)) + + @dataclass class GenerateTemporaryServiceCredentialAzureOptions: """Options to customize the requested temporary credential""" @@ -2774,12 +2842,12 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzure @dataclass class GenerateTemporaryServiceCredentialRequest: + credential_name: str + """The name of the service credential used to generate a temporary credential""" + azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None """Options to customize the requested temporary credential""" - credential_name: Optional[str] = None - """The name of the service credential used to generate a temporary credential""" - def as_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} @@ -5661,11 +5729,15 @@ class UpdateCredentialRequest: azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" force: Optional[bool] = None - """Force update even if there are dependent services.""" + """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**).""" isolation_mode: Optional[IsolationMode] = None """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" @@ -5679,6 +5751,10 @@ class UpdateCredentialRequest: owner: Optional[str] = None """Username of current owner of credential.""" + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + skip_validation: Optional[bool] = None """Supply true to this argument to skip validation of the updated credential.""" @@ -5687,12 +5763,15 @@ def as_dict(self) -> dict: body = {} if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body['azure_service_principal'] = self.azure_service_principal.as_dict() if self.comment is not None: body['comment'] = self.comment if self.force is not None: body['force'] = self.force if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.name_arg is not None: body['name_arg'] = self.name_arg if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body @@ -5701,12 +5780,14 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest: """Deserializes the UpdateCredentialRequest from a dictionary.""" return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), + azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name_arg=d.get('name_arg', None), new_name=d.get('new_name', None), owner=d.get('owner', None), + read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) @@ -6310,16 +6391,31 @@ class ValidateCredentialRequest: credential_name: Optional[str] = None """Required. The name of an existing credential or long-lived cloud credential to validate.""" + external_location_name: Optional[str] = None + """The name of an existing external location to validate. Only applicable for storage credentials + (purpose is **STORAGE**.)""" + purpose: Optional[CredentialPurpose] = None """The purpose of the credential. This should only be used when the credential is specified.""" + read_only: Optional[bool] = None + """Whether the credential is only usable for read operations. Only applicable for storage + credentials (purpose is **STORAGE**.)""" + + url: Optional[str] = None + """The external location url to validate. 
Only applicable when purpose is **STORAGE**.""" + def as_dict(self) -> dict: """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.external_location_name is not None: + body['external_location_name'] = self.external_location_name if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only + if self.url is not None: body['url'] = self.url return body @classmethod @@ -6328,24 +6424,33 @@ def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest: return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), credential_name=d.get('credential_name', None), - purpose=_enum(d, 'purpose', CredentialPurpose)) + external_location_name=d.get('external_location_name', None), + purpose=_enum(d, 'purpose', CredentialPurpose), + read_only=d.get('read_only', None), + url=d.get('url', None)) @dataclass class ValidateCredentialResponse: + is_dir: Optional[bool] = None + """Whether the tested location is a directory in cloud storage. Only applicable for when purpose is + **STORAGE**.""" + results: Optional[List[CredentialValidationResult]] = None """The results of the validation check.""" def as_dict(self) -> dict: """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.is_dir is not None: body['isDir'] = self.is_dir if self.results: body['results'] = [v.as_dict() for v in self.results] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse: """Deserializes the ValidateCredentialResponse from a dictionary.""" - return cls(results=_repeated_dict(d, 'results', CredentialValidationResult)) + return cls(is_dir=d.get('isDir', None), + results=_repeated_dict(d, 'results', CredentialValidationResult)) class ValidateCredentialResult(Enum): @@ -7405,28 +7510,41 @@ def __init__(self, api_client): self._api = api_client def create_credential(self, + name: str, *, aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, - name: Optional[str] = None, + gcp_service_account_key: Optional[GcpServiceAccountKey] = None, purpose: Optional[CredentialPurpose] = None, + read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: """Create a credential. - Creates a new credential. + Creates a new credential. The type of credential to be created is determined by the **purpose** field, + which should be either **SERVICE** or **STORAGE**. + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for + storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. + + :param name: str + The credential name. The name must be unique among storage and service credentials within the + metastore. :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. 
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
         :param comment: str (optional)
           Comment associated with the credential.
-        :param name: str (optional)
-          The credential name. The name must be unique among storage and service credentials within the
-          metastore.
+        :param gcp_service_account_key: :class:`GcpServiceAccountKey` (optional)
         :param purpose: :class:`CredentialPurpose` (optional)
           Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
         :param skip_validation: bool (optional)
           Optional. Supplying true to this argument skips validation of the created set of credentials.

@@ -7436,9 +7554,14 @@ def create_credential(self,
         if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
             body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
+        if gcp_service_account_key is not None:
+            body['gcp_service_account_key'] = gcp_service_account_key.as_dict()
         if name is not None: body['name'] = name
         if purpose is not None: body['purpose'] = purpose.value
+        if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

@@ -7448,12 +7571,14 @@ def create_credential(self,
     def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         """Delete a credential.

-        Deletes a credential from the metastore. The caller must be an owner of the credential.
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.

         :param name_arg: str
           Name of the credential.
         :param force: bool (optional)
-          Force deletion even if there are dependent services.
+          Force deletion even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).


@@ -7465,19 +7590,20 @@ def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)

     def generate_temporary_service_credential(
-            self,
-            *,
-            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
-            credential_name: Optional[str] = None) -> TemporaryCredentials:
+            self,
+            credential_name: str,
+            *,
+            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
+    ) -> TemporaryCredentials:
         """Generate a temporary service credential.

         Returns a set of temporary credentials generated using the specified service credential. The caller
         must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.

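A minimal sketch of calling create_credential with the new storage-credential fields, assuming the service is exposed as w.credentials; the role ARN and names are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

    w = WorkspaceClient()
    cred = w.credentials.create_credential(
        name="my-read-only-cred",                    # now a required parameter
        aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/my-role"),
        purpose=CredentialPurpose.STORAGE,           # STORAGE is new alongside SERVICE
        read_only=True,                              # only meaningful for STORAGE
        skip_validation=False)
    print(cred.used_for_managed_storage)             # new field on CredentialInfo
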
+ :param credential_name: str + The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) Options to customize the requested temporary credential - :param credential_name: str (optional) - The name of the service credential used to generate a temporary credential :returns: :class:`TemporaryCredentials` """ @@ -7495,8 +7621,8 @@ def generate_temporary_service_credential( def get_credential(self, name_arg: str) -> CredentialInfo: """Get a credential. - Gets a credential from the metastore. The caller must be a metastore admin, the owner of the - credential, or have any permission on the credential. + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the + owner of the credential, or have any permission on the credential. :param name_arg: str Name of the credential. @@ -7555,15 +7681,17 @@ def update_credential(self, *, aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: """Update a credential. - Updates a credential on the metastore. + Updates a service or storage credential on the metastore. The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. @@ -7574,16 +7702,22 @@ def update_credential(self, The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. + :param azure_service_principal: :class:`AzureServicePrincipal` (optional) + The Azure service principal configuration. :param comment: str (optional) Comment associated with the credential. :param force: bool (optional) - Force update even if there are dependent services. + Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent + external locations and external tables (when purpose is **STORAGE**). :param isolation_mode: :class:`IsolationMode` (optional) Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name of credential. :param owner: str (optional) Username of current owner of credential. + :param read_only: bool (optional) + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. :param skip_validation: bool (optional) Supply true to this argument to skip validation of the updated credential. 
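With credential_name now required and positional in generate_temporary_service_credential, a call can be as short as this sketch (the credential name is illustrative):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # credential_name moved from an optional keyword to a required positional parameter.
    temp = w.credentials.generate_temporary_service_credential("my-service-cred")
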
@@ -7593,11 +7727,14 @@ def update_credential(self,
         if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
             body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
         if force is not None: body['force'] = force
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
+        if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

@@ -7612,14 +7749,25 @@ def validate_credential(self,
                             aws_iam_role: Optional[AwsIamRole] = None,
                             azure_managed_identity: Optional[AzureManagedIdentity] = None,
                             credential_name: Optional[str] = None,
-                            purpose: Optional[CredentialPurpose] = None) -> ValidateCredentialResponse:
+                            external_location_name: Optional[str] = None,
+                            purpose: Optional[CredentialPurpose] = None,
+                            read_only: Optional[bool] = None,
+                            url: Optional[str] = None) -> ValidateCredentialResponse:
         """Validate a credential.

         Validates a credential.

-        Either the __credential_name__ or the cloud-specific credential must be provided.
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.

-        The caller must be a metastore admin or the credential owner.
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ needs to be provided. If only one of them is provided, it will be used for validation. If
+        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
+        credential must be provided.
+
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).

         :param aws_iam_role: :class:`AwsIamRole` (optional)
           The AWS IAM role configuration
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
           The Azure managed identity configuration.
         :param credential_name: str (optional)
           Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**).
         :param purpose: :class:`CredentialPurpose` (optional)
           The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**).
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.

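A sketch of validating a storage credential against a cloud storage path with the new parameters; the credential name and S3 path are placeholders, and the w.credentials accessor is assumed:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import CredentialPurpose

    w = WorkspaceClient()
    # When both url and external_location_name are given, url wins (see above).
    resp = w.credentials.validate_credential(
        credential_name="my-storage-cred",
        purpose=CredentialPurpose.STORAGE,
        url="s3://my-bucket/prefix",
        read_only=True)
    print(resp.is_dir, resp.results)  # is_dir is new in the response
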
:returns: :class:`ValidateCredentialResponse` """ @@ -7637,7 +7793,10 @@ def validate_credential(self, if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() if credential_name is not None: body['credential_name'] = credential_name + if external_location_name is not None: body['external_location_name'] = external_location_name if purpose is not None: body['purpose'] = purpose.value + if read_only is not None: body['read_only'] = read_only + if url is not None: body['url'] = url headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers) @@ -8640,7 +8799,7 @@ def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. """ - body = table + body = table.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers) @@ -10128,6 +10287,7 @@ def list(self, max_results: Optional[int] = None, omit_columns: Optional[bool] = None, omit_properties: Optional[bool] = None, + omit_username: Optional[bool] = None, page_token: Optional[str] = None) -> Iterator[TableInfo]: """List tables. @@ -10157,6 +10317,9 @@ def list(self, Whether to omit the columns of the table from the response or not. :param omit_properties: bool (optional) Whether to omit the properties of the table from the response or not. + :param omit_username: bool (optional) + Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or + not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). @@ -10172,6 +10335,7 @@ def list(self, if max_results is not None: query['max_results'] = max_results if omit_columns is not None: query['omit_columns'] = omit_columns if omit_properties is not None: query['omit_properties'] = omit_properties + if omit_username is not None: query['omit_username'] = omit_username if page_token is not None: query['page_token'] = page_token if schema_name is not None: query['schema_name'] = schema_name headers = {'Accept': 'application/json', } diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 1b02d8c89..5f9fe2c2c 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1144,7 +1144,7 @@ def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard: :returns: :class:`Dashboard` """ - body = dashboard + body = dashboard.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers) @@ -1159,7 +1159,7 @@ def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = N :returns: :class:`Schedule` """ - body = schedule + body = schedule.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', @@ -1183,7 +1183,7 @@ def create_subscription(self, :returns: :class:`Subscription` """ - body = subscription + body = subscription.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do( @@ -1269,7 +1269,7 @@ def get_published(self, dashboard_id: str) -> PublishedDashboard: Get the current published dashboard. 
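The omit_username flag added to TablesAPI.list above trims owner, created_by, and updated_by from each TableInfo, which can shrink large list responses. A sketch with an illustrative catalog and schema:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for table in w.tables.list(catalog_name="main",
                               schema_name="default",
                               omit_username=True):
        print(table.full_name)
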
:param dashboard_id: str
-            UUID identifying the dashboard to be published.
+            UUID identifying the published dashboard.

         :returns: :class:`PublishedDashboard`
         """
@@ -1364,7 +1364,7 @@ def list_schedules(self,
         """List dashboard schedules.

         :param dashboard_id: str
-            UUID identifying the dashboard to which the schedule belongs.
+            UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -1400,9 +1400,9 @@ def list_subscriptions(self,
         """List schedule subscriptions.

         :param dashboard_id: str
-            UUID identifying the dashboard to which the subscription belongs.
+            UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-            UUID identifying the schedule to which the subscription belongs.
+            UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -1508,7 +1508,7 @@ def unpublish(self, dashboard_id: str):
         Unpublish the dashboard.

         :param dashboard_id: str
-            UUID identifying the dashboard to be published.
+            UUID identifying the published dashboard.


         """
@@ -1528,7 +1528,7 @@ def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) ->
         :returns: :class:`Dashboard`
         """
-        body = dashboard
+        body = dashboard.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('PATCH',
@@ -1552,7 +1552,7 @@ def update_schedule(self,
         :returns: :class:`Schedule`
         """
-        body = schedule
+        body = schedule.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }

         res = self._api.do('PUT',
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 05d1ccce3..fc0122b2b 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1150,7 +1150,7 @@ class UpdateWorkspaceAssignments:
     """The ID of the user, service principal, or group."""

     workspace_id: Optional[int] = None
-    """The workspace ID for the account."""
+    """The workspace ID."""

     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -3385,7 +3385,7 @@ def update(self,
         specified principal.

         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 82d3bac65..ab485b33c 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -574,8 +574,7 @@ class CreateJob:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.

-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""

     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1752,8 +1751,7 @@ class JobRunAs:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.

- Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, - an error is thrown.""" + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" service_principal_name: Optional[str] = None """Application ID of an active service principal. Setting this field requires the @@ -1861,8 +1859,7 @@ class JobSettings: """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If not specified, the job/pipeline runs as the user who created the job/pipeline. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, - an error is thrown.""" + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -3371,6 +3368,10 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" + only: Optional[List[str]] = None + """A list of task keys to run inside of the job. If this field is not provided, all tasks in the + job will be run.""" + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" @@ -3425,6 +3426,7 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = self.job_parameters if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.only: body['only'] = [v for v in self.only] if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() if self.python_named_params: body['python_named_params'] = self.python_named_params if self.python_params: body['python_params'] = [v for v in self.python_params] @@ -3442,6 +3444,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunNow: job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), + only=d.get('only', None), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), @@ -5754,8 +5757,7 @@ def create(self, Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If not specified, the job/pipeline runs as the user who created the job/pipeline. - Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an - error is thrown. + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. 
@@ -6275,6 +6277,7 @@ def run_now(self, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str, str]] = None, notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str, str]] = None, python_params: Optional[List[str]] = None, @@ -6331,6 +6334,9 @@ def run_now(self, [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html + :param only: List[str] (optional) + A list of task keys to run inside of the job. If this field is not provided, all tasks in the job + will be run. :param pipeline_params: :class:`PipelineParams` (optional) Controls whether the pipeline should perform a full refresh :param python_named_params: Dict[str,str] (optional) @@ -6382,6 +6388,7 @@ def run_now(self, if job_id is not None: body['job_id'] = job_id if job_parameters is not None: body['job_parameters'] = job_parameters if notebook_params is not None: body['notebook_params'] = notebook_params + if only is not None: body['only'] = [v for v in only] if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() if python_named_params is not None: body['python_named_params'] = python_named_params if python_params is not None: body['python_params'] = [v for v in python_params] @@ -6403,6 +6410,7 @@ def run_now_and_wait(self, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str, str]] = None, notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str, str]] = None, python_params: Optional[List[str]] = None, @@ -6416,6 +6424,7 @@ def run_now_and_wait(self, job_id=job_id, job_parameters=job_parameters, notebook_params=notebook_params, + only=only, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 01edcdf50..7bfc8fe1a 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -666,6 +666,7 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str + The OAuth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` """ diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 26461d088..137ab3c21 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -61,7 +61,7 @@ class CreatePipeline: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this pipeline.""" @@ -82,6 +82,9 @@ class CreatePipeline: photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. 
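A sketch of the new only parameter on run_now, which restricts the run to the named task keys; the job ID and task keys are illustrative:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Run only two of the job's tasks; all other tasks are skipped.
    run = w.jobs.run_now_and_wait(job_id=123,
                                  only=["ingest", "transform"])
    print(run.state)
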
The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -122,6 +125,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -151,6 +155,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -285,7 +290,7 @@ class EditPipeline: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this pipeline.""" @@ -309,6 +314,9 @@ class EditPipeline: pipeline_id: Optional[str] = None """Unique identifier for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -351,6 +359,7 @@ def as_dict(self) -> dict: if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -381,6 +390,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline: notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), pipeline_id=d.get('pipeline_id', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -588,13 +598,13 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse: @dataclass class IngestionConfig: report: Optional[ReportSpec] = None - """Select tables from a specific source report.""" + """Select a specific source report.""" schema: Optional[SchemaSpec] = None - """Select tables from a specific source schema.""" + """Select all tables from a specific source schema.""" table: Optional[TableSpec] = None - """Select tables from a specific source table.""" + """Select a specific source table.""" def as_dict(self) -> dict: """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body.""" @@ -615,11 +625,11 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionConfig: @dataclass class IngestionGatewayPipelineDefinition: connection_id: Optional[str] = None - """[Deprecated, use connection_name instead] Immutable. 
The Unity Catalog connection this gateway - pipeline uses to communicate with the source.""" + """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this + gateway pipeline uses to communicate with the source.""" connection_name: Optional[str] = None - """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the + """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" gateway_storage_catalog: Optional[str] = None @@ -658,12 +668,12 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition: @dataclass class IngestionPipelineDefinition: connection_name: Optional[str] = None - """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - source. Specify either ingestion_gateway_id or connection_name.""" + """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with + the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" ingestion_gateway_id: Optional[str] = None - """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - with the source. Specify either ingestion_gateway_id or connection_name.""" + """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate + with the source database. This is used with connectors to databases like SQL Server.""" objects: Optional[List[IngestionConfig]] = None """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" @@ -1450,7 +1460,7 @@ class PipelineSpec: """Filters on which Pipeline packages to include in the deployed graph.""" gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support CDC.""" + """The definition of a gateway pipeline to support change data capture.""" id: Optional[str] = None """Unique identifier for this pipeline.""" @@ -1471,6 +1481,9 @@ class PipelineSpec: photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" + restart_window: Optional[RestartWindow] = None + """Restart window of this pipeline.""" + schema: Optional[str] = None """The default schema (database) where tables are read from or published to. 
The presence of this field implies that the pipeline is in direct publishing mode.""" @@ -1509,6 +1522,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] if self.photon is not None: body['photon'] = self.photon + if self.restart_window: body['restart_window'] = self.restart_window.as_dict() if self.schema is not None: body['schema'] = self.schema if self.serverless is not None: body['serverless'] = self.serverless if self.storage is not None: body['storage'] = self.storage @@ -1536,6 +1550,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), + restart_window=_from_dict(d, 'restart_window', RestartWindow), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), @@ -1674,6 +1689,50 @@ def from_dict(cls, d: Dict[str, any]) -> ReportSpec: table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) +@dataclass +class RestartWindow: + start_hour: int + """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. + Continuous pipeline restart is triggered only within a five-hour window starting at this hour.""" + + days_of_week: Optional[RestartWindowDaysOfWeek] = None + """Days of week in which the restart is allowed to happen (within a five-hour window starting at + start_hour). If not specified all days of the week will be used.""" + + time_zone_id: Optional[str] = None + """Time zone id of restart window. See + https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html + for details. If not specified, UTC will be used.""" + + def as_dict(self) -> dict: + """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.days_of_week is not None: body['days_of_week'] = self.days_of_week.value + if self.start_hour is not None: body['start_hour'] = self.start_hour + if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> RestartWindow: + """Deserializes the RestartWindow from a dictionary.""" + return cls(days_of_week=_enum(d, 'days_of_week', RestartWindowDaysOfWeek), + start_hour=d.get('start_hour', None), + time_zone_id=d.get('time_zone_id', None)) + + +class RestartWindowDaysOfWeek(Enum): + """Days of week in which the restart is allowed to happen (within a five-hour window starting at + start_hour). If not specified all days of the week will be used.""" + + FRIDAY = 'FRIDAY' + MONDAY = 'MONDAY' + SATURDAY = 'SATURDAY' + SUNDAY = 'SUNDAY' + THURSDAY = 'THURSDAY' + TUESDAY = 'TUESDAY' + WEDNESDAY = 'WEDNESDAY' + + @dataclass class SchemaSpec: destination_catalog: Optional[str] = None @@ -2211,6 +2270,7 @@ def create(self, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, @@ -2247,7 +2307,7 @@ def create(self, :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. 
+ The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -2261,6 +2321,8 @@ def create(self, List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. @@ -2296,6 +2358,7 @@ def create(self, if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] if photon is not None: body['photon'] = photon + if restart_window is not None: body['restart_window'] = restart_window.as_dict() if schema is not None: body['schema'] = schema if serverless is not None: body['serverless'] = serverless if storage is not None: body['storage'] = storage @@ -2629,6 +2692,7 @@ def update(self, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, @@ -2668,7 +2732,7 @@ def update(self, :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -2682,6 +2746,8 @@ def update(self, List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. @@ -2717,6 +2783,7 @@ def update(self, if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] if photon is not None: body['photon'] = photon + if restart_window is not None: body['restart_window'] = restart_window.as_dict() if schema is not None: body['schema'] = schema if serverless is not None: body['serverless'] = serverless if storage is not None: body['storage'] = storage diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index b1d825d1a..d108f7984 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1245,6 +1245,10 @@ class UpdateWorkspaceRequest: customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID.""" + private_access_settings_id: Optional[str] = None + """The ID of the workspace's private access settings configuration object. 
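A sketch of attaching the new RestartWindow to a pipeline via update; the pipeline ID is illustrative, and days_of_week takes a single enum value per the dataclass above:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.pipelines import (RestartWindow,
                                                  RestartWindowDaysOfWeek)

    w = WorkspaceClient()
    # Allow automatic restarts of a continuous pipeline only in the
    # five-hour window starting Sunday 01:00 UTC.
    w.pipelines.update(
        pipeline_id="abc-123",
        restart_window=RestartWindow(start_hour=1,
                                     days_of_week=RestartWindowDaysOfWeek.SUNDAY,
                                     time_zone_id="UTC"))
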
This parameter is + available only for updating failed workspaces.""" + storage_configuration_id: Optional[str] = None """The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces.""" @@ -1267,6 +1271,8 @@ def as_dict(self) -> dict: if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id if self.network_id is not None: body['network_id'] = self.network_id + if self.private_access_settings_id is not None: + body['private_access_settings_id'] = self.private_access_settings_id if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: @@ -1284,6 +1290,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest: None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), network_id=d.get('network_id', None), + private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None)) @@ -2706,6 +2713,7 @@ def update(self, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, + private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: """Update workspace configuration. @@ -2824,6 +2832,9 @@ def update(self, The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID. + :param private_access_settings_id: str (optional) + The ID of the workspace's private access settings configuration object. This parameter is available + only for updating failed workspaces. :param storage_configuration_id: str (optional) The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces. 
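A sketch of the new private_access_settings_id parameter on workspace update, assuming an AccountClient and illustrative IDs; per the docstring above, it applies only to failed workspaces:

    from databricks.sdk import AccountClient

    a = AccountClient()
    # Swap the private access settings object of a failed workspace.
    a.workspaces.update_and_wait(workspace_id=1234567890,
                                 private_access_settings_id="pas-xyz")
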
@@ -2844,6 +2855,8 @@ def update(self,
         if network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = network_connectivity_config_id
         if network_id is not None: body['network_id'] = network_id
+        if private_access_settings_id is not None:
+            body['private_access_settings_id'] = private_access_settings_id
         if storage_configuration_id is not None:
             body['storage_configuration_id'] = storage_configuration_id
         if storage_customer_managed_key_id is not None:
             body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
@@ -2867,6 +2880,7 @@ def update_and_wait(
                         managed_services_customer_managed_key_id: Optional[str] = None,
                         network_connectivity_config_id: Optional[str] = None,
                         network_id: Optional[str] = None,
+                        private_access_settings_id: Optional[str] = None,
                         storage_configuration_id: Optional[str] = None,
                         storage_customer_managed_key_id: Optional[str] = None,
                         timeout=timedelta(minutes=20)) -> Workspace:
@@ -2876,6 +2890,7 @@ def update_and_wait(
                            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
                            network_connectivity_config_id=network_connectivity_config_id,
                            network_id=network_id,
+                           private_access_settings_id=private_access_settings_id,
                            storage_configuration_id=storage_configuration_id,
                            storage_customer_managed_key_id=storage_customer_managed_key_id,
                            workspace_id=workspace_id).result(timeout=timeout)
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index 607cc3085..d6294b261 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -2943,6 +2943,7 @@ class UpdateNotificationDestinationRequest:
     """The display name for the notification destination."""

     id: Optional[str] = None
+    """UUID identifying the notification destination."""

     def as_dict(self) -> dict:
         """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
@@ -4670,6 +4671,7 @@ def update(self,
         required in the request body.

         :param id: str
+          UUID identifying the notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
@@ -4984,7 +4986,7 @@ def delete(self, token_id: str):
         Deletes a token, specified by its ID.

         :param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.

         """
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 091fa9e82..13cba2ccf 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -984,6 +984,8 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
 class SharedDataObjectDataObjectType(Enum):
     """The type of the data object."""

+    FEATURE_SPEC = 'FEATURE_SPEC'
+    FUNCTION = 'FUNCTION'
     MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
     MODEL = 'MODEL'
     NOTEBOOK_FILE = 'NOTEBOOK_FILE'
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 01c463a0d..29380d4f3 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -684,6 +684,7 @@ class ImportFormat(Enum):
     DBC = 'DBC'
     HTML = 'HTML'
     JUPYTER = 'JUPYTER'
+    RAW = 'RAW'
     R_MARKDOWN = 'R_MARKDOWN'
     SOURCE = 'SOURCE'
@@ -1799,7 +1800,7 @@ def delete(self, repo_id: int):
         Deletes the specified repo.

         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID of the repo to delete.
""" diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index edba0a733..43c77d00b 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -115,7 +115,7 @@ Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str - The Databricks budget configuration ID. + The budget configuration ID :returns: :class:`GetBudgetConfigurationResponse` diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 6230b8199..697f0a5da 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -92,7 +92,7 @@ specified principal. :param workspace_id: int - The workspace ID for the account. + The workspace ID. :param principal_id: int The ID of the user, service principal, or group. :param permissions: List[:class:`WorkspacePermission`] (optional) diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 0dcc3d8e0..4192b2109 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -49,6 +49,7 @@ Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str + The OAuth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index fa1d130b1..ad8a75942 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -229,7 +229,7 @@ :returns: Iterator over :class:`Workspace` - .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] + .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace] Usage: @@ -372,6 +372,9 @@ The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID. + :param private_access_settings_id: str (optional) + The ID of the workspace's private access settings configuration object. This parameter is available + only for updating failed workspaces. :param storage_configuration_id: str (optional) The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces. @@ -384,7 +387,7 @@ See :method:wait_get_workspace_running for more details. - .. 
py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace + .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace .. py:method:: wait_get_workspace_running(workspace_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Workspace], None]]) -> Workspace diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 9f5fef3bc..19b245b25 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -455,6 +455,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SERVICE :value: "SERVICE" + .. py:attribute:: STORAGE + :value: "STORAGE" + .. py:class:: CredentialType The type of credential. @@ -715,6 +718,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GcpServiceAccountKey + :members: + :undoc-members: + .. autoclass:: GenerateTemporaryServiceCredentialAzureOptions :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 9f419f160..f82cd73c2 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -269,6 +269,35 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RestartWindow + :members: + :undoc-members: + +.. py:class:: RestartWindowDaysOfWeek + + Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified all days of the week will be used. + + .. py:attribute:: FRIDAY + :value: "FRIDAY" + + .. py:attribute:: MONDAY + :value: "MONDAY" + + .. py:attribute:: SATURDAY + :value: "SATURDAY" + + .. py:attribute:: SUNDAY + :value: "SUNDAY" + + .. py:attribute:: THURSDAY + :value: "THURSDAY" + + .. py:attribute:: TUESDAY + :value: "TUESDAY" + + .. py:attribute:: WEDNESDAY + :value: "WEDNESDAY" + .. autoclass:: SchemaSpec :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index cd4c2dcea..650811e08 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -262,6 +262,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo The type of the data object. + .. py:attribute:: FEATURE_SPEC + :value: "FEATURE_SPEC" + + .. py:attribute:: FUNCTION + :value: "FUNCTION" + .. py:attribute:: MATERIALIZED_VIEW :value: "MATERIALIZED_VIEW" diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index 9ff3eb66b..bd0785db4 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -157,6 +157,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: JUPYTER
      :value: "JUPYTER"

+   .. py:attribute:: RAW
+      :value: "RAW"
+
    .. py:attribute:: R_MARKDOWN
      :value: "R_MARKDOWN"
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index a24941242..40791a143 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -188,7 +188,8 @@
         Updates the app with the supplied name.

         :param name: str
-          The name of the app.
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
         :param app: :class:`App` (optional)

         :returns: :class:`App`
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 4cb458b46..15cfb1cac 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -100,7 +100,7 @@
         :returns: :class:`TableInfo`

-    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
+    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]

        Usage:

@@ -151,6 +151,9 @@
           Whether to omit the columns of the table from the response or not.
         :param omit_properties: bool (optional)
           Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username fields of the table (e.g. owner, updated_by, created_by) from the
+          response or not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).

diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index 0fe55542a..b8dceeb9e 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -93,7 +93,7 @@
         Get the current published dashboard.

         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.

         :returns: :class:`PublishedDashboard`

@@ -147,7 +147,7 @@
         List dashboard schedules.

         :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
+          UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -162,9 +162,9 @@
         List schedule subscriptions.

         :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
+          UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
+          UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -226,7 +226,7 @@
         Unpublish the dashboard.

         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
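
To illustrate the new `RAW` import format added above, a minimal sketch; the target path and payload are invented, and it is an assumption (not stated in this diff) that `RAW` stores the uploaded bytes verbatim rather than converting them into a notebook:

    import base64

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.workspace import ImportFormat

    w = WorkspaceClient()

    # The import API expects base64-encoded content regardless of format.
    w.workspace.import_(
        path='/Users/someone@example.com/payload.bin',  # placeholder path
        format=ImportFormat.RAW,
        content=base64.b64encode(b'arbitrary bytes').decode(),
        overwrite=True,
    )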
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 0c6d51439..e9e63bb20 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -2,7 +2,7 @@
 ================

 .. currentmodule:: databricks.sdk.service.jobs

-.. py:class:: JobsAPI
+.. py:class:: JobsExt

     The Jobs API allows you to create, edit, and delete jobs.

@@ -221,8 +221,7 @@
           Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as.
           If not specified, the job/pipeline runs as the user who created the job/pipeline.

-          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -382,7 +381,7 @@
         :returns: :class:`JobPermissions`

-    .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run
+    .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run

        Usage:

@@ -414,10 +413,9 @@
             # cleanup
             w.jobs.delete_run(run_id=run.run_id)

-        Get a single job run.
-
-        Retrieve the metadata of a run.
+        This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
+        it will paginate through all pages and aggregate the results.
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
         :param include_history: bool (optional)
@@ -425,9 +423,8 @@
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
-          the GetJob response.
-
+          To list the next page or the previous page of job tasks, set this field to the value of the
+          `next_page_token` or `prev_page_token` returned in the GetRun response.
         :returns: :class:`Run`

@@ -792,7 +789,7 @@

-    .. 
py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run] Usage: @@ -876,6 +873,9 @@ [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html + :param only: List[str] (optional) + A list of task keys to run inside of the job. If this field is not provided, all tasks in the job + will be run. :param pipeline_params: :class:`PipelineParams` (optional) Controls whether the pipeline should perform a full refresh :param python_named_params: Dict[str,str] (optional) @@ -921,7 +921,7 @@ See :method:wait_get_run_job_terminated_or_skipped for more details. - .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run + .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 39b5c9d77..1ba875740 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -79,7 +79,7 @@ :param filters: :class:`Filters` (optional) Filters on which Pipeline packages to include in the deployed graph. :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional) - The definition of a gateway pipeline to support CDC. + The definition of a gateway pipeline to support change data capture. :param id: str (optional) Unique identifier for this pipeline. :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) @@ -93,6 +93,8 @@ List of notification settings for this pipeline. :param photon: bool (optional) Whether Photon is enabled for this pipeline. + :param restart_window: :class:`RestartWindow` (optional) + Restart window of this pipeline. :param schema: str (optional) The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode. @@ -377,7 +379,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])

        Usage:

@@ -457,7 +459,7 @@
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -471,6 +473,8 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this
           field implies that the pipeline is in direct publishing mode.
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
index 29d947f55..8fb2d0c3c 100644
--- a/docs/workspace/settings/notification_destinations.rst
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -65,6 +65,7 @@
         required in the request body.

         :param id: str
+          UUID identifying the notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index 9c938ce3e..50dbe1328 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -53,7 +53,7 @@
         Deletes a token, specified by its ID.

:param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.

diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index 3e826a064..5f3e3e290 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -62,7 +62,7 @@
         Deletes the specified repo.

         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID of the repo to delete.
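
Two short sketches close out the new surface area in this changeset. First, the pipelines `restart_window` field; the pipeline ID and name are placeholders, and the exact shape of `days_of_week` (single enum value vs. list) should be checked against the generated `RestartWindow` dataclass rather than taken from this sketch:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.pipelines import (RestartWindow,
                                                  RestartWindowDaysOfWeek)

    w = WorkspaceClient()

    # Allow maintenance restarts only in the five-hour window starting at
    # 02:00 on Saturdays. Note that update overwrites the pipeline spec,
    # so a real call would resend the full pipeline settings as well.
    w.pipelines.update(
        pipeline_id='00000000-0000-0000-0000-000000000000',  # placeholder
        name='my-pipeline',                                  # placeholder
        restart_window=RestartWindow(
            start_hour=2,
            days_of_week=RestartWindowDaysOfWeek.SATURDAY,
        ),
    )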
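
Second, the clarified token-revocation and repo-deletion docstrings in action; both IDs are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Revoke a workspace token by its ID, then delete a repo by its
    # numeric workspace object ID.
    w.token_management.delete(token_id='0123456789abcdef00')
    w.repos.delete(repo_id=1234567890)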