[Python] Update generated code #2508

Merged: 2 commits, merged on Mar 20, 2025
Changes from 1 commit
@@ -9,6 +9,10 @@
S3StorageInfo,
S3StorageInfoParam,
)
+from databricks.bundles.compute._models.volumes_storage_info import (
+VolumesStorageInfo,
+VolumesStorageInfoParam,
+)
from databricks.bundles.core._transform import _transform
from databricks.bundles.core._transform_to_json import _transform_to_json_value
from databricks.bundles.core._variable import VariableOrOptional
@@ -35,6 +39,12 @@ class ClusterLogConf:
`instance_profile_arn` has permission to write data to the s3 destination.
"""

+volumes: VariableOrOptional[VolumesStorageInfo] = None
+"""
+destination needs to be provided. e.g.
+`{ "volumes" : { "destination" : "/Volumes/catalog/schema/volume/cluster_log" } }`
+"""

@classmethod
def from_dict(cls, value: "ClusterLogConfDict") -> "Self":
return _transform(cls, value)
@@ -60,5 +70,11 @@ class ClusterLogConfDict(TypedDict, total=False):
`instance_profile_arn` has permission to write data to the s3 destination.
"""

+volumes: VariableOrOptional[VolumesStorageInfoParam]
+"""
+destination needs to be provided. e.g.
+`{ "volumes" : { "destination" : "/Volumes/catalog/schema/volume/cluster_log" } }`
+"""


ClusterLogConfParam = ClusterLogConfDict | ClusterLogConf
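
For illustration only (not part of this PR), here is a minimal sketch of how the new `volumes` destination might be configured. The public import path `databricks.bundles.compute` is an assumption; this diff only shows the private `_models` modules, and the class and field names come from the hunk above.

```python
# Minimal sketch, assuming ClusterLogConf and VolumesStorageInfo are re-exported
# from the public compute package.
from databricks.bundles.compute import ClusterLogConf, VolumesStorageInfo

# Dataclass form: deliver cluster logs to a Unity Catalog volume.
log_conf = ClusterLogConf(
    volumes=VolumesStorageInfo(
        destination="/Volumes/catalog/schema/volume/cluster_log",
    ),
)

# Dict form, matching the docstring example, via the generated from_dict helper.
log_conf_from_dict = ClusterLogConf.from_dict(
    {"volumes": {"destination": "/Volumes/catalog/schema/volume/cluster_log"}}
)
```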
@@ -91,7 +91,7 @@ class ClusterSpec:
cluster_log_conf: VariableOrOptional[ClusterLogConf] = None
"""
The configuration for delivering spark logs to a long-term storage destination.
-Two kinds of destinations (dbfs and s3) are supported. Only one destination can be specified
+Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified
for one cluster. If the conf is given, the logs will be delivered to the destination every
`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
the destination of executor logs is `$destination/$clusterId/executor`.
@@ -163,7 +163,7 @@ class ClusterSpec:

is_single_node: VariableOrOptional[bool] = None
"""
-This field can only be used with `kind`.
+This field can only be used when `kind = CLASSIC_PREVIEW`.

When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`

@@ -242,7 +242,7 @@ class ClusterSpec:

use_ml_runtime: VariableOrOptional[bool] = None
"""
-This field can only be used with `kind`.
+This field can only be used when `kind = CLASSIC_PREVIEW`.

`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.

@@ -295,7 +295,7 @@ class ClusterSpecDict(TypedDict, total=False):
cluster_log_conf: VariableOrOptional[ClusterLogConfParam]
"""
The configuration for delivering spark logs to a long-term storage destination.
-Two kinds of destinations (dbfs and s3) are supported. Only one destination can be specified
+Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified
for one cluster. If the conf is given, the logs will be delivered to the destination every
`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while
the destination of executor logs is `$destination/$clusterId/executor`.
@@ -367,7 +367,7 @@ class ClusterSpecDict(TypedDict, total=False):

is_single_node: VariableOrOptional[bool]
"""
-This field can only be used with `kind`.
+This field can only be used when `kind = CLASSIC_PREVIEW`.

When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`

@@ -446,7 +446,7 @@ class ClusterSpecDict(TypedDict, total=False):

use_ml_runtime: VariableOrOptional[bool]
"""
-This field can only be used with `kind`.
+This field can only be used when `kind = CLASSIC_PREVIEW`.

`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.

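
As a hedged sketch of how the documented `ClusterSpecDict` fields fit together (none of this is in the PR; treating `"CLASSIC_PREVIEW"` as a plain string literal and the dict shape shown here are assumptions based on the docstrings above):

```python
# Illustrative only: the dict shape implied by the ClusterSpecDict docstrings above.
cluster_spec = {
    "kind": "CLASSIC_PREVIEW",  # is_single_node and use_ml_runtime require this kind
    "is_single_node": True,
    "use_ml_runtime": False,
    "cluster_log_conf": {
        # Unity Catalog volumes are now one of the three supported log destinations.
        "volumes": {"destination": "/Volumes/catalog/schema/volume/cluster_log"}
    },
}
```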
@@ -7,7 +7,7 @@ class DataSecurityMode(Enum):
Data security mode decides what data governance model to use when accessing data
from a cluster.

-The following modes can only be used with `kind`.
+The following modes can only be used when `kind = CLASSIC_PREVIEW`.
* `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration.
* `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`.
* `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
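
A one-line usage sketch; the member name is taken from the list above, while the public import path is an assumption:

```python
from databricks.bundles.compute import DataSecurityMode  # import path assumed

# Per the docstring above, only valid together with kind = CLASSIC_PREVIEW.
mode = DataSecurityMode.DATA_SECURITY_MODE_AUTO
```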
@@ -27,9 +27,6 @@ class Environment:
dependencies: VariableOrList[str] = field(default_factory=list)
"""
List of pip dependencies, as supported by the version of pip in this environment.
-Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/
-Allowed dependency could be <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in Databricks), <vcs project url>
-E.g. dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"]
"""

@classmethod
@@ -54,9 +51,6 @@ class EnvironmentDict(TypedDict, total=False):
dependencies: VariableOrList[str]
"""
List of pip dependencies, as supported by the version of pip in this environment.
-Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/
-Allowed dependency could be <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in Databricks), <vcs project url>
-E.g. dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"]
"""


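
Although the PR trims the docstring, the examples it removes still illustrate what `dependencies` accepts; a minimal sketch, with the public import path assumed:

```python
# Sketch based on the docstring text removed above: requirement specifiers and
# `-r` requirement files are both passed as plain strings.
from databricks.bundles.compute import Environment  # import path assumed

env = Environment(
    dependencies=[
        "foo==0.0.1",
        "-r /Workspace/test/requirements.txt",
    ],
)
```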
@@ -43,8 +43,7 @@ class InitScriptInfo:

abfss: VariableOrOptional[Adlsgen2Info] = None
"""
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }`
+Contains the Azure Data Lake Storage destination path
"""

dbfs: VariableOrOptional[DbfsStorageInfo] = None
@@ -98,8 +97,7 @@ class InitScriptInfoDict(TypedDict, total=False):

abfss: VariableOrOptional[Adlsgen2InfoParam]
"""
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
+Contains the Azure Data Lake Storage destination path
"""

dbfs: VariableOrOptional[DbfsStorageInfoParam]
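
For reference, the dict shape from the removed docstring still applies; a sketch that assumes the public import path and that `InitScriptInfo` follows the same generated `from_dict` pattern as the other models in this PR:

```python
from databricks.bundles.compute import InitScriptInfo  # import path assumed

# ADLS Gen2 init-script destination in dict form, reusing the example path format
# from the docstring this change removes.
script = InitScriptInfo.from_dict(
    {
        "abfss": {
            "destination": "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>"
        }
    }
)
```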
@@ -15,12 +15,12 @@ class LogAnalyticsInfo:

log_analytics_primary_key: VariableOrOptional[str] = None
"""
-<needs content added>
+The primary key for the Azure Log Analytics agent configuration
"""

log_analytics_workspace_id: VariableOrOptional[str] = None
"""
-<needs content added>
+The workspace ID for the Azure Log Analytics agent configuration
"""

@classmethod
@@ -36,12 +36,12 @@ class LogAnalyticsInfoDict(TypedDict, total=False):

log_analytics_primary_key: VariableOrOptional[str]
"""
-<needs content added>
+The primary key for the Azure Log Analytics agent configuration
"""

log_analytics_workspace_id: VariableOrOptional[str]
"""
-<needs content added>
+The workspace ID for the Azure Log Analytics agent configuration
"""


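
A short sketch with placeholder values for the two fields documented above; the public import path is an assumption:

```python
from databricks.bundles.compute import LogAnalyticsInfo  # import path assumed

log_analytics = LogAnalyticsInfo(
    log_analytics_workspace_id="<workspace-id>",   # placeholder value
    log_analytics_primary_key="<primary-key>",     # placeholder value
)
```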
@@ -15,7 +15,7 @@ class VolumesStorageInfo:

destination: VariableOr[str]
"""
-Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`
+Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file`
"""

@classmethod
@@ -31,7 +31,7 @@ class VolumesStorageInfoDict(TypedDict, total=False):

destination: VariableOr[str]
"""
-Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`
+Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file`
"""


experimental/python/databricks/bundles/jobs/__init__.py (23 changes: 15 additions & 8 deletions)
@@ -72,9 +72,6 @@
"GcsStorageInfoParam",
"GitProvider",
"GitProviderParam",
"GitSnapshot",
"GitSnapshotDict",
"GitSnapshotParam",
"GitSource",
"GitSourceDict",
"GitSourceParam",
@@ -129,6 +126,8 @@
"NotebookTaskParam",
"PauseStatus",
"PauseStatusParam",
"PerformanceTarget",
"PerformanceTargetParam",
"PeriodicTriggerConfiguration",
"PeriodicTriggerConfigurationDict",
"PeriodicTriggerConfigurationParam",
"Permission",
"PermissionDict",
"PermissionParam",
"PipelineParams",
"PipelineParamsDict",
"PipelineParamsParam",
"PipelineTask",
"PipelineTaskDict",
"PipelineTaskParam",
@@ -403,11 +405,6 @@
ForEachTaskParam,
)
from databricks.bundles.jobs._models.git_provider import GitProvider, GitProviderParam
-from databricks.bundles.jobs._models.git_snapshot import (
-GitSnapshot,
-GitSnapshotDict,
-GitSnapshotParam,
-)
from databricks.bundles.jobs._models.git_source import (
GitSource,
GitSourceDict,
@@ -468,6 +465,10 @@
NotebookTaskParam,
)
from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
+from databricks.bundles.jobs._models.performance_target import (
+PerformanceTarget,
+PerformanceTargetParam,
+)
from databricks.bundles.jobs._models.periodic_trigger_configuration import (
PeriodicTriggerConfiguration,
PeriodicTriggerConfigurationDict,
@@ -482,6 +483,11 @@
PermissionDict,
PermissionParam,
)
+from databricks.bundles.jobs._models.pipeline_params import (
+PipelineParams,
+PipelineParamsDict,
+PipelineParamsParam,
+)
from databricks.bundles.jobs._models.pipeline_task import (
PipelineTask,
PipelineTaskDict,
@@ -583,6 +589,7 @@ def _resolve_recursive_imports():
import typing

from databricks.bundles.core._variable import VariableOr
+from databricks.bundles.jobs._models.task import Task

ForEachTask.__annotations__ = typing.get_type_hints(
ForEachTask,
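
Once this change lands, the newly exported names are importable from the public jobs package. A minimal sketch; the `full_refresh` field on `PipelineParams` is an assumption based on the Jobs API and is not shown in this diff:

```python
from databricks.bundles.jobs import PerformanceTarget, PipelineParams

# Illustrative only: `full_refresh` is assumed from the Jobs API's pipeline_params.
params = PipelineParams(full_refresh=True)
```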
@@ -1,4 +1,4 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import TYPE_CHECKING, TypedDict

from databricks.bundles.core._transform import _transform
@@ -14,14 +14,14 @@
class DbtTask:
""""""

-commands: VariableOrList[str]
+catalog: VariableOrOptional[str] = None

[Comment from the PR author: Field order has changed because "commands" is not required anymore.]

"""
-A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.
+Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.
"""

-catalog: VariableOrOptional[str] = None
+commands: VariableOrList[str] = field(default_factory=list)
"""
-Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.
+A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.
"""

profiles_directory: VariableOrOptional[str] = None
@@ -66,14 +66,14 @@ def as_dict(self) -> "DbtTaskDict":
class DbtTaskDict(TypedDict, total=False):
""""""

-commands: VariableOrList[str]
+catalog: VariableOrOptional[str]
"""
-A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.
+Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.
"""

-catalog: VariableOrOptional[str]
+commands: VariableOrList[str]
"""
-Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.
+A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.
"""

profiles_directory: VariableOrOptional[str]
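
A short sketch of the effect of the reordering: after this change `commands` has an empty-list default, so `catalog` can come first, but both remain ordinary keyword arguments. The import path and the `warehouse_id` field name are assumptions, not shown in this diff:

```python
from databricks.bundles.jobs import DbtTask  # import path assumed

task = DbtTask(
    commands=["dbt deps", "dbt run"],  # every command must start with `dbt`
    catalog="main",                    # per the docstring, only valid with a warehouse_id
    warehouse_id="<warehouse-id>",     # assumed field name, not shown in this diff
)
```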

This file was deleted (the `git_snapshot` model module, whose `GitSnapshot` exports were removed from `databricks.bundles.jobs` above).
