Merge branch 'databricks:main' into example/default_namespace
nkvuong authored Apr 23, 2024
2 parents a854e19 + a3c0207 commit 673fccc
Showing 5 changed files with 37 additions and 4 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,11 @@
# Version changelog

## 0.25.1

Bug fixes:
* Fixed `select_node_type` error ([#614](https://github.com/databricks/databricks-sdk-py/pull/614)).


## 0.25.0

### Behavior Changes
7 changes: 5 additions & 2 deletions databricks/sdk/mixins/compute.py
@@ -86,6 +86,7 @@ def select_spark_version(self,
:param beta: bool
:param latest: bool
:param ml: bool
:param genomics: bool
:param gpu: bool
:param scala: str
:param spark_version: str
@@ -100,7 +101,7 @@ def select_spark_version(self,
for version in sv.versions:
if "-scala" + scala not in version.key:
continue
-matches = ((not "apache-spark-" in version.key) and (("-ml-" in version.key) == ml)
+matches = (("apache-spark-" not in version.key) and (("-ml-" in version.key) == ml)
and (("-hls-" in version.key) == genomics) and (("-gpu-" in version.key) == gpu)
and (("-photon-" in version.key) == photon)
and (("-aarch64-" in version.key) == graviton) and (("Beta" in version.name) == beta))
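For context on the helper this hunk touches, a minimal usage sketch of `select_spark_version` (a sketch, not part of this commit; the keyword arguments follow the docstring above, and the exact runtime key returned depends on the workspace):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# pick the latest long-term-support ML runtime on Scala 2.12; the helper
# returns a spark_version key such as "x.y.z-ml-scala2.12" (workspace-dependent)
version = w.clusters.select_spark_version(latest=True,
                                          long_term_support=True,
                                          ml=True,
                                          scala='2.12')
print(version)
```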
@@ -137,7 +138,7 @@ def _should_node_be_skipped(nt: compute.NodeType) -> bool:
return False
val = compute.CloudProviderNodeStatus
for st in nt.node_info.status:
-if st in (val.NotAvailableInRegion, val.NotEnabledOnSubscription):
+if st in (val.NOT_AVAILABLE_IN_REGION, val.NOT_ENABLED_ON_SUBSCRIPTION):
return True
return False
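The `select_node_type` error from the changelog comes down to this spelling change: the generated `CloudProviderNodeStatus` enum exposes upper-snake-case members, so the old CamelCase attribute access no longer resolved. A small standalone sketch of the corrected check (hypothetical helper name, same logic as the hunk above, with extra None guards added for illustration):

```python
from databricks.sdk.service import compute


def node_unusable(nt: compute.NodeType) -> bool:
    # a node type is skipped when the cloud provider marks it unavailable
    # in the region or not enabled on the subscription
    if nt.node_info is None or nt.node_info.status is None:
        return False
    blocked = (compute.CloudProviderNodeStatus.NOT_AVAILABLE_IN_REGION,
               compute.CloudProviderNodeStatus.NOT_ENABLED_ON_SUBSCRIPTION)
    return any(st in blocked for st in nt.node_info.status)
```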

@@ -168,6 +169,8 @@ def select_node_type(self,
:param photon_driver_capable: bool
:param graviton: bool
:param is_io_cache_enabled: bool
:param support_port_forwarding: bool
:param fleet: bool
:returns: `node_type` compatible string
"""
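As context for the two parameters being documented here, a minimal usage sketch of `select_node_type` (a sketch, not part of this commit; the keyword arguments are standard parameters of this helper, and the returned id depends on the cloud and region of the workspace):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# smallest available node type with local disk, at least 16 GB of memory
# and 4 cores; returns a node_type_id string suitable for cluster creation
node_type = w.clusters.select_node_type(local_disk=True,
                                        min_memory_gb=16,
                                        min_cores=4)
print(node_type)
```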
2 changes: 1 addition & 1 deletion databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.25.0'
+__version__ = '0.25.1'
24 changes: 24 additions & 0 deletions examples/groups/patch_groups.py
@@ -0,0 +1,24 @@
import time
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

# create a throwaway group and user to demonstrate the patch call
group = w.groups.create(display_name=f'sdk-{time.time_ns()}-group')
user = w.users.create(
    display_name=f'sdk-{time.time_ns()}-user', user_name=f'sdk-{time.time_ns()}@example.com')

# add the user to the group with a SCIM PATCH "add" operation
w.groups.patch(
    id=group.id,
    operations=[iam.Patch(
        op=iam.PatchOp.ADD,
        value={"members": [{
            "value": user.id,
        }]},
    )],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)

# cleanup
w.users.delete(id=user.id)
w.groups.delete(id=group.id)
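The example finishes by deleting both objects; to undo only the membership change instead, a SCIM remove patch along the same lines should work (a sketch that is not part of this commit and would run before the cleanup block; the `members[...]` path filter follows SCIM 2.0 filter syntax):

```python
# remove the user from the group without deleting either object
w.groups.patch(
    id=group.id,
    operations=[iam.Patch(
        op=iam.PatchOp.REMOVE,
        path=f'members[value eq "{user.id}"]',
    )],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
```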
2 changes: 1 addition & 1 deletion tests/integration/test_clusters.py
@@ -35,7 +35,7 @@ def test_create_cluster(w, env_or_skip, random):
spark_version=w.clusters.select_spark_version(long_term_support=True),
instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
autotermination_minutes=10,
-num_workers=1).result(timeout=timedelta(minutes=10))
+num_workers=1).result(timeout=timedelta(minutes=20))
logging.info(f'Created: {info}')

