Skip to content

Commit 78e1b9c

Browse files
authored
Add support for volumes in deployment bind/unbind commands (#2527)
## Changes 1. Changed `FindResourceByConfigKey` to return volume resources 2. Implemented `Exists` method on Volume resource ## Why This PR adds support for volume resources in deployment operations, enabling users to: - Bind volumes using `databricks bundle deployment bind <myvolume_key> <myvolume_full_name>` - Unbind volumes using `databricks bundle deployment unbind <myvolume_key>` Where: - `myvolume_key` is a resource key defined in the bundle's .yml file - `myvolume_full_name` references an existing volume in the Databricks workspace using its fully qualified (3-level) name These capabilities allow for more flexible resource management of volumes within bundles. ## Tests Added a new acceptance test that tests bind and unbind methods together with bundle deployment and destruction.
1 parent a35086d commit 78e1b9c

File tree

9 files changed

+181
-4
lines changed

9 files changed

+181
-4
lines changed

NEXT_CHANGELOG.md

+2-1
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
* Processing 'artifacts' section is now done in "bundle validate" (adding defaults, inferring "build", asserting required fields) ([#2526](https://github.com/databricks/cli/pull/2526))
1010
* When uploading artifacts, include relative path in log message ([#2539](https://github.com/databricks/cli/pull/2539))
11-
* Add support for clusters in deployment bind/unbind commands ([#2536](https://github.com/databricks/cli/pull/2536))
11+
* Added support for clusters in deployment bind/unbind commands ([#2536](https://github.com/databricks/cli/pull/2536))
12+
* Added support for volumes in deployment bind/unbind commands ([#2527](https://github.com/databricks/cli/pull/2527))
1213

1314
### API Changes

acceptance/acceptance_test.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ var (
4444
)
4545

4646
// In order to debug CLI running under acceptance test, search for TestInprocessMode and update
47-
// the test name there, e..g "bundle/variables/empty".
47+
// the test name there, e.g. "bundle/variables/empty".
4848
// Then install your breakpoints and click "debug test" near TestInprocessMode in VSCODE.
4949

5050
// If enabled, instead of compiling and running CLI externally, we'll start in-process server that accepts and runs
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
bundle:
2+
name: bind-dashboard-test-$UNIQUE_NAME
3+
4+
resources:
5+
volumes:
6+
volume1:
7+
name: $VOLUME_NAME
8+
schema_name: $SCHEMA_NAME
9+
catalog_name: $CATALOG_NAME
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
2+
>>> [CLI] schemas create test-schema-[UUID] main
3+
{
4+
"full_name": "main.test-schema-[UUID]",
5+
"catalog_name": "main"
6+
}
7+
8+
=== Create a pre-defined volume:
9+
>>> [CLI] bundle deployment bind volume1 main.test-schema-[UUID].volume-[UUID] --auto-approve
10+
Updating deployment state...
11+
Successfully bound databricks_volume with an id 'main.test-schema-[UUID].volume-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace
12+
13+
>>> [CLI] bundle deploy
14+
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default/files...
15+
Deploying resources...
16+
Updating deployment state...
17+
Deployment complete!
18+
19+
>>> [CLI] volumes read main.test-schema-[UUID].volume-[UUID]
20+
{
21+
"catalog_name": "main",
22+
"full_name": "main.test-schema-[UUID].volume-[UUID]",
23+
"schema_name": "test-schema-[UUID]",
24+
"volume_type": "MANAGED"
25+
}
26+
27+
>>> [CLI] bundle deployment unbind volume1
28+
Updating deployment state...
29+
30+
>>> [CLI] bundle destroy --auto-approve
31+
All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default
32+
33+
Deleting files...
34+
Destroy complete!
35+
36+
>>> [CLI] volumes read main.test-schema-[UUID].volume-[UUID]
37+
{
38+
"catalog_name": "main",
39+
"full_name": "main.test-schema-[UUID].volume-[UUID]",
40+
"schema_name": "test-schema-[UUID]",
41+
"volume_type": "MANAGED"
42+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
VOLUME_NAME="volume-$(uuid)"
2+
SCHEMA_NAME="test-schema-$(uuid)"
3+
if [ -z "$CLOUD_ENV" ]; then
4+
VOLUME_NAME="volume-6260d50f-e8ff-4905-8f28-812345678903" # use hard-coded uuid when running locally
5+
SCHEMA_NAME="test-schema-6260d50f-e8ff-4905-8f28-812345678903"
6+
fi
7+
export VOLUME_NAME
8+
export SCHEMA_NAME
9+
export CATALOG_NAME="main"
10+
envsubst < databricks.yml.tmpl > databricks.yml
11+
12+
VOLUME_TYPE="MANAGED"
13+
14+
trace $CLI schemas create "${SCHEMA_NAME}" ${CATALOG_NAME} | jq '{full_name, catalog_name}'
15+
16+
title "Create a pre-defined volume: "
17+
VOLUME_FULL_NAME=$($CLI volumes create "${CATALOG_NAME}" "${SCHEMA_NAME}" "${VOLUME_NAME}" "${VOLUME_TYPE}" | jq -r '.full_name')
18+
19+
cleanupRemoveVolume() {
20+
$CLI volumes delete "${VOLUME_FULL_NAME}"
21+
}
22+
trap cleanupRemoveVolume EXIT
23+
24+
trace $CLI bundle deployment bind volume1 "${VOLUME_FULL_NAME}" --auto-approve
25+
26+
trace $CLI bundle deploy
27+
28+
trace $CLI volumes read "${VOLUME_FULL_NAME}" | jq '{catalog_name, full_name, schema_name, volume_type}'
29+
30+
trace $CLI bundle deployment unbind volume1
31+
32+
trace $CLI bundle destroy --auto-approve
33+
34+
trace $CLI volumes read "${VOLUME_FULL_NAME}" | jq '{catalog_name, full_name, schema_name, volume_type}'
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
Local = true
2+
Cloud = true
3+
RequiresUnityCatalog = true
4+
5+
Ignore = [
6+
"databricks.yml",
7+
]
8+
9+
[[Server]]
10+
Pattern = "POST /api/2.1/unity-catalog/schemas"
11+
Response.Body = '''
12+
{
13+
"name":"test-schema-6260d50f-e8ff-4905-8f28-812345678903",
14+
"catalog_name":"main",
15+
"full_name":"main.test-schema-6260d50f-e8ff-4905-8f28-812345678903"
16+
}
17+
'''
18+
19+
[[Server]]
20+
Pattern = "POST /api/2.1/unity-catalog/volumes"
21+
Response.Body = '''
22+
{
23+
"full_name":"main.test-schema-6260d50f-e8ff-4905-8f28-812345678903.volume-6260d50f-e8ff-4905-8f28-812345678903"
24+
}
25+
'''
26+
27+
[[Server]]
28+
Pattern = "GET /api/2.1/unity-catalog/volumes/{volume_fullname}"
29+
Response.Body = '''
30+
{
31+
"catalog_name": "main",
32+
"schema_name": "test-schema-6260d50f-e8ff-4905-8f28-812345678903",
33+
"name": "volume-6260d50f-e8ff-4905-8f28-812345678903",
34+
"full_name": "main.test-schema-6260d50f-e8ff-4905-8f28-812345678903.volume-6260d50f-e8ff-4905-8f28-812345678903",
35+
"volume_type": "MANAGED"
36+
}
37+
'''
38+
39+
[[Server]]
40+
Pattern = "DELETE /api/2.1/unity-catalog/volumes/{volume_fullname}"

bundle/config/resources.go

+6
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,12 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
130130
}
131131
}
132132

133+
for k := range r.Volumes {
134+
if k == key {
135+
found = append(found, r.Volumes[k])
136+
}
137+
}
138+
133139
if len(found) == 0 {
134140
return nil, fmt.Errorf("no such resource: %s", key)
135141
}

bundle/config/resources/volume.go

+21-2
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@ import (
66
"net/url"
77
"strings"
88

9+
"github.com/databricks/cli/libs/log"
10+
"github.com/databricks/databricks-sdk-go/apierr"
11+
912
"github.com/databricks/databricks-sdk-go"
1013
"github.com/databricks/databricks-sdk-go/marshal"
1114
"github.com/databricks/databricks-sdk-go/service/catalog"
@@ -33,8 +36,24 @@ func (v Volume) MarshalJSON() ([]byte, error) {
3336
return marshal.Marshal(v)
3437
}
3538

36-
func (v *Volume) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
37-
return false, errors.New("volume.Exists() is not supported")
39+
func (v *Volume) Exists(ctx context.Context, w *databricks.WorkspaceClient, fullyQualifiedName string) (bool, error) {
40+
_, err := w.Volumes.Read(ctx, catalog.ReadVolumeRequest{
41+
Name: fullyQualifiedName,
42+
})
43+
if err != nil {
44+
log.Debugf(ctx, "volume with fully qualified name %s does not exist: %v", fullyQualifiedName, err)
45+
46+
var aerr *apierr.APIError
47+
if errors.As(err, &aerr) {
48+
if aerr.StatusCode == 404 {
49+
return false, nil
50+
}
51+
}
52+
53+
return false, err
54+
}
55+
56+
return true, nil
3857
}
3958

4059
func (v *Volume) TerraformResourceName() string {
+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
package resources
2+
3+
import (
4+
"context"
5+
"testing"
6+
7+
"github.com/databricks/databricks-sdk-go/apierr"
8+
"github.com/databricks/databricks-sdk-go/experimental/mocks"
9+
"github.com/stretchr/testify/mock"
10+
"github.com/stretchr/testify/require"
11+
)
12+
13+
func TestVolumeNotFound(t *testing.T) {
14+
ctx := context.Background()
15+
16+
m := mocks.NewMockWorkspaceClient(t)
17+
m.GetMockVolumesAPI().On("Read", mock.Anything, mock.Anything).Return(nil, &apierr.APIError{
18+
StatusCode: 404,
19+
})
20+
21+
s := &Volume{}
22+
exists, err := s.Exists(ctx, m.WorkspaceClient, "non-existent-volume")
23+
24+
require.Falsef(t, exists, "Exists should return false when getting a 404 response from Workspace")
25+
require.NoErrorf(t, err, "Exists should not return an error when getting a 404 response from Workspace")
26+
}

0 commit comments

Comments
 (0)