diff --git a/.codegen.json b/.codegen.json index bde85013f7..e0a4a066c7 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1,6 +1,7 @@ { "formatter": "make fmt", "mode": "tf_v1", + "api_changelog": true, "changelog_config": ".codegen/changelog_config.yml", "version": { "common/version.go": "version = \"$VERSION\"" diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 4ae64141ab..4e0a18f572 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -2f10a6ab001e2fa2552f6b25f7fbb4a08014a43f \ No newline at end of file +4ad0f0856225e3ebc6bb11392d09d63a19e6d140 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 85b5f4e997..3485659ab4 100755 --- a/.gitattributes +++ b/.gitattributes @@ -15,10 +15,10 @@ docs/data-sources/account_setting_user_preference_v2.md linguist-generated=true docs/data-sources/account_setting_v2.md linguist-generated=true docs/data-sources/alert_v2.md linguist-generated=true docs/data-sources/alerts_v2.md linguist-generated=true +docs/data-sources/app_space.md linguist-generated=true +docs/data-sources/app_spaces.md linguist-generated=true docs/data-sources/apps_settings_custom_template.md linguist-generated=true docs/data-sources/apps_settings_custom_templates.md linguist-generated=true -docs/data-sources/apps_space.md linguist-generated=true -docs/data-sources/apps_spaces.md linguist-generated=true docs/data-sources/budget_policies.md linguist-generated=true docs/data-sources/budget_policy.md linguist-generated=true docs/data-sources/data_quality_monitor.md linguist-generated=true @@ -51,6 +51,8 @@ docs/data-sources/policy_info.md linguist-generated=true docs/data-sources/policy_infos.md linguist-generated=true docs/data-sources/postgres_branch.md linguist-generated=true docs/data-sources/postgres_branches.md linguist-generated=true +docs/data-sources/postgres_database.md linguist-generated=true +docs/data-sources/postgres_databases.md linguist-generated=true 
docs/data-sources/postgres_endpoint.md linguist-generated=true docs/data-sources/postgres_endpoints.md linguist-generated=true docs/data-sources/postgres_project.md linguist-generated=true @@ -73,8 +75,8 @@ docs/resources/account_network_policy.md linguist-generated=true docs/resources/account_setting_user_preference_v2.md linguist-generated=true docs/resources/account_setting_v2.md linguist-generated=true docs/resources/alert_v2.md linguist-generated=true +docs/resources/app_space.md linguist-generated=true docs/resources/apps_settings_custom_template.md linguist-generated=true -docs/resources/apps_space.md linguist-generated=true docs/resources/budget_policy.md linguist-generated=true docs/resources/data_quality_monitor.md linguist-generated=true docs/resources/data_quality_refresh.md linguist-generated=true @@ -91,6 +93,7 @@ docs/resources/materialized_features_feature_tag.md linguist-generated=true docs/resources/online_store.md linguist-generated=true docs/resources/policy_info.md linguist-generated=true docs/resources/postgres_branch.md linguist-generated=true +docs/resources/postgres_database.md linguist-generated=true docs/resources/postgres_endpoint.md linguist-generated=true docs/resources/postgres_project.md linguist-generated=true docs/resources/quality_monitor_v2.md linguist-generated=true @@ -125,12 +128,12 @@ internal/providers/pluginfw/products/account_setting_v2/resource_account_setting internal/providers/pluginfw/products/alert_v2/data_alert_v2.go linguist-generated=true internal/providers/pluginfw/products/alert_v2/data_alerts_v2.go linguist-generated=true internal/providers/pluginfw/products/alert_v2/resource_alert_v2.go linguist-generated=true +internal/providers/pluginfw/products/app_space/data_app_space.go linguist-generated=true +internal/providers/pluginfw/products/app_space/data_app_spaces.go linguist-generated=true +internal/providers/pluginfw/products/app_space/resource_app_space.go linguist-generated=true 
internal/providers/pluginfw/products/apps_settings_custom_template/data_apps_settings_custom_template.go linguist-generated=true internal/providers/pluginfw/products/apps_settings_custom_template/data_apps_settings_custom_templates.go linguist-generated=true internal/providers/pluginfw/products/apps_settings_custom_template/resource_apps_settings_custom_template.go linguist-generated=true -internal/providers/pluginfw/products/apps_space/data_apps_space.go linguist-generated=true -internal/providers/pluginfw/products/apps_space/data_apps_spaces.go linguist-generated=true -internal/providers/pluginfw/products/apps_space/resource_apps_space.go linguist-generated=true internal/providers/pluginfw/products/budget_policy/data_budget_policies.go linguist-generated=true internal/providers/pluginfw/products/budget_policy/data_budget_policy.go linguist-generated=true internal/providers/pluginfw/products/budget_policy/resource_budget_policy.go linguist-generated=true @@ -179,6 +182,9 @@ internal/providers/pluginfw/products/policy_info/resource_policy_info.go linguis internal/providers/pluginfw/products/postgres_branch/data_postgres_branch.go linguist-generated=true internal/providers/pluginfw/products/postgres_branch/data_postgres_branches.go linguist-generated=true internal/providers/pluginfw/products/postgres_branch/resource_postgres_branch.go linguist-generated=true +internal/providers/pluginfw/products/postgres_database/data_postgres_database.go linguist-generated=true +internal/providers/pluginfw/products/postgres_database/data_postgres_databases.go linguist-generated=true +internal/providers/pluginfw/products/postgres_database/resource_postgres_database.go linguist-generated=true internal/providers/pluginfw/products/postgres_endpoint/data_postgres_endpoint.go linguist-generated=true internal/providers/pluginfw/products/postgres_endpoint/data_postgres_endpoints.go linguist-generated=true internal/providers/pluginfw/products/postgres_endpoint/resource_postgres_endpoint.go 
linguist-generated=true diff --git a/Makefile b/Makefile index 9d1ee69030..0fef843ef1 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,6 @@ lint: vendor test: lint @echo "✓ Running tests ..." - @go tool gotestsum --format pkgname-and-test-fails --no-summary=skipped --raw-command go test -v -json -short -coverprofile=coverage.txt ./... coverage: test @echo "✓ Opening coverage for unit tests ..." diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index ffb4d56e16..42f68ca182 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -5,6 +5,8 @@ ### Breaking Changes ### New Features and Improvements +* Add resource and data sources for `databricks_postgres_database`. +* dsfafj ### Bug Fixes diff --git a/docs/data-sources/app_space.md b/docs/data-sources/app_space.md new file mode 100755 index 0000000000..224dd45c8e --- /dev/null +++ b/docs/data-sources/app_space.md @@ -0,0 +1,102 @@ +--- +subcategory: "Apps" +--- +# databricks_app_space Data Source +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `name` (string, required) - The name of the app space. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. + +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + +## Attributes +The following attributes are exported: +* `create_time` (string) - The creation time of the app space. 
Formatted timestamp in ISO 8601 +* `creator` (string) - The email of the user that created the app space +* `description` (string) - The description of the app space +* `effective_usage_policy_id` (string) - The effective usage policy ID used by apps in the space +* `effective_user_api_scopes` (list of string) - The effective api scopes granted to the user access token +* `id` (string) - The unique identifier of the app space +* `name` (string) - The name of the app space. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace +* `resources` (list of AppResource) - Resources for the app space. Resources configured at the space level are available to all apps in the space +* `service_principal_client_id` (string) - The service principal client ID for the app space +* `service_principal_id` (integer) - The service principal ID for the app space +* `service_principal_name` (string) - The service principal name for the app space +* `status` (SpaceStatus) - The status of the app space +* `update_time` (string) - The update time of the app space. 
Formatted timestamp in ISO 8601 +* `updater` (string) - The email of the user that last updated the app space +* `usage_policy_id` (string) - The usage policy ID for managing cost at the space level +* `user_api_scopes` (list of string) - OAuth scopes for apps in the space + +### AppResource +* `app` (AppResourceApp) +* `database` (AppResourceDatabase) +* `description` (string) - Description of the App Resource +* `experiment` (AppResourceExperiment) +* `genie_space` (AppResourceGenieSpace) +* `job` (AppResourceJob) +* `name` (string) - Name of the App Resource +* `postgres` (AppResourcePostgres) +* `secret` (AppResourceSecret) +* `serving_endpoint` (AppResourceServingEndpoint) +* `sql_warehouse` (AppResourceSqlWarehouse) +* `uc_securable` (AppResourceUcSecurable) + +### AppResourceApp + +### AppResourceDatabase +* `database_name` (string) +* `instance_name` (string) +* `permission` (string) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceExperiment +* `experiment_id` (string) +* `permission` (string) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_READ` + +### AppResourceGenieSpace +* `name` (string) +* `permission` (string) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_RUN`, `CAN_VIEW` +* `space_id` (string) + +### AppResourceJob +* `id` (string) - Id of the job to grant permission on +* `permission` (string) - Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", "CAN_MANAGE_RUN", "CAN_VIEW". Possible values are: `CAN_MANAGE`, `CAN_MANAGE_RUN`, `CAN_VIEW`, `IS_OWNER` + +### AppResourcePostgres +* `branch` (string) +* `database` (string) +* `permission` (string) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceSecret +* `key` (string) - Key of the secret to grant permission on +* `permission` (string) - Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: "READ", "WRITE", "MANAGE". 
Possible values are: `MANAGE`, `READ`, `WRITE` +* `scope` (string) - Scope of the secret to grant permission on + +### AppResourceServingEndpoint +* `name` (string) - Name of the serving endpoint to grant permission on +* `permission` (string) - Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", "CAN_QUERY", "CAN_VIEW". Possible values are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW` + +### AppResourceSqlWarehouse +* `id` (string) - Id of the SQL warehouse to grant permission on +* `permission` (string) - Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE", "IS_OWNER". Possible values are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER` + +### AppResourceUcSecurable +* `permission` (string) - Possible values are: `EXECUTE`, `MODIFY`, `READ_VOLUME`, `SELECT`, `USE_CONNECTION`, `WRITE_VOLUME` +* `securable_full_name` (string) +* `securable_kind` (string) - The securable kind from Unity Catalog. + See https://docs.databricks.com/api/workspace/tables/get#securable_kind_manifest-securable_kind +* `securable_type` (string) - Possible values are: `CONNECTION`, `FUNCTION`, `TABLE`, `VOLUME` + +### SpaceStatus +* `message` (string) - Message providing context about the current state +* `state` (string) - The state of the app space. 
Possible values are: `SPACE_ACTIVE`, `SPACE_CREATING`, `SPACE_DELETED`, `SPACE_DELETING`, `SPACE_ERROR`, `SPACE_UPDATING` \ No newline at end of file diff --git a/docs/data-sources/app_spaces.md b/docs/data-sources/app_spaces.md new file mode 100755 index 0000000000..e8e19cf007 --- /dev/null +++ b/docs/data-sources/app_spaces.md @@ -0,0 +1,102 @@ +--- +subcategory: "Apps" +--- +# databricks_app_spaces Data Source +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `page_size` (integer, optional) - Upper bound for items returned +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. + +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + + +## Attributes +This data source exports a single attribute, `spaces`. It is a list of resources, each with the following attributes: +* `create_time` (string) - The creation time of the app space. Formatted timestamp in ISO 6801 +* `creator` (string) - The email of the user that created the app space +* `description` (string) - The description of the app space +* `effective_usage_policy_id` (string) - The effective usage policy ID used by apps in the space +* `effective_user_api_scopes` (list of string) - The effective api scopes granted to the user access token +* `id` (string) - The unique identifier of the app space +* `name` (string) - The name of the app space. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace +* `resources` (list of AppResource) - Resources for the app space. 
Resources configured at the space level are available to all apps in the space +* `service_principal_client_id` (string) - The service principal client ID for the app space +* `service_principal_id` (integer) - The service principal ID for the app space +* `service_principal_name` (string) - The service principal name for the app space +* `status` (SpaceStatus) - The status of the app space +* `update_time` (string) - The update time of the app space. Formatted timestamp in ISO 6801 +* `updater` (string) - The email of the user that last updated the app space +* `usage_policy_id` (string) - The usage policy ID for managing cost at the space level +* `user_api_scopes` (list of string) - OAuth scopes for apps in the space + +### AppResource +* `app` (AppResourceApp) +* `database` (AppResourceDatabase) +* `description` (string) - Description of the App Resource +* `experiment` (AppResourceExperiment) +* `genie_space` (AppResourceGenieSpace) +* `job` (AppResourceJob) +* `name` (string) - Name of the App Resource +* `postgres` (AppResourcePostgres) +* `secret` (AppResourceSecret) +* `serving_endpoint` (AppResourceServingEndpoint) +* `sql_warehouse` (AppResourceSqlWarehouse) +* `uc_securable` (AppResourceUcSecurable) + +### AppResourceApp + +### AppResourceDatabase +* `database_name` (string) +* `instance_name` (string) +* `permission` (string) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceExperiment +* `experiment_id` (string) +* `permission` (string) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_READ` + +### AppResourceGenieSpace +* `name` (string) +* `permission` (string) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_RUN`, `CAN_VIEW` +* `space_id` (string) + +### AppResourceJob +* `id` (string) - Id of the job to grant permission on +* `permission` (string) - Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", "CAN_MANAGE_RUN", "CAN_VIEW". 
Possible values are: `CAN_MANAGE`, `CAN_MANAGE_RUN`, `CAN_VIEW`, `IS_OWNER` + +### AppResourcePostgres +* `branch` (string) +* `database` (string) +* `permission` (string) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceSecret +* `key` (string) - Key of the secret to grant permission on +* `permission` (string) - Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: "READ", "WRITE", "MANAGE". Possible values are: `MANAGE`, `READ`, `WRITE` +* `scope` (string) - Scope of the secret to grant permission on + +### AppResourceServingEndpoint +* `name` (string) - Name of the serving endpoint to grant permission on +* `permission` (string) - Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", "CAN_QUERY", "CAN_VIEW". Possible values are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW` + +### AppResourceSqlWarehouse +* `id` (string) - Id of the SQL warehouse to grant permission on +* `permission` (string) - Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE", "IS_OWNER". Possible values are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER` + +### AppResourceUcSecurable +* `permission` (string) - Possible values are: `EXECUTE`, `MODIFY`, `READ_VOLUME`, `SELECT`, `USE_CONNECTION`, `WRITE_VOLUME` +* `securable_full_name` (string) +* `securable_kind` (string) - The securable kind from Unity Catalog. + See https://docs.databricks.com/api/workspace/tables/get#securable_kind_manifest-securable_kind +* `securable_type` (string) - Possible values are: `CONNECTION`, `FUNCTION`, `TABLE`, `VOLUME` + +### SpaceStatus +* `message` (string) - Message providing context about the current state +* `state` (string) - The state of the app space. 
Possible values are: `SPACE_ACTIVE`, `SPACE_CREATING`, `SPACE_DELETED`, `SPACE_DELETING`, `SPACE_ERROR`, `SPACE_UPDATING` \ No newline at end of file diff --git a/docs/data-sources/endpoint.md b/docs/data-sources/endpoint.md index 3791e46792..4fec5f31c9 100755 --- a/docs/data-sources/endpoint.md +++ b/docs/data-sources/endpoint.md @@ -14,7 +14,7 @@ Endpoint datasource retrieves information about a single network connectivity en This is an example for getting an endpoint in Azure cloud: ```hcl data "databricks_endpoint" "this" { - account_id = "eae3abf6-1496-494e-9983-4660a5ad5aab" + parent = "accounts/123e4567-e89b-12d3-a456-426614174000" endpoint_id = "endpoint-123" } ``` diff --git a/docs/data-sources/endpoints.md b/docs/data-sources/endpoints.md index c1ed4447ee..4e271424c1 100755 --- a/docs/data-sources/endpoints.md +++ b/docs/data-sources/endpoints.md @@ -14,14 +14,15 @@ Endpoints datasource retrieves a list of all network connectivity endpoints for This is an example for listing endpoints in Azure cloud: ```hcl data "databricks_endpoints" "all" { - account_id = "eae3abf6-1496-494e-9983-4660a5ad5aab" + parent = "accounts/123e4567-e89b-12d3-a456-426614174000" } ``` ## Arguments The following arguments are supported: -* `parent` (string, required) +* `parent` (string, required) - The parent resource name of the account to list endpoints for. + Format: `accounts/{account_id}` * `page_size` (integer, optional) diff --git a/docs/data-sources/feature_engineering_feature.md b/docs/data-sources/feature_engineering_feature.md index e046f22b78..934283c775 100755 --- a/docs/data-sources/feature_engineering_feature.md +++ b/docs/data-sources/feature_engineering_feature.md @@ -44,9 +44,16 @@ The following attributes are exported: * `kafka_source` (KafkaSource) ### DeltaTableSource +* `dataframe_schema` (string) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. 
+ Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} * `entity_columns` (list of string) - The entity columns of the Delta table +* `filter_condition` (string) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections * `full_name` (string) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string) - The timeseries column of the Delta table +* `transformation_sql` (string) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### Function * `extra_parameters` (list of FunctionExtraParameter) - Extra parameters for parameterized functions diff --git a/docs/data-sources/feature_engineering_features.md b/docs/data-sources/feature_engineering_features.md index 5bc5e1c892..866b15954d 100755 --- a/docs/data-sources/feature_engineering_features.md +++ b/docs/data-sources/feature_engineering_features.md @@ -45,9 +45,16 @@ This data source exports a single attribute, `features`. It is a list of resourc * `kafka_source` (KafkaSource) ### DeltaTableSource +* `dataframe_schema` (string) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. 
+ Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} * `entity_columns` (list of string) - The entity columns of the Delta table +* `filter_condition` (string) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections * `full_name` (string) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string) - The timeseries column of the Delta table +* `transformation_sql` (string) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### Function * `extra_parameters` (list of FunctionExtraParameter) - Extra parameters for parameterized functions diff --git a/docs/data-sources/feature_engineering_kafka_config.md b/docs/data-sources/feature_engineering_kafka_config.md index 19f6d23157..1358ebb65b 100755 --- a/docs/data-sources/feature_engineering_kafka_config.md +++ b/docs/data-sources/feature_engineering_kafka_config.md @@ -40,9 +40,16 @@ The following attributes are exported: Only the delta table name is used for backfill, the entity columns and timeseries column are ignored as they are defined by the associated KafkaSource ### DeltaTableSource +* `dataframe_schema` (string) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. 
+ Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} * `entity_columns` (list of string) - The entity columns of the Delta table +* `filter_condition` (string) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections * `full_name` (string) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string) - The timeseries column of the Delta table +* `transformation_sql` (string) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### SchemaConfig * `json_schema` (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/) diff --git a/docs/data-sources/feature_engineering_kafka_configs.md b/docs/data-sources/feature_engineering_kafka_configs.md index 18326ac3b9..b0d9790bff 100755 --- a/docs/data-sources/feature_engineering_kafka_configs.md +++ b/docs/data-sources/feature_engineering_kafka_configs.md @@ -40,9 +40,16 @@ This data source exports a single attribute, `kafka_configs`. It is a list of re Only the delta table name is used for backfill, the entity columns and timeseries column are ignored as they are defined by the associated KafkaSource ### DeltaTableSource +* `dataframe_schema` (string) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. 
+ Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} * `entity_columns` (list of string) - The entity columns of the Delta table +* `filter_condition` (string) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections * `full_name` (string) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string) - The timeseries column of the Delta table +* `transformation_sql` (string) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### SchemaConfig * `json_schema` (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/) diff --git a/docs/data-sources/postgres_database.md b/docs/data-sources/postgres_database.md new file mode 100755 index 0000000000..04134620a7 --- /dev/null +++ b/docs/data-sources/postgres_database.md @@ -0,0 +1,50 @@ +--- +subcategory: "Postgres" +--- +# databricks_postgres_database Data Source +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `name` (string, required) - The resource name of the database. + Format: projects/{project_id}/branches/{branch_id}/databases/{database_id} +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. 
+ +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + +## Attributes +The following attributes are exported: +* `create_time` (string) - A timestamp indicating when the database was created +* `name` (string) - The resource name of the database. + Format: projects/{project_id}/branches/{branch_id}/databases/{database_id} +* `parent` (string) - The branch containing this database. + Format: projects/{project_id}/branches/{branch_id} +* `spec` (DatabaseDatabaseSpec) - The desired state of the Database +* `status` (DatabaseDatabaseStatus) - The observed state of the Database +* `update_time` (string) - A timestamp indicating when the database was last updated + +### DatabaseDatabaseSpec +* `postgres_database` (string) - The name of the Postgres database. + + This expects a valid Postgres identifier as specified in the link below. + https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + Required when creating the Database. + + To rename, pass a valid postgres identifier when updating the Database +* `role` (string) - The name of the role that owns the database. + Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} + + To change the owner, pass valid existing Role name when updating the Database + + A database always has an owner + +### DatabaseDatabaseStatus +* `postgres_database` (string) - The name of the Postgres database +* `role` (string) - The name of the role that owns the database. 
+ Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} \ No newline at end of file diff --git a/docs/data-sources/postgres_databases.md b/docs/data-sources/postgres_databases.md new file mode 100755 index 0000000000..678cc84b2d --- /dev/null +++ b/docs/data-sources/postgres_databases.md @@ -0,0 +1,52 @@ +--- +subcategory: "Postgres" +--- +# databricks_postgres_databases Data Source +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `parent` (string, required) - The Branch that owns this collection of databases. + Format: projects/{project_id}/branches/{branch_id} +* `page_size` (integer, optional) - Upper bound for items returned +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. + +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + + +## Attributes +This data source exports a single attribute, `databases`. It is a list of resources, each with the following attributes: +* `create_time` (string) - A timestamp indicating when the database was created +* `name` (string) - The resource name of the database. + Format: projects/{project_id}/branches/{branch_id}/databases/{database_id} +* `parent` (string) - The branch containing this database. + Format: projects/{project_id}/branches/{branch_id} +* `spec` (DatabaseDatabaseSpec) - The desired state of the Database +* `status` (DatabaseDatabaseStatus) - The observed state of the Database +* `update_time` (string) - A timestamp indicating when the database was last updated + +### DatabaseDatabaseSpec +* `postgres_database` (string) - The name of the Postgres database. 
+ + This expects a valid Postgres identifier as specified in the link below. + https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + Required when creating the Database. + + To rename, pass a valid postgres identifier when updating the Database +* `role` (string) - The name of the role that owns the database. + Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} + + To change the owner, pass valid existing Role name when updating the Database + + A database always has an owner + +### DatabaseDatabaseStatus +* `postgres_database` (string) - The name of the Postgres database +* `role` (string) - The name of the role that owns the database. + Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} \ No newline at end of file diff --git a/docs/data-sources/postgres_project.md b/docs/data-sources/postgres_project.md index dab877cae6..d065d8845b 100755 --- a/docs/data-sources/postgres_project.md +++ b/docs/data-sources/postgres_project.md @@ -86,6 +86,7 @@ The following attributes are exported: To preserve existing tags, omit this field from the update_mask (or use wildcard "*" which auto-excludes empty tags) * `default_endpoint_settings` (ProjectDefaultEndpointSettings) * `display_name` (string) - Human-readable project name. Length should be between 1 and 256 characters +* `enable_pg_native_login` (boolean) - Whether to enable PG native password login on all endpoints in this project. Defaults to true * `history_retention_duration` (string) - The number of seconds to retain the shared history for point in time recovery for all branches in this project. Value should be between 0s and 2592000s (up to 30 days) * `pg_version` (integer) - The major Postgres version number. 
Supported versions are 16 and 17 @@ -95,6 +96,7 @@ The following attributes are exported: * `custom_tags` (list of ProjectCustomTag) - The effective custom tags associated with the project * `default_endpoint_settings` (ProjectDefaultEndpointSettings) - The effective default endpoint settings * `display_name` (string) - The effective human-readable project name +* `enable_pg_native_login` (boolean) - Whether to enable PG native password login on all endpoints in this project * `history_retention_duration` (string) - The effective number of seconds to retain the shared history for point in time recovery * `owner` (string) - The email of the project owner * `pg_version` (integer) - The effective major Postgres version number diff --git a/docs/data-sources/postgres_projects.md b/docs/data-sources/postgres_projects.md index 978bbdcc9f..195c93ad1d 100755 --- a/docs/data-sources/postgres_projects.md +++ b/docs/data-sources/postgres_projects.md @@ -76,6 +76,7 @@ This data source exports a single attribute, `projects`. It is a list of resourc To preserve existing tags, omit this field from the update_mask (or use wildcard "*" which auto-excludes empty tags) * `default_endpoint_settings` (ProjectDefaultEndpointSettings) * `display_name` (string) - Human-readable project name. Length should be between 1 and 256 characters +* `enable_pg_native_login` (boolean) - Whether to enable PG native password login on all endpoints in this project. Defaults to true * `history_retention_duration` (string) - The number of seconds to retain the shared history for point in time recovery for all branches in this project. Value should be between 0s and 2592000s (up to 30 days) * `pg_version` (integer) - The major Postgres version number. Supported versions are 16 and 17 @@ -85,6 +86,7 @@ This data source exports a single attribute, `projects`. 
It is a list of resourc * `custom_tags` (list of ProjectCustomTag) - The effective custom tags associated with the project * `default_endpoint_settings` (ProjectDefaultEndpointSettings) - The effective default endpoint settings * `display_name` (string) - The effective human-readable project name +* `enable_pg_native_login` (boolean) - Whether to enable PG native password login on all endpoints in this project * `history_retention_duration` (string) - The effective number of seconds to retain the shared history for point in time recovery * `owner` (string) - The email of the project owner * `pg_version` (integer) - The effective major Postgres version number diff --git a/docs/resources/app_space.md b/docs/resources/app_space.md new file mode 100755 index 0000000000..d677ca4eb7 --- /dev/null +++ b/docs/resources/app_space.md @@ -0,0 +1,114 @@ +--- +subcategory: "Apps" +--- +# databricks_app_space Resource +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `name` (string, required) - The name of the app space. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace +* `description` (string, optional) - The description of the app space +* `resources` (list of AppResource, optional) - Resources for the app space. Resources configured at the space level are available to all apps in the space +* `usage_policy_id` (string, optional) - The usage policy ID for managing cost at the space level +* `user_api_scopes` (list of string, optional) - OAuth scopes for apps in the space +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. + +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. 
This workspace must be part of the account which the provider is configured with. + +### AppResource +* `name` (string, required) - Name of the App Resource +* `app` (AppResourceApp, optional) +* `database` (AppResourceDatabase, optional) +* `description` (string, optional) - Description of the App Resource +* `experiment` (AppResourceExperiment, optional) +* `genie_space` (AppResourceGenieSpace, optional) +* `job` (AppResourceJob, optional) +* `postgres` (AppResourcePostgres, optional) +* `secret` (AppResourceSecret, optional) +* `serving_endpoint` (AppResourceServingEndpoint, optional) +* `sql_warehouse` (AppResourceSqlWarehouse, optional) +* `uc_securable` (AppResourceUcSecurable, optional) + +### AppResourceDatabase +* `database_name` (string, required) +* `instance_name` (string, required) +* `permission` (string, required) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceExperiment +* `experiment_id` (string, required) +* `permission` (string, required) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_READ` + +### AppResourceGenieSpace +* `name` (string, required) +* `permission` (string, required) - Possible values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_RUN`, `CAN_VIEW` +* `space_id` (string, required) + +### AppResourceJob +* `id` (string, required) - Id of the job to grant permission on +* `permission` (string, required) - Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", "CAN_MANAGE_RUN", "CAN_VIEW". Possible values are: `CAN_MANAGE`, `CAN_MANAGE_RUN`, `CAN_VIEW`, `IS_OWNER` + +### AppResourcePostgres +* `branch` (string, optional) +* `database` (string, optional) +* `permission` (string, optional) - Possible values are: `CAN_CONNECT_AND_CREATE` + +### AppResourceSecret +* `key` (string, required) - Key of the secret to grant permission on +* `permission` (string, required) - Permission to grant on the secret scope. For secrets, only one permission is allowed. 
Permission must be one of: "READ", "WRITE", "MANAGE". Possible values are: `MANAGE`, `READ`, `WRITE` +* `scope` (string, required) - Scope of the secret to grant permission on + +### AppResourceServingEndpoint +* `name` (string, required) - Name of the serving endpoint to grant permission on +* `permission` (string, required) - Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", "CAN_QUERY", "CAN_VIEW". Possible values are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW` + +### AppResourceSqlWarehouse +* `id` (string, required) - Id of the SQL warehouse to grant permission on +* `permission` (string, required) - Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE", "IS_OWNER". Possible values are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER` + +### AppResourceUcSecurable +* `permission` (string, required) - Possible values are: `EXECUTE`, `MODIFY`, `READ_VOLUME`, `SELECT`, `USE_CONNECTION`, `WRITE_VOLUME` +* `securable_full_name` (string, required) +* `securable_type` (string, required) - Possible values are: `CONNECTION`, `FUNCTION`, `TABLE`, `VOLUME` + +## Attributes +In addition to the above arguments, the following attributes are exported: +* `create_time` (string) - The creation time of the app space. 
Formatted timestamp in ISO 8601 +* `creator` (string) - The email of the user that created the app space +* `effective_usage_policy_id` (string) - The effective usage policy ID used by apps in the space +* `effective_user_api_scopes` (list of string) - The effective api scopes granted to the user access token +* `id` (string) - The unique identifier of the app space +* `service_principal_client_id` (string) - The service principal client ID for the app space +* `service_principal_id` (integer) - The service principal ID for the app space +* `service_principal_name` (string) - The service principal name for the app space +* `status` (SpaceStatus) - The status of the app space +* `update_time` (string) - The update time of the app space. Formatted timestamp in ISO 8601 +* `updater` (string) - The email of the user that last updated the app space + +### AppResourceUcSecurable +* `securable_kind` (string) - The securable kind from Unity Catalog. + See https://docs.databricks.com/api/workspace/tables/get#securable_kind_manifest-securable_kind + +### SpaceStatus +* `message` (string) - Message providing context about the current state +* `state` (string) - The state of the app space. Possible values are: `SPACE_ACTIVE`, `SPACE_CREATING`, `SPACE_DELETED`, `SPACE_DELETING`, `SPACE_ERROR`, `SPACE_UPDATING` + +## Import +As of Terraform v1.5, resources can be imported through configuration. 
+```hcl +import { + id = "name" + to = databricks_app_space.this +} +``` + +If you are using an older version of Terraform, import the resource using the `terraform import` command as follows: +```sh +terraform import databricks_app_space.this "name" +``` \ No newline at end of file diff --git a/docs/resources/endpoint.md b/docs/resources/endpoint.md index 0a4c189289..0f71fc8cd0 100755 --- a/docs/resources/endpoint.md +++ b/docs/resources/endpoint.md @@ -17,9 +17,9 @@ This resource manages Azure Private Endpoints that connect to Databricks workspa This is an example for creating an endpoint in Azure cloud: ```hcl resource "databricks_endpoint" "this" { - account_id = "eae3abf6-1496-494e-9983-4660a5ad5aab" - endpoint_name = "my-private-endpoint" - region = "westus" + parent = "accounts/123e4567-e89b-12d3-a456-426614174000" + display_name = "my-private-endpoint" + region = "westus" azure_private_endpoint_info { private_endpoint_name = "my-pe" private_endpoint_resource_guid = "12345678-1234-1234-1234-123456789abc" @@ -33,7 +33,8 @@ The following arguments are supported: * `display_name` (string, required) - The human-readable display name of this endpoint. The input should conform to RFC-1034, which restricts to letters, numbers, and hyphens, with the first character a letter, the last a letter or a number, and a 63 character maximum -* `parent` (string, required) +* `parent` (string, required) - The parent resource name of the account under which the endpoint is created. 
+ Format: `accounts/{account_id}` * `region` (string, required) - The cloud provider region where this endpoint is located * `azure_private_endpoint_info` (AzurePrivateEndpointInfo, optional) - Info for an Azure private endpoint diff --git a/docs/resources/feature_engineering_feature.md b/docs/resources/feature_engineering_feature.md index ae726763e8..35aff5e9a3 100755 --- a/docs/resources/feature_engineering_feature.md +++ b/docs/resources/feature_engineering_feature.md @@ -43,6 +43,13 @@ The following arguments are supported: * `entity_columns` (list of string, required) - The entity columns of the Delta table * `full_name` (string, required) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string, required) - The timeseries column of the Delta table +* `dataframe_schema` (string, optional) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. + Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} +* `filter_condition` (string, optional) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections +* `transformation_sql` (string, optional) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### Function * `function_type` (string, required) - The type of the function. 
Possible values are: `APPROX_COUNT_DISTINCT`, `APPROX_PERCENTILE`, `AVG`, `COUNT`, `FIRST`, `LAST`, `MAX`, `MIN`, `STDDEV_POP`, `STDDEV_SAMP`, `SUM`, `VAR_POP`, `VAR_SAMP` diff --git a/docs/resources/feature_engineering_kafka_config.md b/docs/resources/feature_engineering_kafka_config.md index 48a1516f73..b4bd82e67a 100755 --- a/docs/resources/feature_engineering_kafka_config.md +++ b/docs/resources/feature_engineering_kafka_config.md @@ -36,6 +36,13 @@ The following arguments are supported: * `entity_columns` (list of string, required) - The entity columns of the Delta table * `full_name` (string, required) - The full three-part (catalog, schema, table) name of the Delta table * `timeseries_column` (string, required) - The timeseries column of the Delta table +* `dataframe_schema` (string, optional) - Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from df.schema.json()). + Required if transformation_sql is specified. + Example: {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} +* `filter_condition` (string, optional) - Single WHERE clause to filter delta table before applying transformations. Will be row-wise evaluated, so should only include conditionals and projections +* `transformation_sql` (string, optional) - A single SQL SELECT expression applied after filter_condition. + Should contains all the columns needed (eg. 
"SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + col_b AS col_c") + If transformation_sql is not provided, all columns of the delta table are present in the DataSource dataframe ### SchemaConfig * `json_schema` (string, optional) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/) diff --git a/docs/resources/postgres_database.md b/docs/resources/postgres_database.md new file mode 100755 index 0000000000..2b96b5278a --- /dev/null +++ b/docs/resources/postgres_database.md @@ -0,0 +1,70 @@ +--- +subcategory: "Postgres" +--- +# databricks_postgres_database Resource +[![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types) + + + +## Example Usage + + +## Arguments +The following arguments are supported: +* `parent` (string, required) - The branch containing this database. + Format: projects/{project_id}/branches/{branch_id} +* `database_id` (string, optional) - The ID to use for the Database, which will become the final component of + the database's resource name. + This ID becomes the database name in postgres. + + This value should be 4-63 characters, and only use characters available in DNS names, + as defined by RFC-1123 + + If database_id is not specified in the request, it is generated automatically +* `spec` (DatabaseDatabaseSpec, optional) - The desired state of the Database +* `provider_config` (ProviderConfig, optional) - Configure the provider for management through account provider. + +### ProviderConfig +* `workspace_id` (string,required) - Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + +### DatabaseDatabaseSpec +* `postgres_database` (string, optional) - The name of the Postgres database. + + This expects a valid Postgres identifier as specified in the link below. 
+ https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + Required when creating the Database. + + To rename, pass a valid postgres identifier when updating the Database +* `role` (string, optional) - The name of the role that owns the database. + Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} + + To change the owner, pass valid existing Role name when updating the Database + + A database always has an owner + +### DatabaseDatabaseStatus +* `postgres_database` (string, optional) - The name of the Postgres database +* `role` (string, optional) - The name of the role that owns the database. + Format: projects/{project_id}/branches/{branch_id}/roles/{role_id} + +## Attributes +In addition to the above arguments, the following attributes are exported: +* `create_time` (string) - A timestamp indicating when the database was created +* `name` (string) - The resource name of the database. + Format: projects/{project_id}/branches/{branch_id}/databases/{database_id} +* `status` (DatabaseDatabaseStatus) - The observed state of the Database +* `update_time` (string) - A timestamp indicating when the database was last updated + +## Import +As of Terraform v1.5, resources can be imported through configuration. 
+```hcl +import { + id = "name" + to = databricks_postgres_database.this +} +``` + +If you are using an older version of Terraform, import the resource using the `terraform import` command as follows: +```sh +terraform import databricks_postgres_database.this "name" +``` \ No newline at end of file diff --git a/docs/resources/postgres_project.md b/docs/resources/postgres_project.md index d0048e81bf..5af4632081 100755 --- a/docs/resources/postgres_project.md +++ b/docs/resources/postgres_project.md @@ -139,6 +139,7 @@ The following arguments are supported: To preserve existing tags, omit this field from the update_mask (or use wildcard "*" which auto-excludes empty tags) * `default_endpoint_settings` (ProjectDefaultEndpointSettings, optional) * `display_name` (string, optional) - Human-readable project name. Length should be between 1 and 256 characters +* `enable_pg_native_login` (boolean, optional) - Whether to enable PG native password login on all endpoints in this project. Defaults to true * `history_retention_duration` (string, optional) - The number of seconds to retain the shared history for point in time recovery for all branches in this project. Value should be between 0s and 2592000s (up to 30 days) * `pg_version` (integer, optional) - The major Postgres version number. 
Supported versions are 16 and 17 @@ -157,6 +158,7 @@ In addition to the above arguments, the following attributes are exported: * `custom_tags` (list of ProjectCustomTag) - The effective custom tags associated with the project * `default_endpoint_settings` (ProjectDefaultEndpointSettings) - The effective default endpoint settings * `display_name` (string) - The effective human-readable project name +* `enable_pg_native_login` (boolean) - Whether to enable PG native password login on all endpoints in this project * `history_retention_duration` (string) - The effective number of seconds to retain the shared history for point in time recovery * `owner` (string) - The email of the project owner * `pg_version` (integer) - The effective major Postgres version number diff --git a/go.mod b/go.mod index e326bcec63..9e891872a8 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/databricks/terraform-provider-databricks go 1.24.0 require ( - github.com/databricks/databricks-sdk-go v0.113.0 + github.com/databricks/databricks-sdk-go v0.118.0 github.com/golang-jwt/jwt/v4 v4.5.2 github.com/hashicorp/go-cty v1.5.0 github.com/hashicorp/hcl v1.0.0 diff --git a/go.sum b/go.sum index 245f837400..051a38e07e 100644 --- a/go.sum +++ b/go.sum @@ -27,8 +27,8 @@ github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/databricks/databricks-sdk-go v0.113.0 h1:Tc4UmeBDptls+gRUzIedrVdfUkwW8j4HhvFptVf/kps= -github.com/databricks/databricks-sdk-go v0.113.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= +github.com/databricks/databricks-sdk-go v0.118.0 h1:KTEL2bQnKZIsFZAGT4m/Bg23Im6jYkNDzMJkWjKRc2s= +github.com/databricks/databricks-sdk-go v0.118.0/go.mod 
h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= diff --git a/internal/providers/pluginfw/auto_generation.go b/internal/providers/pluginfw/auto_generation.go index 11b020c216..530c321c8d 100644 --- a/internal/providers/pluginfw/auto_generation.go +++ b/internal/providers/pluginfw/auto_generation.go @@ -13,9 +13,9 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/alert_v2" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/apps_settings_custom_template" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/app_space" - "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/apps_space" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/apps_settings_custom_template" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/budget_policy" @@ -49,6 +49,8 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/postgres_branch" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/postgres_database" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/postgres_endpoint" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/postgres_project" @@ -80,8 +82,8 @@ var autoGeneratedResources = []func() resource.Resource{ account_setting_user_preference_v2.ResourceUserPreference, account_setting_v2.ResourceSetting, alert_v2.ResourceAlertV2, + app_space.ResourceSpace, 
apps_settings_custom_template.ResourceCustomTemplate, - apps_space.ResourceSpace, budget_policy.ResourceBudgetPolicy, data_quality_monitor.ResourceMonitor, data_quality_refresh.ResourceRefresh, @@ -98,6 +100,7 @@ var autoGeneratedResources = []func() resource.Resource{ online_store.ResourceOnlineStore, policy_info.ResourcePolicyInfo, postgres_branch.ResourceBranch, + postgres_database.ResourceDatabase, postgres_endpoint.ResourceEndpoint, postgres_project.ResourceProject, quality_monitor_v2.ResourceQualityMonitor, @@ -117,8 +120,8 @@ var autoGeneratedDataSources = []func() datasource.DataSource{ account_setting_user_preference_v2.DataSourceUserPreference, account_setting_v2.DataSourceSetting, alert_v2.DataSourceAlertV2, + app_space.DataSourceSpace, apps_settings_custom_template.DataSourceCustomTemplate, - apps_space.DataSourceSpace, budget_policy.DataSourceBudgetPolicy, data_quality_monitor.DataSourceMonitor, data_quality_refresh.DataSourceRefresh, @@ -135,6 +138,7 @@ var autoGeneratedDataSources = []func() datasource.DataSource{ online_store.DataSourceOnlineStore, policy_info.DataSourcePolicyInfo, postgres_branch.DataSourceBranch, + postgres_database.DataSourceDatabase, postgres_endpoint.DataSourceEndpoint, postgres_project.DataSourceProject, quality_monitor_v2.DataSourceQualityMonitor, @@ -148,8 +152,8 @@ var autoGeneratedDataSources = []func() datasource.DataSource{ account_federation_policy.DataSourceFederationPolicies, account_network_policy.DataSourceAccountNetworkPolicies, alert_v2.DataSourceAlertsV2, + app_space.DataSourceSpaces, apps_settings_custom_template.DataSourceCustomTemplates, - apps_space.DataSourceSpaces, budget_policy.DataSourceBudgetPolicies, data_quality_monitor.DataSourceMonitors, data_quality_refresh.DataSourceRefreshes, @@ -166,6 +170,7 @@ var autoGeneratedDataSources = []func() datasource.DataSource{ online_store.DataSourceOnlineStores, policy_info.DataSourcePolicyInfos, postgres_branch.DataSourceBranches, + 
postgres_database.DataSourceDatabases, postgres_endpoint.DataSourceEndpoints, postgres_project.DataSourceProjects, quality_monitor_v2.DataSourceQualityMonitors, diff --git a/internal/providers/pluginfw/products/app_space/data_app_space.go b/internal/providers/pluginfw/products/app_space/data_app_space.go new file mode 100755 index 0000000000..4e67888f1e --- /dev/null +++ b/internal/providers/pluginfw/products/app_space/data_app_space.go @@ -0,0 +1,308 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package app_space + +import ( + "context" + "reflect" + "regexp" + + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/databricks/terraform-provider-databricks/internal/service/apps_tf" + "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const dataSourceName = "app_space" + +var _ datasource.DataSourceWithConfigure = &SpaceDataSource{} + +func DataSourceSpace() datasource.DataSource { + return &SpaceDataSource{} +} + +type SpaceDataSource struct { + Client 
*autogen.DatabricksClient +} + +// ProviderConfigData contains the fields to configure the provider. +type ProviderConfigData struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfig type. +func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^[1-9]\d*$`), "workspace_id must be a positive integer without leading zeros")) + return attrs +} + +// ProviderConfigDataWorkspaceIDPlanModifier is plan modifier for the workspace_id field. +// Resource requires replacement if the workspace_id changes from one non-empty value to another. +func ProviderConfigDataWorkspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { + // Require replacement if workspace_id changes from one non-empty value to another + oldValue := req.StateValue.ValueString() + newValue := req.PlanValue.ValueString() + + if oldValue != "" && newValue != "" && oldValue != newValue { + resp.RequiresReplace = true + } +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// ProviderConfigData struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. 
+// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (r ProviderConfigData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ProviderConfigData +// only implements ToObjectValue() and Type(). +func (r ProviderConfigData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (r ProviderConfigData) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} + +// SpaceData extends the main model with additional fields. +type SpaceData struct { + // The creation time of the app space. Formatted timestamp in ISO 6801. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The email of the user that created the app space. + Creator types.String `tfsdk:"creator"` + // The description of the app space. + Description types.String `tfsdk:"description"` + // The effective usage policy ID used by apps in the space. + EffectiveUsagePolicyId types.String `tfsdk:"effective_usage_policy_id"` + // The effective api scopes granted to the user access token. + EffectiveUserApiScopes types.List `tfsdk:"effective_user_api_scopes"` + // The unique identifier of the app space. 
+ Id types.String `tfsdk:"id"` + // The name of the app space. The name must contain only lowercase + // alphanumeric characters and hyphens. It must be unique within the + // workspace. + Name types.String `tfsdk:"name"` + // Resources for the app space. Resources configured at the space level are + // available to all apps in the space. + Resources types.List `tfsdk:"resources"` + // The service principal client ID for the app space. + ServicePrincipalClientId types.String `tfsdk:"service_principal_client_id"` + // The service principal ID for the app space. + ServicePrincipalId types.Int64 `tfsdk:"service_principal_id"` + // The service principal name for the app space. + ServicePrincipalName types.String `tfsdk:"service_principal_name"` + // The status of the app space. + Status types.Object `tfsdk:"status"` + // The update time of the app space. Formatted timestamp in ISO 6801. + UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` + // The email of the user that last updated the app space. + Updater types.String `tfsdk:"updater"` + // The usage policy ID for managing cost at the space level. + UsagePolicyId types.String `tfsdk:"usage_policy_id"` + // OAuth scopes for apps in the space. + UserApiScopes types.List `tfsdk:"user_api_scopes"` + ProviderConfigData types.Object `tfsdk:"provider_config"` +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// SpaceData struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. 
+func (m SpaceData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "effective_user_api_scopes": reflect.TypeOf(types.String{}), + "resources": reflect.TypeOf(apps_tf.AppResource{}), + "status": reflect.TypeOf(apps_tf.SpaceStatus{}), + "user_api_scopes": reflect.TypeOf(types.String{}), + "provider_config": reflect.TypeOf(ProviderConfigData{}), + } +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, SpaceData +// only implements ToObjectValue() and Type(). +func (m SpaceData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "create_time": m.CreateTime, + "creator": m.Creator, + "description": m.Description, + "effective_usage_policy_id": m.EffectiveUsagePolicyId, + "effective_user_api_scopes": m.EffectiveUserApiScopes, + "id": m.Id, + "name": m.Name, + "resources": m.Resources, + "service_principal_client_id": m.ServicePrincipalClientId, + "service_principal_id": m.ServicePrincipalId, + "service_principal_name": m.ServicePrincipalName, + "status": m.Status, + "update_time": m.UpdateTime, + "updater": m.Updater, + "usage_policy_id": m.UsagePolicyId, + "user_api_scopes": m.UserApiScopes, + + "provider_config": m.ProviderConfigData, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. 
+func (m SpaceData) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "create_time": timetypes.RFC3339{}.Type(ctx), + "creator": types.StringType, + "description": types.StringType, + "effective_usage_policy_id": types.StringType, + "effective_user_api_scopes": basetypes.ListType{ + ElemType: types.StringType, + }, + "id": types.StringType, + "name": types.StringType, + "resources": basetypes.ListType{ + ElemType: apps_tf.AppResource{}.Type(ctx), + }, + "service_principal_client_id": types.StringType, + "service_principal_id": types.Int64Type, + "service_principal_name": types.StringType, + "status": apps_tf.SpaceStatus{}.Type(ctx), + "update_time": timetypes.RFC3339{}.Type(ctx), + "updater": types.StringType, + "usage_policy_id": types.StringType, + "user_api_scopes": basetypes.ListType{ + ElemType: types.StringType, + }, + + "provider_config": ProviderConfigData{}.Type(ctx), + }, + } +} + +func (m SpaceData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["creator"] = attrs["creator"].SetComputed() + attrs["description"] = attrs["description"].SetComputed() + attrs["effective_usage_policy_id"] = attrs["effective_usage_policy_id"].SetComputed() + attrs["effective_user_api_scopes"] = attrs["effective_user_api_scopes"].SetComputed() + attrs["id"] = attrs["id"].SetComputed() + attrs["name"] = attrs["name"].SetRequired() + attrs["resources"] = attrs["resources"].SetComputed() + attrs["service_principal_client_id"] = attrs["service_principal_client_id"].SetComputed() + attrs["service_principal_id"] = attrs["service_principal_id"].SetComputed() + attrs["service_principal_name"] = attrs["service_principal_name"].SetComputed() + attrs["status"] = attrs["status"].SetComputed() + attrs["update_time"] = attrs["update_time"].SetComputed() + attrs["updater"] = attrs["updater"].SetComputed() + 
attrs["usage_policy_id"] = attrs["usage_policy_id"].SetComputed() + attrs["user_api_scopes"] = attrs["user_api_scopes"].SetComputed() + + attrs["provider_config"] = attrs["provider_config"].SetOptional() + + return attrs +} + +func (r *SpaceDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(dataSourceName) +} + +func (r *SpaceDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, SpaceData{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks Space", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *SpaceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + r.Client = autogen.ConfigureDataSource(req, resp) +} + +func (r *SpaceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName) + + var config SpaceData + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) + if resp.Diagnostics.HasError() { + return + } + + var readRequest apps.GetSpaceRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, config, &readRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + + response, err := client.Apps.GetSpace(ctx, readRequest) + if err != nil { + resp.Diagnostics.AddError("failed to get app_space", err.Error()) + return + } + + var newState SpaceData + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, response, &newState)...) + if resp.Diagnostics.HasError() { + return + } + // Preserve provider_config from config since it's not part of the API response + newState.ProviderConfigData = config.ProviderConfigData + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) +} diff --git a/internal/providers/pluginfw/products/app_space/data_app_spaces.go b/internal/providers/pluginfw/products/app_space/data_app_spaces.go new file mode 100755 index 0000000000..0305da0d34 --- /dev/null +++ b/internal/providers/pluginfw/products/app_space/data_app_spaces.go @@ -0,0 +1,124 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package app_space + +import ( + "context" + "reflect" + + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const dataSourcesName = "app_spaces" + +var _ datasource.DataSourceWithConfigure = &SpacesDataSource{} + +func DataSourceSpaces() datasource.DataSource { + return &SpacesDataSource{} +} + +// SpacesData extends the main 
model with additional fields. +type SpacesData struct { + Apps types.List `tfsdk:"spaces"` + // Upper bound for items returned. + PageSize types.Int64 `tfsdk:"page_size"` + ProviderConfigData types.Object `tfsdk:"provider_config"` +} + +func (SpacesData) GetComplexFieldTypes(context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "spaces": reflect.TypeOf(SpaceData{}), + "provider_config": reflect.TypeOf(ProviderConfigData{}), + } +} + +func (m SpacesData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["page_size"] = attrs["page_size"].SetOptional() + + attrs["spaces"] = attrs["spaces"].SetComputed() + attrs["provider_config"] = attrs["provider_config"].SetOptional() + + return attrs +} + +type SpacesDataSource struct { + Client *autogen.DatabricksClient +} + +func (r *SpacesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(dataSourcesName) +} + +func (r *SpacesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, SpacesData{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks Space", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *SpacesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + r.Client = autogen.ConfigureDataSource(req, resp) +} + +func (r *SpacesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourcesName) + + var config SpacesData + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) 
+ if resp.Diagnostics.HasError() { + return + } + + var listRequest apps.ListSpacesRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, config, &listRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) + if resp.Diagnostics.HasError() { + return + } + + response, err := client.Apps.ListSpacesAll(ctx, listRequest) + if err != nil { + resp.Diagnostics.AddError("failed to list app_spaces", err.Error()) + return + } + + var results = []attr.Value{} + for _, item := range response { + var space SpaceData + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, item, &space)...) + if resp.Diagnostics.HasError() { + return + } + space.ProviderConfigData = config.ProviderConfigData + + results = append(results, space.ToObjectValue(ctx)) + } + + config.Apps = types.ListValueMust(SpaceData{}.Type(ctx), results) + resp.Diagnostics.Append(resp.State.Set(ctx, config)...) +} diff --git a/internal/providers/pluginfw/products/app_space/resource_app_space.go b/internal/providers/pluginfw/products/app_space/resource_app_space.go new file mode 100755 index 0000000000..d61585a4f7 --- /dev/null +++ b/internal/providers/pluginfw/products/app_space/resource_app_space.go @@ -0,0 +1,684 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package app_space + +import ( + "context" + "fmt" + "reflect" + "regexp" + "strings" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/common/types/fieldmask" + "github.com/databricks/databricks-sdk-go/service/apps" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/databricks/terraform-provider-databricks/internal/service/apps_tf" + "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const resourceName = "app_space" + +var _ resource.ResourceWithConfigure = &SpaceResource{} + +func ResourceSpace() resource.Resource { + return &SpaceResource{} +} + +type SpaceResource struct { + Client *autogen.DatabricksClient +} + +// ProviderConfig contains the fields to configure the provider. 
+type ProviderConfig struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfig type. +func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddPlanModifier( + stringplanmodifier.RequiresReplaceIf(ProviderConfigWorkspaceIDPlanModifier, "", "")) + + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^[1-9]\d*$`), "workspace_id must be a positive integer without leading zeros")) + return attrs +} + +// ProviderConfigWorkspaceIDPlanModifier is a plan modifier for the workspace_id field. +// Resource requires replacement if the workspace_id changes from one non-empty value to another. +func ProviderConfigWorkspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { + // Require replacement if workspace_id changes from one non-empty value to another + oldValue := req.StateValue.ValueString() + newValue := req.PlanValue.ValueString() + + if oldValue != "" && newValue != "" && oldValue != newValue { + resp.RequiresReplace = true + } +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// ProviderConfig struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. 
The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (r ProviderConfig) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ProviderConfig +// only implements ToObjectValue() and Type(). +func (r ProviderConfig) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (r ProviderConfig) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} + +// Space extends the main model with additional fields. +type Space struct { + // The creation time of the app space. Formatted timestamp in ISO 8601. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The email of the user that created the app space. + Creator types.String `tfsdk:"creator"` + // The description of the app space. + Description types.String `tfsdk:"description"` + // The effective usage policy ID used by apps in the space. + EffectiveUsagePolicyId types.String `tfsdk:"effective_usage_policy_id"` + // The effective API scopes granted to the user access token. 
+ EffectiveUserApiScopes types.List `tfsdk:"effective_user_api_scopes"` + // The unique identifier of the app space. + Id types.String `tfsdk:"id"` + // The name of the app space. The name must contain only lowercase + // alphanumeric characters and hyphens. It must be unique within the + // workspace. + Name types.String `tfsdk:"name"` + // Resources for the app space. Resources configured at the space level are + // available to all apps in the space. + Resources types.List `tfsdk:"resources"` + // The service principal client ID for the app space. + ServicePrincipalClientId types.String `tfsdk:"service_principal_client_id"` + // The service principal ID for the app space. + ServicePrincipalId types.Int64 `tfsdk:"service_principal_id"` + // The service principal name for the app space. + ServicePrincipalName types.String `tfsdk:"service_principal_name"` + // The status of the app space. + Status types.Object `tfsdk:"status"` + // The update time of the app space. Formatted timestamp in ISO 8601. + UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` + // The email of the user that last updated the app space. + Updater types.String `tfsdk:"updater"` + // The usage policy ID for managing cost at the space level. + UsagePolicyId types.String `tfsdk:"usage_policy_id"` + // OAuth scopes for apps in the space. + UserApiScopes types.List `tfsdk:"user_api_scopes"` + ProviderConfig types.Object `tfsdk:"provider_config"` +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// Space struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. 
+// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (m Space) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "effective_user_api_scopes": reflect.TypeOf(types.String{}), + "resources": reflect.TypeOf(apps_tf.AppResource{}), + "status": reflect.TypeOf(apps_tf.SpaceStatus{}), + "user_api_scopes": reflect.TypeOf(types.String{}), + "provider_config": reflect.TypeOf(ProviderConfig{}), + } +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, Space +// only implements ToObjectValue() and Type(). +func (m Space) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{"create_time": m.CreateTime, + "creator": m.Creator, + "description": m.Description, + "effective_usage_policy_id": m.EffectiveUsagePolicyId, + "effective_user_api_scopes": m.EffectiveUserApiScopes, + "id": m.Id, + "name": m.Name, + "resources": m.Resources, + "service_principal_client_id": m.ServicePrincipalClientId, + "service_principal_id": m.ServicePrincipalId, + "service_principal_name": m.ServicePrincipalName, + "status": m.Status, + "update_time": m.UpdateTime, + "updater": m.Updater, + "usage_policy_id": m.UsagePolicyId, + "user_api_scopes": m.UserApiScopes, + + "provider_config": m.ProviderConfig, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. 
+func (m Space) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{"create_time": timetypes.RFC3339{}.Type(ctx), + "creator": types.StringType, + "description": types.StringType, + "effective_usage_policy_id": types.StringType, + "effective_user_api_scopes": basetypes.ListType{ + ElemType: types.StringType, + }, + "id": types.StringType, + "name": types.StringType, + "resources": basetypes.ListType{ + ElemType: apps_tf.AppResource{}.Type(ctx), + }, + "service_principal_client_id": types.StringType, + "service_principal_id": types.Int64Type, + "service_principal_name": types.StringType, + "status": apps_tf.SpaceStatus{}.Type(ctx), + "update_time": timetypes.RFC3339{}.Type(ctx), + "updater": types.StringType, + "usage_policy_id": types.StringType, + "user_api_scopes": basetypes.ListType{ + ElemType: types.StringType, + }, + + "provider_config": ProviderConfig{}.Type(ctx), + }, + } +} + +// SyncFieldsDuringCreateOrUpdate copies values from the plan into the receiver, +// including both embedded model fields and additional fields. This method is called +// during create and update. +func (to *Space) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Space) { + if !from.EffectiveUserApiScopes.IsNull() && !from.EffectiveUserApiScopes.IsUnknown() && to.EffectiveUserApiScopes.IsNull() && len(from.EffectiveUserApiScopes.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for EffectiveUserApiScopes, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. 
+ to.EffectiveUserApiScopes = from.EffectiveUserApiScopes + } + if !from.Resources.IsNull() && !from.Resources.IsUnknown() && to.Resources.IsNull() && len(from.Resources.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Resources, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.Resources = from.Resources + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + // Recursively sync the fields of Status + toStatus.SyncFieldsDuringCreateOrUpdate(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } + if !from.UserApiScopes.IsNull() && !from.UserApiScopes.IsUnknown() && to.UserApiScopes.IsNull() && len(from.UserApiScopes.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for UserApiScopes, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.UserApiScopes = from.UserApiScopes + } + to.ProviderConfig = from.ProviderConfig + +} + +// SyncFieldsDuringRead copies values from the existing state into the receiver, +// including both embedded model fields and additional fields. This method is called +// during read. +func (to *Space) SyncFieldsDuringRead(ctx context.Context, from Space) { + if !from.EffectiveUserApiScopes.IsNull() && !from.EffectiveUserApiScopes.IsUnknown() && to.EffectiveUserApiScopes.IsNull() && len(from.EffectiveUserApiScopes.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
+ // If a user specified a non-Null, empty list for EffectiveUserApiScopes, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.EffectiveUserApiScopes = from.EffectiveUserApiScopes + } + if !from.Resources.IsNull() && !from.Resources.IsUnknown() && to.Resources.IsNull() && len(from.Resources.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Resources, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.Resources = from.Resources + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + toStatus.SyncFieldsDuringRead(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } + if !from.UserApiScopes.IsNull() && !from.UserApiScopes.IsUnknown() && to.UserApiScopes.IsNull() && len(from.UserApiScopes.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for UserApiScopes, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. 
+ to.UserApiScopes = from.UserApiScopes + } + to.ProviderConfig = from.ProviderConfig + +} + +func (m Space) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["creator"] = attrs["creator"].SetComputed() + attrs["description"] = attrs["description"].SetOptional() + attrs["effective_usage_policy_id"] = attrs["effective_usage_policy_id"].SetComputed() + attrs["effective_user_api_scopes"] = attrs["effective_user_api_scopes"].SetComputed() + attrs["id"] = attrs["id"].SetComputed() + attrs["name"] = attrs["name"].SetRequired() + attrs["resources"] = attrs["resources"].SetOptional() + attrs["service_principal_client_id"] = attrs["service_principal_client_id"].SetComputed() + attrs["service_principal_id"] = attrs["service_principal_id"].SetComputed() + attrs["service_principal_name"] = attrs["service_principal_name"].SetComputed() + attrs["status"] = attrs["status"].SetComputed() + attrs["update_time"] = attrs["update_time"].SetComputed() + attrs["updater"] = attrs["updater"].SetComputed() + attrs["usage_policy_id"] = attrs["usage_policy_id"].SetOptional() + attrs["user_api_scopes"] = attrs["user_api_scopes"].SetOptional() + + attrs["name"] = attrs["name"].(tfschema.StringAttributeBuilder).AddPlanModifier(stringplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["provider_config"] = attrs["provider_config"].SetOptional() + + return attrs +} + +// GetEffectiveUserApiScopes returns the value of the EffectiveUserApiScopes field in Space as +// a slice of types.String values. +// If the field is unknown or null, the boolean return value is false. 
+func (m *Space) GetEffectiveUserApiScopes(ctx context.Context) ([]types.String, bool) { + if m.EffectiveUserApiScopes.IsNull() || m.EffectiveUserApiScopes.IsUnknown() { + return nil, false + } + var v []types.String + d := m.EffectiveUserApiScopes.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetEffectiveUserApiScopes sets the value of the EffectiveUserApiScopes field in Space. +func (m *Space) SetEffectiveUserApiScopes(ctx context.Context, v []types.String) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["effective_user_api_scopes"] + t = t.(attr.TypeWithElementType).ElementType() + m.EffectiveUserApiScopes = types.ListValueMust(t, vs) +} + +// GetResources returns the value of the Resources field in Space as +// a slice of apps_tf.AppResource values. +// If the field is unknown or null, the boolean return value is false. +func (m *Space) GetResources(ctx context.Context) ([]apps_tf.AppResource, bool) { + if m.Resources.IsNull() || m.Resources.IsUnknown() { + return nil, false + } + var v []apps_tf.AppResource + d := m.Resources.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetResources sets the value of the Resources field in Space. +func (m *Space) SetResources(ctx context.Context, v []apps_tf.AppResource) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e.ToObjectValue(ctx)) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["resources"] + t = t.(attr.TypeWithElementType).ElementType() + m.Resources = types.ListValueMust(t, vs) +} + +// GetStatus returns the value of the Status field in Space as +// a apps_tf.SpaceStatus value. +// If the field is unknown or null, the boolean return value is false. 
+func (m *Space) GetStatus(ctx context.Context) (apps_tf.SpaceStatus, bool) { + var e apps_tf.SpaceStatus + if m.Status.IsNull() || m.Status.IsUnknown() { + return e, false + } + var v apps_tf.SpaceStatus + d := m.Status.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetStatus sets the value of the Status field in Space. +func (m *Space) SetStatus(ctx context.Context, v apps_tf.SpaceStatus) { + vs := v.ToObjectValue(ctx) + m.Status = vs +} + +// GetUserApiScopes returns the value of the UserApiScopes field in Space as +// a slice of types.String values. +// If the field is unknown or null, the boolean return value is false. +func (m *Space) GetUserApiScopes(ctx context.Context) ([]types.String, bool) { + if m.UserApiScopes.IsNull() || m.UserApiScopes.IsUnknown() { + return nil, false + } + var v []types.String + d := m.UserApiScopes.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetUserApiScopes sets the value of the UserApiScopes field in Space. 
+func (m *Space) SetUserApiScopes(ctx context.Context, v []types.String) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["user_api_scopes"] + t = t.(attr.TypeWithElementType).ElementType() + m.UserApiScopes = types.ListValueMust(t, vs) +} + +func (r *SpaceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(resourceName) +} + +func (r *SpaceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + attrs, blocks := tfschema.ResourceStructToSchemaMap(ctx, Space{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks app_space", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *SpaceResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + r.Client = autogen.ConfigureResource(req, resp) +} + +func (r *SpaceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var plan Space + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + var space apps.Space + + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, plan, &space)...) + if resp.Diagnostics.HasError() { + return + } + + createRequest := apps.CreateSpaceRequest{ + Space: space, + } + + var namespace ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + + response, err := client.Apps.CreateSpace(ctx, createRequest) + if err != nil { + resp.Diagnostics.AddError("failed to create app_space", err.Error()) + return + } + + var newState Space + + waitResponse, err := response.Wait(ctx) + if err != nil { + resp.Diagnostics.AddError("error waiting for app_space to be ready", err.Error()) + return + } + + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, waitResponse, &newState)...) + + if resp.Diagnostics.HasError() { + return + } + + newState.SyncFieldsDuringCreateOrUpdate(ctx, plan) + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *SpaceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var existingState Space + resp.Diagnostics.Append(req.State.Get(ctx, &existingState)...) + if resp.Diagnostics.HasError() { + return + } + + var readRequest apps.GetSpaceRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, existingState, &readRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfig + resp.Diagnostics.Append(existingState.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + response, err := client.Apps.GetSpace(ctx, readRequest) + if err != nil { + if apierr.IsMissing(err) { + resp.State.RemoveResource(ctx) + return + } + + resp.Diagnostics.AddError("failed to get app_space", err.Error()) + return + } + + var newState Space + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, response, &newState)...) + if resp.Diagnostics.HasError() { + return + } + + newState.SyncFieldsDuringRead(ctx, existingState) + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) +} + +func (r *SpaceResource) update(ctx context.Context, plan Space, diags *diag.Diagnostics, state *tfsdk.State) { + var space apps.Space + + diags.Append(converters.TfSdkToGoSdkStruct(ctx, plan, &space)...) + if diags.HasError() { + return + } + + updateRequest := apps.UpdateSpaceRequest{ + Space: space, + Name: plan.Name.ValueString(), + UpdateMask: *fieldmask.New(strings.Split("description,resources,usage_policy_id,user_api_scopes", ",")), + } + + var namespace ProviderConfig + diags.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if diags.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + diags.Append(clientDiags...) + if diags.HasError() { + return + } + response, err := client.Apps.UpdateSpace(ctx, updateRequest) + if err != nil { + diags.AddError("failed to update app_space", err.Error()) + return + } + + var newState Space + + waitResponse, err := response.Wait(ctx) + if err != nil { + diags.AddError("error waiting for app_space update", err.Error()) + return + } + + diags.Append(converters.GoSdkToTfSdkStruct(ctx, waitResponse, &newState)...) + + if diags.HasError() { + return + } + + newState.SyncFieldsDuringCreateOrUpdate(ctx, plan) + diags.Append(state.Set(ctx, newState)...) 
+} + +func (r *SpaceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var plan Space + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + r.update(ctx, plan, &resp.Diagnostics, &resp.State) +} + +func (r *SpaceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var state Space + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + var deleteRequest apps.DeleteSpaceRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, state, &deleteRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfig + resp.Diagnostics.Append(state.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + + response, err := client.Apps.DeleteSpace(ctx, deleteRequest) + if err != nil { + resp.Diagnostics.AddError("failed to delete app_space", err.Error()) + return + } + + err = response.Wait(ctx) + if err != nil && !apierr.IsMissing(err) { + resp.Diagnostics.AddError("error waiting for app_space delete", err.Error()) + return + } + +} + +var _ resource.ResourceWithImportState = &SpaceResource{} + +func (r *SpaceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + parts := strings.Split(req.ID, ",") + + if len(parts) != 1 || parts[0] == "" { + resp.Diagnostics.AddError( + "Unexpected Import Identifier", + fmt.Sprintf( + "Expected import identifier with format: name. Got: %q", + req.ID, + ), + ) + return + } + + name := parts[0] + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), name)...) +} diff --git a/internal/providers/pluginfw/products/endpoint/data_endpoints.go b/internal/providers/pluginfw/products/endpoint/data_endpoints.go index 9136a1ce89..76f76dbd68 100755 --- a/internal/providers/pluginfw/products/endpoint/data_endpoints.go +++ b/internal/providers/pluginfw/products/endpoint/data_endpoints.go @@ -30,7 +30,8 @@ type EndpointsData struct { Endpoints types.List `tfsdk:"items"` PageSize types.Int64 `tfsdk:"page_size"` - + // The parent resource name of the account to list endpoints for. Format: + // `accounts/{account_id}`. Parent types.String `tfsdk:"parent"` } diff --git a/internal/providers/pluginfw/products/endpoint/resource_endpoint.go b/internal/providers/pluginfw/products/endpoint/resource_endpoint.go index 4d8f5da739..ec98d527e2 100755 --- a/internal/providers/pluginfw/products/endpoint/resource_endpoint.go +++ b/internal/providers/pluginfw/products/endpoint/resource_endpoint.go @@ -59,7 +59,8 @@ type Endpoint struct { // The resource name of the endpoint, which uniquely identifies the // endpoint. 
Name types.String `tfsdk:"name"` - + // The parent resource name of the account under which the endpoint is + // created. Format: `accounts/{account_id}`. Parent types.String `tfsdk:"parent"` // The cloud provider region where this endpoint is located. Region types.String `tfsdk:"region"` diff --git a/internal/providers/pluginfw/products/postgres_database/data_postgres_database.go b/internal/providers/pluginfw/products/postgres_database/data_postgres_database.go new file mode 100755 index 0000000000..c80103471a --- /dev/null +++ b/internal/providers/pluginfw/products/postgres_database/data_postgres_database.go @@ -0,0 +1,249 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package postgres_database + +import ( + "context" + "reflect" + "regexp" + + "github.com/databricks/databricks-sdk-go/service/postgres" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/databricks/terraform-provider-databricks/internal/service/postgres_tf" + "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const dataSourceName = 
"postgres_database" + +var _ datasource.DataSourceWithConfigure = &DatabaseDataSource{} + +func DataSourceDatabase() datasource.DataSource { + return &DatabaseDataSource{} +} + +type DatabaseDataSource struct { + Client *autogen.DatabricksClient +} + +// ProviderConfigData contains the fields to configure the provider. +type ProviderConfigData struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfig type. +func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^[1-9]\d*$`), "workspace_id must be a positive integer without leading zeros")) + return attrs +} + +// ProviderConfigDataWorkspaceIDPlanModifier is plan modifier for the workspace_id field. +// Resource requires replacement if the workspace_id changes from one non-empty value to another. +func ProviderConfigDataWorkspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { + // Require replacement if workspace_id changes from one non-empty value to another + oldValue := req.StateValue.ValueString() + newValue := req.PlanValue.ValueString() + + if oldValue != "" && newValue != "" && oldValue != newValue { + resp.RequiresReplace = true + } +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// ProviderConfigData struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. 
This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (r ProviderConfigData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ProviderConfigData +// only implements ToObjectValue() and Type(). +func (r ProviderConfigData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (r ProviderConfigData) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} + +// DatabaseData extends the main model with additional fields. +type DatabaseData struct { + // A timestamp indicating when the database was created. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The resource name of the database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"name"` + // The branch containing this database. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"parent"` + // The desired state of the Database. 
+ Spec types.Object `tfsdk:"spec"` + // The observed state of the Database. + Status types.Object `tfsdk:"status"` + // A timestamp indicating when the database was last updated. + UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` + ProviderConfigData types.Object `tfsdk:"provider_config"` +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// DatabaseData struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (m DatabaseData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "spec": reflect.TypeOf(postgres_tf.DatabaseDatabaseSpec{}), + "status": reflect.TypeOf(postgres_tf.DatabaseDatabaseStatus{}), + "provider_config": reflect.TypeOf(ProviderConfigData{}), + } +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseData +// only implements ToObjectValue() and Type(). 
+func (m DatabaseData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "create_time": m.CreateTime, + "name": m.Name, + "parent": m.Parent, + "spec": m.Spec, + "status": m.Status, + "update_time": m.UpdateTime, + + "provider_config": m.ProviderConfigData, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (m DatabaseData) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "create_time": timetypes.RFC3339{}.Type(ctx), + "name": types.StringType, + "parent": types.StringType, + "spec": postgres_tf.DatabaseDatabaseSpec{}.Type(ctx), + "status": postgres_tf.DatabaseDatabaseStatus{}.Type(ctx), + "update_time": timetypes.RFC3339{}.Type(ctx), + + "provider_config": ProviderConfigData{}.Type(ctx), + }, + } +} + +func (m DatabaseData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["name"] = attrs["name"].SetRequired() + attrs["parent"] = attrs["parent"].SetComputed() + attrs["spec"] = attrs["spec"].SetComputed() + attrs["status"] = attrs["status"].SetComputed() + attrs["update_time"] = attrs["update_time"].SetComputed() + + attrs["provider_config"] = attrs["provider_config"].SetOptional() + + return attrs +} + +func (r *DatabaseDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(dataSourceName) +} + +func (r *DatabaseDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, DatabaseData{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks 
Database", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *DatabaseDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + r.Client = autogen.ConfigureDataSource(req, resp) +} + +func (r *DatabaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName) + + var config DatabaseData + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) + if resp.Diagnostics.HasError() { + return + } + + var readRequest postgres.GetDatabaseRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, config, &readRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) + if resp.Diagnostics.HasError() { + return + } + + response, err := client.Postgres.GetDatabase(ctx, readRequest) + if err != nil { + resp.Diagnostics.AddError("failed to get postgres_database", err.Error()) + return + } + + var newState DatabaseData + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, response, &newState)...) + if resp.Diagnostics.HasError() { + return + } + // Preserve provider_config from config since it's not part of the API response + newState.ProviderConfigData = config.ProviderConfigData + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) 
+} diff --git a/internal/providers/pluginfw/products/postgres_database/data_postgres_databases.go b/internal/providers/pluginfw/products/postgres_database/data_postgres_databases.go new file mode 100755 index 0000000000..62ea729941 --- /dev/null +++ b/internal/providers/pluginfw/products/postgres_database/data_postgres_databases.go @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package postgres_database + +import ( + "context" + "reflect" + + "github.com/databricks/databricks-sdk-go/service/postgres" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const dataSourcesName = "postgres_databases" + +var _ datasource.DataSourceWithConfigure = &DatabasesDataSource{} + +func DataSourceDatabases() datasource.DataSource { + return &DatabasesDataSource{} +} + +// DatabasesData extends the main model with additional fields. +type DatabasesData struct { + Postgres types.List `tfsdk:"databases"` + // Upper bound for items returned. + PageSize types.Int64 `tfsdk:"page_size"` + // The Branch that owns this collection of databases. 
Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"parent"` + ProviderConfigData types.Object `tfsdk:"provider_config"` +} + +func (DatabasesData) GetComplexFieldTypes(context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "databases": reflect.TypeOf(DatabaseData{}), + "provider_config": reflect.TypeOf(ProviderConfigData{}), + } +} + +func (m DatabasesData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["parent"] = attrs["parent"].SetRequired() + attrs["page_size"] = attrs["page_size"].SetOptional() + + attrs["databases"] = attrs["databases"].SetComputed() + attrs["provider_config"] = attrs["provider_config"].SetOptional() + + return attrs +} + +type DatabasesDataSource struct { + Client *autogen.DatabricksClient +} + +func (r *DatabasesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(dataSourcesName) +} + +func (r *DatabasesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, DatabasesData{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks Database", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *DatabasesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + r.Client = autogen.ConfigureDataSource(req, resp) +} + +func (r *DatabasesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourcesName) + + var config DatabasesData + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) 
+ if resp.Diagnostics.HasError() { + return + } + + var listRequest postgres.ListDatabasesRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, config, &listRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) + if resp.Diagnostics.HasError() { + return + } + + response, err := client.Postgres.ListDatabasesAll(ctx, listRequest) + if err != nil { + resp.Diagnostics.AddError("failed to list postgres_databases", err.Error()) + return + } + + var results = []attr.Value{} + for _, item := range response { + var database DatabaseData + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, item, &database)...) + if resp.Diagnostics.HasError() { + return + } + database.ProviderConfigData = config.ProviderConfigData + + results = append(results, database.ToObjectValue(ctx)) + } + + config.Postgres = types.ListValueMust(DatabaseData{}.Type(ctx), results) + resp.Diagnostics.Append(resp.State.Set(ctx, config)...) +} diff --git a/internal/providers/pluginfw/products/postgres_database/resource_postgres_database.go b/internal/providers/pluginfw/products/postgres_database/resource_postgres_database.go new file mode 100755 index 0000000000..7e8488dad2 --- /dev/null +++ b/internal/providers/pluginfw/products/postgres_database/resource_postgres_database.go @@ -0,0 +1,585 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package postgres_database + +import ( + "context" + "fmt" + "reflect" + "regexp" + "strings" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/common/types/fieldmask" + "github.com/databricks/databricks-sdk-go/service/postgres" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/autogen" + pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" + "github.com/databricks/terraform-provider-databricks/internal/service/postgres_tf" + "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +const resourceName = "postgres_database" + +var _ resource.ResourceWithConfigure = &DatabaseResource{} + +func ResourceDatabase() resource.Resource { + return &DatabaseResource{} +} + +type DatabaseResource struct { + Client 
*autogen.DatabricksClient +} + +// ProviderConfig contains the fields to configure the provider. +type ProviderConfig struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfig type. +func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddPlanModifier( + stringplanmodifier.RequiresReplaceIf(ProviderConfigWorkspaceIDPlanModifier, "", "")) + + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(tfschema.StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^[1-9]\d*$`), "workspace_id must be a positive integer without leading zeros")) + return attrs +} + +// ProviderConfigWorkspaceIDPlanModifier is plan modifier for the workspace_id field. +// Resource requires replacement if the workspace_id changes from one non-empty value to another. +func ProviderConfigWorkspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { + // Require replacement if workspace_id changes from one non-empty value to another + oldValue := req.StateValue.ValueString() + newValue := req.PlanValue.ValueString() + + if oldValue != "" && newValue != "" && oldValue != newValue { + resp.RequiresReplace = true + } +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// ProviderConfig struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. 
This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (r ProviderConfig) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ProviderConfig +// only implements ToObjectValue() and Type(). +func (r ProviderConfig) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (r ProviderConfig) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} + +// Database extends the main model with additional fields. +type Database struct { + // A timestamp indicating when the database was created. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The ID to use for the Database, which will become the final component of + // the database's resource name. This ID becomes the database name in + // postgres. + // + // This value should be 4-63 characters, and only use characters available + // in DNS names, as defined by RFC-1123 + // + // If database_id is not specified in the request, it is generated + // automatically. 
+ DatabaseId types.String `tfsdk:"database_id"` + // The resource name of the database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"name"` + // The branch containing this database. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"parent"` + // The desired state of the Database. + Spec types.Object `tfsdk:"spec"` + // The observed state of the Database. + Status types.Object `tfsdk:"status"` + // A timestamp indicating when the database was last updated. + UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` + ProviderConfig types.Object `tfsdk:"provider_config"` +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in the extended +// Database struct. Container types (types.Map, types.List, types.Set) and +// object types (types.Object) do not carry the type information of their elements in the Go +// type system. This function provides a way to retrieve the type information of the elements in +// complex fields at runtime. The values of the map are the reflected types of the contained elements. +// They must be either primitive values from the plugin framework type system +// (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF SDK values. +func (m Database) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "spec": reflect.TypeOf(postgres_tf.DatabaseDatabaseSpec{}), + "status": reflect.TypeOf(postgres_tf.DatabaseDatabaseStatus{}), + "provider_config": reflect.TypeOf(ProviderConfig{}), + } +} + +// ToObjectValue returns the object value for the resource, combining attributes from the +// embedded TFSDK model and contains additional fields. +// +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. 
Thus, Database +// only implements ToObjectValue() and Type(). +func (m Database) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{"create_time": m.CreateTime, + "database_id": m.DatabaseId, + "name": m.Name, + "parent": m.Parent, + "spec": m.Spec, + "status": m.Status, + "update_time": m.UpdateTime, + + "provider_config": m.ProviderConfig, + }, + ) +} + +// Type returns the object type with attributes from both the embedded TFSDK model +// and contains additional fields. +func (m Database) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{"create_time": timetypes.RFC3339{}.Type(ctx), + "database_id": types.StringType, + "name": types.StringType, + "parent": types.StringType, + "spec": postgres_tf.DatabaseDatabaseSpec{}.Type(ctx), + "status": postgres_tf.DatabaseDatabaseStatus{}.Type(ctx), + "update_time": timetypes.RFC3339{}.Type(ctx), + + "provider_config": ProviderConfig{}.Type(ctx), + }, + } +} + +// SyncFieldsDuringCreateOrUpdate copies values from the plan into the receiver, +// including both embedded model fields and additional fields. This method is called +// during create and update. +func (to *Database) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Database) { + to.DatabaseId = from.DatabaseId + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. 
+ to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + // Recursively sync the fields of Spec + toSpec.SyncFieldsDuringCreateOrUpdate(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + // Recursively sync the fields of Status + toStatus.SyncFieldsDuringCreateOrUpdate(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } + to.ProviderConfig = from.ProviderConfig + +} + +// SyncFieldsDuringRead copies values from the existing state into the receiver, +// including both embedded model fields and additional fields. This method is called +// during read. +func (to *Database) SyncFieldsDuringRead(ctx context.Context, from Database) { + to.DatabaseId = from.DatabaseId + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. 
+ to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + toSpec.SyncFieldsDuringRead(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + toStatus.SyncFieldsDuringRead(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } + to.ProviderConfig = from.ProviderConfig + +} + +func (m Database) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["name"] = attrs["name"].SetComputed() + attrs["parent"] = attrs["parent"].SetRequired() + attrs["parent"] = attrs["parent"].(tfschema.StringAttributeBuilder).AddPlanModifier(stringplanmodifier.RequiresReplace()).(tfschema.AttributeBuilder) + attrs["spec"] = attrs["spec"].SetOptional() + attrs["spec"] = attrs["spec"].SetComputed() + attrs["spec"] = attrs["spec"].(tfschema.SingleNestedAttributeBuilder).AddPlanModifier(objectplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["status"] = attrs["status"].SetComputed() + attrs["update_time"] = attrs["update_time"].SetComputed() + attrs["database_id"] = attrs["database_id"].SetComputed() + attrs["database_id"] = attrs["database_id"].SetOptional() + attrs["database_id"] = attrs["database_id"].(tfschema.StringAttributeBuilder).AddPlanModifier(stringplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["database_id"] = attrs["database_id"].(tfschema.StringAttributeBuilder).AddPlanModifier(stringplanmodifier.RequiresReplace()).(tfschema.AttributeBuilder) + + attrs["name"] = attrs["name"].(tfschema.StringAttributeBuilder).AddPlanModifier(stringplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["provider_config"] = 
attrs["provider_config"].SetOptional() + + return attrs +} + +// GetSpec returns the value of the Spec field in Database as +// a postgres_tf.DatabaseDatabaseSpec value. +// If the field is unknown or null, the boolean return value is false. +func (m *Database) GetSpec(ctx context.Context) (postgres_tf.DatabaseDatabaseSpec, bool) { + var e postgres_tf.DatabaseDatabaseSpec + if m.Spec.IsNull() || m.Spec.IsUnknown() { + return e, false + } + var v postgres_tf.DatabaseDatabaseSpec + d := m.Spec.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetSpec sets the value of the Spec field in Database. +func (m *Database) SetSpec(ctx context.Context, v postgres_tf.DatabaseDatabaseSpec) { + vs := v.ToObjectValue(ctx) + m.Spec = vs +} + +// GetStatus returns the value of the Status field in Database as +// a postgres_tf.DatabaseDatabaseStatus value. +// If the field is unknown or null, the boolean return value is false. +func (m *Database) GetStatus(ctx context.Context) (postgres_tf.DatabaseDatabaseStatus, bool) { + var e postgres_tf.DatabaseDatabaseStatus + if m.Status.IsNull() || m.Status.IsUnknown() { + return e, false + } + var v postgres_tf.DatabaseDatabaseStatus + d := m.Status.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetStatus sets the value of the Status field in Database. 
+func (m *Database) SetStatus(ctx context.Context, v postgres_tf.DatabaseDatabaseStatus) { + vs := v.ToObjectValue(ctx) + m.Status = vs +} + +func (r *DatabaseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = autogen.GetDatabricksProductionName(resourceName) +} + +func (r *DatabaseResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + attrs, blocks := tfschema.ResourceStructToSchemaMap(ctx, Database{}, nil) + resp.Schema = schema.Schema{ + Description: "Terraform schema for Databricks postgres_database", + Attributes: attrs, + Blocks: blocks, + } +} + +func (r *DatabaseResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + r.Client = autogen.ConfigureResource(req, resp) +} + +func (r *DatabaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var plan Database + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + var database postgres.Database + + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, plan, &database)...) + if resp.Diagnostics.HasError() { + return + } + + createRequest := postgres.CreateDatabaseRequest{ + Database: database, + Parent: plan.Parent.ValueString(), + DatabaseId: plan.DatabaseId.ValueString(), + } + + var namespace ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + + response, err := client.Postgres.CreateDatabase(ctx, createRequest) + if err != nil { + resp.Diagnostics.AddError("failed to create postgres_database", err.Error()) + return + } + + var newState Database + + waitResponse, err := response.Wait(ctx) + if err != nil { + resp.Diagnostics.AddError("error waiting for postgres_database to be ready", err.Error()) + return + } + + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, waitResponse, &newState)...) + + if resp.Diagnostics.HasError() { + return + } + + newState.SyncFieldsDuringCreateOrUpdate(ctx, plan) + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *DatabaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var existingState Database + resp.Diagnostics.Append(req.State.Get(ctx, &existingState)...) + if resp.Diagnostics.HasError() { + return + } + + var readRequest postgres.GetDatabaseRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, existingState, &readRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfig + resp.Diagnostics.Append(existingState.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + response, err := client.Postgres.GetDatabase(ctx, readRequest) + if err != nil { + if apierr.IsMissing(err) { + resp.State.RemoveResource(ctx) + return + } + + resp.Diagnostics.AddError("failed to get postgres_database", err.Error()) + return + } + + var newState Database + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, response, &newState)...) + if resp.Diagnostics.HasError() { + return + } + + newState.SyncFieldsDuringRead(ctx, existingState) + + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) +} + +func (r *DatabaseResource) update(ctx context.Context, plan Database, diags *diag.Diagnostics, state *tfsdk.State) { + var database postgres.Database + + diags.Append(converters.TfSdkToGoSdkStruct(ctx, plan, &database)...) + if diags.HasError() { + return + } + + updateRequest := postgres.UpdateDatabaseRequest{ + Database: database, + Name: plan.Name.ValueString(), + UpdateMask: *fieldmask.New(strings.Split("spec", ",")), + } + + var namespace ProviderConfig + diags.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if diags.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + diags.Append(clientDiags...) + if diags.HasError() { + return + } + response, err := client.Postgres.UpdateDatabase(ctx, updateRequest) + if err != nil { + diags.AddError("failed to update postgres_database", err.Error()) + return + } + + var newState Database + + waitResponse, err := response.Wait(ctx) + if err != nil { + diags.AddError("error waiting for postgres_database update", err.Error()) + return + } + + diags.Append(converters.GoSdkToTfSdkStruct(ctx, waitResponse, &newState)...) + + if diags.HasError() { + return + } + + newState.SyncFieldsDuringCreateOrUpdate(ctx, plan) + diags.Append(state.Set(ctx, newState)...) 
+} + +func (r *DatabaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var plan Database + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + r.update(ctx, plan, &resp.Diagnostics, &resp.State) +} + +func (r *DatabaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + + var state Database + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + var deleteRequest postgres.DeleteDatabaseRequest + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, state, &deleteRequest)...) + if resp.Diagnostics.HasError() { + return + } + + var namespace ProviderConfig + resp.Diagnostics.Append(state.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + client, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, namespace.WorkspaceID.ValueString()) + + resp.Diagnostics.Append(clientDiags...) 
+ if resp.Diagnostics.HasError() { + return + } + + response, err := client.Postgres.DeleteDatabase(ctx, deleteRequest) + if err != nil { + resp.Diagnostics.AddError("failed to delete postgres_database", err.Error()) + return + } + + err = response.Wait(ctx) + if err != nil && !apierr.IsMissing(err) { + resp.Diagnostics.AddError("error waiting for postgres_database delete", err.Error()) + return + } + +} + +var _ resource.ResourceWithImportState = &DatabaseResource{} + +func (r *DatabaseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + parts := strings.Split(req.ID, ",") + + if len(parts) != 1 || parts[0] == "" { + resp.Diagnostics.AddError( + "Unexpected Import Identifier", + fmt.Sprintf( + "Expected import identifier with format: name. Got: %q", + req.ID, + ), + ) + return + } + + name := parts[0] + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), name)...) +} diff --git a/internal/service/apps_tf/legacy_model.go b/internal/service/apps_tf/legacy_model.go index 1aec676197..fa9058a9f9 100755 --- a/internal/service/apps_tf/legacy_model.go +++ b/internal/service/apps_tf/legacy_model.go @@ -2306,6 +2306,8 @@ type AppResource_SdkV2 struct { // Name of the App Resource. 
Name types.String `tfsdk:"name"` + Postgres types.List `tfsdk:"postgres"` + Secret types.List `tfsdk:"secret"` ServingEndpoint types.List `tfsdk:"serving_endpoint"` @@ -2361,6 +2363,15 @@ func (to *AppResource_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, } } } + if !from.Postgres.IsNull() && !from.Postgres.IsUnknown() { + if toPostgres, ok := to.GetPostgres(ctx); ok { + if fromPostgres, ok := from.GetPostgres(ctx); ok { + // Recursively sync the fields of Postgres + toPostgres.SyncFieldsDuringCreateOrUpdate(ctx, fromPostgres) + to.SetPostgres(ctx, toPostgres) + } + } + } if !from.Secret.IsNull() && !from.Secret.IsUnknown() { if toSecret, ok := to.GetSecret(ctx); ok { if fromSecret, ok := from.GetSecret(ctx); ok { @@ -2440,6 +2451,14 @@ func (to *AppResource_SdkV2) SyncFieldsDuringRead(ctx context.Context, from AppR } } } + if !from.Postgres.IsNull() && !from.Postgres.IsUnknown() { + if toPostgres, ok := to.GetPostgres(ctx); ok { + if fromPostgres, ok := from.GetPostgres(ctx); ok { + toPostgres.SyncFieldsDuringRead(ctx, fromPostgres) + to.SetPostgres(ctx, toPostgres) + } + } + } if !from.Secret.IsNull() && !from.Secret.IsUnknown() { if toSecret, ok := to.GetSecret(ctx); ok { if fromSecret, ok := from.GetSecret(ctx); ok { @@ -2487,6 +2506,8 @@ func (m AppResource_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.A attrs["job"] = attrs["job"].SetOptional() attrs["job"] = attrs["job"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["name"] = attrs["name"].SetRequired() + attrs["postgres"] = attrs["postgres"].SetOptional() + attrs["postgres"] = attrs["postgres"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["secret"] = attrs["secret"].SetOptional() attrs["secret"] = attrs["secret"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["serving_endpoint"] = 
attrs["serving_endpoint"].SetOptional() @@ -2513,6 +2534,7 @@ func (m AppResource_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string] "experiment": reflect.TypeOf(AppResourceExperiment_SdkV2{}), "genie_space": reflect.TypeOf(AppResourceGenieSpace_SdkV2{}), "job": reflect.TypeOf(AppResourceJob_SdkV2{}), + "postgres": reflect.TypeOf(AppResourcePostgres_SdkV2{}), "secret": reflect.TypeOf(AppResourceSecret_SdkV2{}), "serving_endpoint": reflect.TypeOf(AppResourceServingEndpoint_SdkV2{}), "sql_warehouse": reflect.TypeOf(AppResourceSqlWarehouse_SdkV2{}), @@ -2534,6 +2556,7 @@ func (m AppResource_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectVa "genie_space": m.GenieSpace, "job": m.Job, "name": m.Name, + "postgres": m.Postgres, "secret": m.Secret, "serving_endpoint": m.ServingEndpoint, "sql_warehouse": m.SqlWarehouse, @@ -2562,6 +2585,9 @@ func (m AppResource_SdkV2) Type(ctx context.Context) attr.Type { ElemType: AppResourceJob_SdkV2{}.Type(ctx), }, "name": types.StringType, + "postgres": basetypes.ListType{ + ElemType: AppResourcePostgres_SdkV2{}.Type(ctx), + }, "secret": basetypes.ListType{ ElemType: AppResourceSecret_SdkV2{}.Type(ctx), }, @@ -2708,6 +2734,32 @@ func (m *AppResource_SdkV2) SetJob(ctx context.Context, v AppResourceJob_SdkV2) m.Job = types.ListValueMust(t, vs) } +// GetPostgres returns the value of the Postgres field in AppResource_SdkV2 as +// a AppResourcePostgres_SdkV2 value. +// If the field is unknown or null, the boolean return value is false. +func (m *AppResource_SdkV2) GetPostgres(ctx context.Context) (AppResourcePostgres_SdkV2, bool) { + var e AppResourcePostgres_SdkV2 + if m.Postgres.IsNull() || m.Postgres.IsUnknown() { + return e, false + } + var v []AppResourcePostgres_SdkV2 + d := m.Postgres.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + if len(v) == 0 { + return e, false + } + return v[0], true +} + +// SetPostgres sets the value of the Postgres field in AppResource_SdkV2. 
+func (m *AppResource_SdkV2) SetPostgres(ctx context.Context, v AppResourcePostgres_SdkV2) { + vs := []attr.Value{v.ToObjectValue(ctx)} + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["postgres"] + m.Postgres = types.ListValueMust(t, vs) +} + // GetSecret returns the value of the Secret field in AppResource_SdkV2 as // a AppResourceSecret_SdkV2 value. // If the field is unknown or null, the boolean return value is false. @@ -3073,6 +3125,63 @@ func (m AppResourceJob_SdkV2) Type(ctx context.Context) attr.Type { } } +type AppResourcePostgres_SdkV2 struct { + Branch types.String `tfsdk:"branch"` + + Database types.String `tfsdk:"database"` + + Permission types.String `tfsdk:"permission"` +} + +func (to *AppResourcePostgres_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from AppResourcePostgres_SdkV2) { +} + +func (to *AppResourcePostgres_SdkV2) SyncFieldsDuringRead(ctx context.Context, from AppResourcePostgres_SdkV2) { +} + +func (m AppResourcePostgres_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["branch"] = attrs["branch"].SetOptional() + attrs["database"] = attrs["database"].SetOptional() + attrs["permission"] = attrs["permission"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in AppResourcePostgres. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. 
+func (m AppResourcePostgres_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, AppResourcePostgres_SdkV2 +// only implements ToObjectValue() and Type(). +func (m AppResourcePostgres_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "branch": m.Branch, + "database": m.Database, + "permission": m.Permission, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m AppResourcePostgres_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "branch": types.StringType, + "database": types.StringType, + "permission": types.StringType, + }, + } +} + type AppResourceSecret_SdkV2 struct { // Key of the secret to grant permission on. Key types.String `tfsdk:"key"` @@ -6080,10 +6189,6 @@ type Space_SdkV2 struct { // alphanumeric characters and hyphens. It must be unique within the // workspace. Name types.String `tfsdk:"name"` - // The OAuth2 app client ID for the app space. - Oauth2AppClientId types.String `tfsdk:"oauth2_app_client_id"` - // The OAuth2 app integration ID for the app space. - Oauth2AppIntegrationId types.String `tfsdk:"oauth2_app_integration_id"` // Resources for the app space. Resources configured at the space level are // available to all apps in the space. 
Resources types.List `tfsdk:"resources"` @@ -6172,8 +6277,6 @@ func (m Space_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu attrs["effective_user_api_scopes"] = attrs["effective_user_api_scopes"].SetComputed() attrs["id"] = attrs["id"].SetComputed() attrs["name"] = attrs["name"].SetRequired() - attrs["oauth2_app_client_id"] = attrs["oauth2_app_client_id"].SetComputed() - attrs["oauth2_app_integration_id"] = attrs["oauth2_app_integration_id"].SetComputed() attrs["resources"] = attrs["resources"].SetOptional() attrs["service_principal_client_id"] = attrs["service_principal_client_id"].SetComputed() attrs["service_principal_id"] = attrs["service_principal_id"].SetComputed() @@ -6218,8 +6321,6 @@ func (m Space_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { "effective_user_api_scopes": m.EffectiveUserApiScopes, "id": m.Id, "name": m.Name, - "oauth2_app_client_id": m.Oauth2AppClientId, - "oauth2_app_integration_id": m.Oauth2AppIntegrationId, "resources": m.Resources, "service_principal_client_id": m.ServicePrincipalClientId, "service_principal_id": m.ServicePrincipalId, @@ -6243,10 +6344,8 @@ func (m Space_SdkV2) Type(ctx context.Context) attr.Type { "effective_user_api_scopes": basetypes.ListType{ ElemType: types.StringType, }, - "id": types.StringType, - "name": types.StringType, - "oauth2_app_client_id": types.StringType, - "oauth2_app_integration_id": types.StringType, + "id": types.StringType, + "name": types.StringType, "resources": basetypes.ListType{ ElemType: AppResource_SdkV2{}.Type(ctx), }, diff --git a/internal/service/apps_tf/model.go b/internal/service/apps_tf/model.go index f638d9e394..15b87892fd 100755 --- a/internal/service/apps_tf/model.go +++ b/internal/service/apps_tf/model.go @@ -2249,6 +2249,8 @@ type AppResource struct { // Name of the App Resource. 
Name types.String `tfsdk:"name"` + Postgres types.Object `tfsdk:"postgres"` + Secret types.Object `tfsdk:"secret"` ServingEndpoint types.Object `tfsdk:"serving_endpoint"` @@ -2304,6 +2306,15 @@ func (to *AppResource) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from } } } + if !from.Postgres.IsNull() && !from.Postgres.IsUnknown() { + if toPostgres, ok := to.GetPostgres(ctx); ok { + if fromPostgres, ok := from.GetPostgres(ctx); ok { + // Recursively sync the fields of Postgres + toPostgres.SyncFieldsDuringCreateOrUpdate(ctx, fromPostgres) + to.SetPostgres(ctx, toPostgres) + } + } + } if !from.Secret.IsNull() && !from.Secret.IsUnknown() { if toSecret, ok := to.GetSecret(ctx); ok { if fromSecret, ok := from.GetSecret(ctx); ok { @@ -2383,6 +2394,14 @@ func (to *AppResource) SyncFieldsDuringRead(ctx context.Context, from AppResourc } } } + if !from.Postgres.IsNull() && !from.Postgres.IsUnknown() { + if toPostgres, ok := to.GetPostgres(ctx); ok { + if fromPostgres, ok := from.GetPostgres(ctx); ok { + toPostgres.SyncFieldsDuringRead(ctx, fromPostgres) + to.SetPostgres(ctx, toPostgres) + } + } + } if !from.Secret.IsNull() && !from.Secret.IsUnknown() { if toSecret, ok := to.GetSecret(ctx); ok { if fromSecret, ok := from.GetSecret(ctx); ok { @@ -2425,6 +2444,7 @@ func (m AppResource) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu attrs["genie_space"] = attrs["genie_space"].SetOptional() attrs["job"] = attrs["job"].SetOptional() attrs["name"] = attrs["name"].SetRequired() + attrs["postgres"] = attrs["postgres"].SetOptional() attrs["secret"] = attrs["secret"].SetOptional() attrs["serving_endpoint"] = attrs["serving_endpoint"].SetOptional() attrs["sql_warehouse"] = attrs["sql_warehouse"].SetOptional() @@ -2447,6 +2467,7 @@ func (m AppResource) GetComplexFieldTypes(ctx context.Context) map[string]reflec "experiment": reflect.TypeOf(AppResourceExperiment{}), "genie_space": reflect.TypeOf(AppResourceGenieSpace{}), "job": reflect.TypeOf(AppResourceJob{}), + 
"postgres": reflect.TypeOf(AppResourcePostgres{}), "secret": reflect.TypeOf(AppResourceSecret{}), "serving_endpoint": reflect.TypeOf(AppResourceServingEndpoint{}), "sql_warehouse": reflect.TypeOf(AppResourceSqlWarehouse{}), @@ -2468,6 +2489,7 @@ func (m AppResource) ToObjectValue(ctx context.Context) basetypes.ObjectValue { "genie_space": m.GenieSpace, "job": m.Job, "name": m.Name, + "postgres": m.Postgres, "secret": m.Secret, "serving_endpoint": m.ServingEndpoint, "sql_warehouse": m.SqlWarehouse, @@ -2486,6 +2508,7 @@ func (m AppResource) Type(ctx context.Context) attr.Type { "genie_space": AppResourceGenieSpace{}.Type(ctx), "job": AppResourceJob{}.Type(ctx), "name": types.StringType, + "postgres": AppResourcePostgres{}.Type(ctx), "secret": AppResourceSecret{}.Type(ctx), "serving_endpoint": AppResourceServingEndpoint{}.Type(ctx), "sql_warehouse": AppResourceSqlWarehouse{}.Type(ctx), @@ -2619,6 +2642,31 @@ func (m *AppResource) SetJob(ctx context.Context, v AppResourceJob) { m.Job = vs } +// GetPostgres returns the value of the Postgres field in AppResource as +// a AppResourcePostgres value. +// If the field is unknown or null, the boolean return value is false. +func (m *AppResource) GetPostgres(ctx context.Context) (AppResourcePostgres, bool) { + var e AppResourcePostgres + if m.Postgres.IsNull() || m.Postgres.IsUnknown() { + return e, false + } + var v AppResourcePostgres + d := m.Postgres.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetPostgres sets the value of the Postgres field in AppResource. +func (m *AppResource) SetPostgres(ctx context.Context, v AppResourcePostgres) { + vs := v.ToObjectValue(ctx) + m.Postgres = vs +} + // GetSecret returns the value of the Secret field in AppResource as // a AppResourceSecret value. // If the field is unknown or null, the boolean return value is false. 
@@ -2980,6 +3028,63 @@ func (m AppResourceJob) Type(ctx context.Context) attr.Type { } } +type AppResourcePostgres struct { + Branch types.String `tfsdk:"branch"` + + Database types.String `tfsdk:"database"` + + Permission types.String `tfsdk:"permission"` +} + +func (to *AppResourcePostgres) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from AppResourcePostgres) { +} + +func (to *AppResourcePostgres) SyncFieldsDuringRead(ctx context.Context, from AppResourcePostgres) { +} + +func (m AppResourcePostgres) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["branch"] = attrs["branch"].SetOptional() + attrs["database"] = attrs["database"].SetOptional() + attrs["permission"] = attrs["permission"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in AppResourcePostgres. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m AppResourcePostgres) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, AppResourcePostgres +// only implements ToObjectValue() and Type(). 
+func (m AppResourcePostgres) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "branch": m.Branch, + "database": m.Database, + "permission": m.Permission, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m AppResourcePostgres) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "branch": types.StringType, + "database": types.StringType, + "permission": types.StringType, + }, + } +} + type AppResourceSecret struct { // Key of the secret to grant permission on. Key types.String `tfsdk:"key"` @@ -5947,10 +6052,6 @@ type Space struct { // alphanumeric characters and hyphens. It must be unique within the // workspace. Name types.String `tfsdk:"name"` - // The OAuth2 app client ID for the app space. - Oauth2AppClientId types.String `tfsdk:"oauth2_app_client_id"` - // The OAuth2 app integration ID for the app space. - Oauth2AppIntegrationId types.String `tfsdk:"oauth2_app_integration_id"` // Resources for the app space. Resources configured at the space level are // available to all apps in the space. 
Resources types.List `tfsdk:"resources"` @@ -6039,8 +6140,6 @@ func (m Space) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuil attrs["effective_user_api_scopes"] = attrs["effective_user_api_scopes"].SetComputed() attrs["id"] = attrs["id"].SetComputed() attrs["name"] = attrs["name"].SetRequired() - attrs["oauth2_app_client_id"] = attrs["oauth2_app_client_id"].SetComputed() - attrs["oauth2_app_integration_id"] = attrs["oauth2_app_integration_id"].SetComputed() attrs["resources"] = attrs["resources"].SetOptional() attrs["service_principal_client_id"] = attrs["service_principal_client_id"].SetComputed() attrs["service_principal_id"] = attrs["service_principal_id"].SetComputed() @@ -6084,8 +6183,6 @@ func (m Space) ToObjectValue(ctx context.Context) basetypes.ObjectValue { "effective_user_api_scopes": m.EffectiveUserApiScopes, "id": m.Id, "name": m.Name, - "oauth2_app_client_id": m.Oauth2AppClientId, - "oauth2_app_integration_id": m.Oauth2AppIntegrationId, "resources": m.Resources, "service_principal_client_id": m.ServicePrincipalClientId, "service_principal_id": m.ServicePrincipalId, @@ -6109,10 +6206,8 @@ func (m Space) Type(ctx context.Context) attr.Type { "effective_user_api_scopes": basetypes.ListType{ ElemType: types.StringType, }, - "id": types.StringType, - "name": types.StringType, - "oauth2_app_client_id": types.StringType, - "oauth2_app_integration_id": types.StringType, + "id": types.StringType, + "name": types.StringType, "resources": basetypes.ListType{ ElemType: AppResource{}.Type(ctx), }, diff --git a/internal/service/cleanrooms_tf/legacy_model.go b/internal/service/cleanrooms_tf/legacy_model.go index 738b6dae0c..bb45134f0f 100755 --- a/internal/service/cleanrooms_tf/legacy_model.go +++ b/internal/service/cleanrooms_tf/legacy_model.go @@ -2872,7 +2872,7 @@ func (to *CreateCleanRoomOutputCatalogResponse_SdkV2) SyncFieldsDuringRead(ctx c } func (m CreateCleanRoomOutputCatalogResponse_SdkV2) ApplySchemaCustomizations(attrs 
map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["output_catalog"] = attrs["output_catalog"].SetOptional() + attrs["output_catalog"] = attrs["output_catalog"].SetRequired() attrs["output_catalog"] = attrs["output_catalog"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) return attrs diff --git a/internal/service/cleanrooms_tf/model.go b/internal/service/cleanrooms_tf/model.go index aa5f08a7e3..41f2b01b70 100755 --- a/internal/service/cleanrooms_tf/model.go +++ b/internal/service/cleanrooms_tf/model.go @@ -2796,7 +2796,7 @@ func (to *CreateCleanRoomOutputCatalogResponse) SyncFieldsDuringRead(ctx context } func (m CreateCleanRoomOutputCatalogResponse) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["output_catalog"] = attrs["output_catalog"].SetOptional() + attrs["output_catalog"] = attrs["output_catalog"].SetRequired() return attrs } diff --git a/internal/service/compute_tf/legacy_model.go b/internal/service/compute_tf/legacy_model.go index 6466ceab40..c414d5bff6 100755 --- a/internal/service/compute_tf/legacy_model.go +++ b/internal/service/compute_tf/legacy_model.go @@ -9858,8 +9858,6 @@ type EditInstancePool_SdkV2 struct { MaxCapacity types.Int64 `tfsdk:"max_capacity"` // Minimum number of idle instances to keep in the instance pool MinIdleInstances types.Int64 `tfsdk:"min_idle_instances"` - // Flexible node type configuration for the pool. - NodeTypeFlexibility types.List `tfsdk:"node_type_flexibility"` // This field encodes, through a single value, the resources available to // each of the Spark nodes in this cluster. For example, the Spark nodes can // be provisioned and optimized for memory or compute intensive workloads. 
A @@ -9875,26 +9873,9 @@ type EditInstancePool_SdkV2 struct { } func (to *EditInstancePool_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from EditInstancePool_SdkV2) { - if !from.NodeTypeFlexibility.IsNull() && !from.NodeTypeFlexibility.IsUnknown() { - if toNodeTypeFlexibility, ok := to.GetNodeTypeFlexibility(ctx); ok { - if fromNodeTypeFlexibility, ok := from.GetNodeTypeFlexibility(ctx); ok { - // Recursively sync the fields of NodeTypeFlexibility - toNodeTypeFlexibility.SyncFieldsDuringCreateOrUpdate(ctx, fromNodeTypeFlexibility) - to.SetNodeTypeFlexibility(ctx, toNodeTypeFlexibility) - } - } - } } func (to *EditInstancePool_SdkV2) SyncFieldsDuringRead(ctx context.Context, from EditInstancePool_SdkV2) { - if !from.NodeTypeFlexibility.IsNull() && !from.NodeTypeFlexibility.IsUnknown() { - if toNodeTypeFlexibility, ok := to.GetNodeTypeFlexibility(ctx); ok { - if fromNodeTypeFlexibility, ok := from.GetNodeTypeFlexibility(ctx); ok { - toNodeTypeFlexibility.SyncFieldsDuringRead(ctx, fromNodeTypeFlexibility) - to.SetNodeTypeFlexibility(ctx, toNodeTypeFlexibility) - } - } - } } func (m EditInstancePool_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { @@ -9904,8 +9885,6 @@ func (m EditInstancePool_SdkV2) ApplySchemaCustomizations(attrs map[string]tfsch attrs["instance_pool_name"] = attrs["instance_pool_name"].SetRequired() attrs["max_capacity"] = attrs["max_capacity"].SetOptional() attrs["min_idle_instances"] = attrs["min_idle_instances"].SetOptional() - attrs["node_type_flexibility"] = attrs["node_type_flexibility"].SetOptional() - attrs["node_type_flexibility"] = attrs["node_type_flexibility"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["node_type_id"] = attrs["node_type_id"].SetRequired() attrs["remote_disk_throughput"] = attrs["remote_disk_throughput"].SetOptional() attrs["total_initial_remote_disk_size"] = 
attrs["total_initial_remote_disk_size"].SetOptional() @@ -9922,8 +9901,7 @@ func (m EditInstancePool_SdkV2) ApplySchemaCustomizations(attrs map[string]tfsch // SDK values. func (m EditInstancePool_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "custom_tags": reflect.TypeOf(types.String{}), - "node_type_flexibility": reflect.TypeOf(NodeTypeFlexibility_SdkV2{}), + "custom_tags": reflect.TypeOf(types.String{}), } } @@ -9940,7 +9918,6 @@ func (m EditInstancePool_SdkV2) ToObjectValue(ctx context.Context) basetypes.Obj "instance_pool_name": m.InstancePoolName, "max_capacity": m.MaxCapacity, "min_idle_instances": m.MinIdleInstances, - "node_type_flexibility": m.NodeTypeFlexibility, "node_type_id": m.NodeTypeId, "remote_disk_throughput": m.RemoteDiskThroughput, "total_initial_remote_disk_size": m.TotalInitialRemoteDiskSize, @@ -9959,12 +9936,9 @@ func (m EditInstancePool_SdkV2) Type(ctx context.Context) attr.Type { "instance_pool_name": types.StringType, "max_capacity": types.Int64Type, "min_idle_instances": types.Int64Type, - "node_type_flexibility": basetypes.ListType{ - ElemType: NodeTypeFlexibility_SdkV2{}.Type(ctx), - }, - "node_type_id": types.StringType, - "remote_disk_throughput": types.Int64Type, - "total_initial_remote_disk_size": types.Int64Type, + "node_type_id": types.StringType, + "remote_disk_throughput": types.Int64Type, + "total_initial_remote_disk_size": types.Int64Type, }, } } @@ -9995,32 +9969,6 @@ func (m *EditInstancePool_SdkV2) SetCustomTags(ctx context.Context, v map[string m.CustomTags = types.MapValueMust(t, vs) } -// GetNodeTypeFlexibility returns the value of the NodeTypeFlexibility field in EditInstancePool_SdkV2 as -// a NodeTypeFlexibility_SdkV2 value. -// If the field is unknown or null, the boolean return value is false. 
-func (m *EditInstancePool_SdkV2) GetNodeTypeFlexibility(ctx context.Context) (NodeTypeFlexibility_SdkV2, bool) { - var e NodeTypeFlexibility_SdkV2 - if m.NodeTypeFlexibility.IsNull() || m.NodeTypeFlexibility.IsUnknown() { - return e, false - } - var v []NodeTypeFlexibility_SdkV2 - d := m.NodeTypeFlexibility.ElementsAs(ctx, &v, true) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - if len(v) == 0 { - return e, false - } - return v[0], true -} - -// SetNodeTypeFlexibility sets the value of the NodeTypeFlexibility field in EditInstancePool_SdkV2. -func (m *EditInstancePool_SdkV2) SetNodeTypeFlexibility(ctx context.Context, v NodeTypeFlexibility_SdkV2) { - vs := []attr.Value{v.ToObjectValue(ctx)} - t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["node_type_flexibility"] - m.NodeTypeFlexibility = types.ListValueMust(t, vs) -} - type EditInstancePoolResponse_SdkV2 struct { } diff --git a/internal/service/compute_tf/model.go b/internal/service/compute_tf/model.go index 9fb50011ea..ddeb1e825d 100755 --- a/internal/service/compute_tf/model.go +++ b/internal/service/compute_tf/model.go @@ -9601,8 +9601,6 @@ type EditInstancePool struct { MaxCapacity types.Int64 `tfsdk:"max_capacity"` // Minimum number of idle instances to keep in the instance pool MinIdleInstances types.Int64 `tfsdk:"min_idle_instances"` - // Flexible node type configuration for the pool. - NodeTypeFlexibility types.Object `tfsdk:"node_type_flexibility"` // This field encodes, through a single value, the resources available to // each of the Spark nodes in this cluster. For example, the Spark nodes can // be provisioned and optimized for memory or compute intensive workloads. 
A @@ -9618,26 +9616,9 @@ type EditInstancePool struct { } func (to *EditInstancePool) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from EditInstancePool) { - if !from.NodeTypeFlexibility.IsNull() && !from.NodeTypeFlexibility.IsUnknown() { - if toNodeTypeFlexibility, ok := to.GetNodeTypeFlexibility(ctx); ok { - if fromNodeTypeFlexibility, ok := from.GetNodeTypeFlexibility(ctx); ok { - // Recursively sync the fields of NodeTypeFlexibility - toNodeTypeFlexibility.SyncFieldsDuringCreateOrUpdate(ctx, fromNodeTypeFlexibility) - to.SetNodeTypeFlexibility(ctx, toNodeTypeFlexibility) - } - } - } } func (to *EditInstancePool) SyncFieldsDuringRead(ctx context.Context, from EditInstancePool) { - if !from.NodeTypeFlexibility.IsNull() && !from.NodeTypeFlexibility.IsUnknown() { - if toNodeTypeFlexibility, ok := to.GetNodeTypeFlexibility(ctx); ok { - if fromNodeTypeFlexibility, ok := from.GetNodeTypeFlexibility(ctx); ok { - toNodeTypeFlexibility.SyncFieldsDuringRead(ctx, fromNodeTypeFlexibility) - to.SetNodeTypeFlexibility(ctx, toNodeTypeFlexibility) - } - } - } } func (m EditInstancePool) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { @@ -9647,7 +9628,6 @@ func (m EditInstancePool) ApplySchemaCustomizations(attrs map[string]tfschema.At attrs["instance_pool_name"] = attrs["instance_pool_name"].SetRequired() attrs["max_capacity"] = attrs["max_capacity"].SetOptional() attrs["min_idle_instances"] = attrs["min_idle_instances"].SetOptional() - attrs["node_type_flexibility"] = attrs["node_type_flexibility"].SetOptional() attrs["node_type_id"] = attrs["node_type_id"].SetRequired() attrs["remote_disk_throughput"] = attrs["remote_disk_throughput"].SetOptional() attrs["total_initial_remote_disk_size"] = attrs["total_initial_remote_disk_size"].SetOptional() @@ -9664,8 +9644,7 @@ func (m EditInstancePool) ApplySchemaCustomizations(attrs map[string]tfschema.At // SDK values. 
func (m EditInstancePool) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "custom_tags": reflect.TypeOf(types.String{}), - "node_type_flexibility": reflect.TypeOf(NodeTypeFlexibility{}), + "custom_tags": reflect.TypeOf(types.String{}), } } @@ -9682,7 +9661,6 @@ func (m EditInstancePool) ToObjectValue(ctx context.Context) basetypes.ObjectVal "instance_pool_name": m.InstancePoolName, "max_capacity": m.MaxCapacity, "min_idle_instances": m.MinIdleInstances, - "node_type_flexibility": m.NodeTypeFlexibility, "node_type_id": m.NodeTypeId, "remote_disk_throughput": m.RemoteDiskThroughput, "total_initial_remote_disk_size": m.TotalInitialRemoteDiskSize, @@ -9701,7 +9679,6 @@ func (m EditInstancePool) Type(ctx context.Context) attr.Type { "instance_pool_name": types.StringType, "max_capacity": types.Int64Type, "min_idle_instances": types.Int64Type, - "node_type_flexibility": NodeTypeFlexibility{}.Type(ctx), "node_type_id": types.StringType, "remote_disk_throughput": types.Int64Type, "total_initial_remote_disk_size": types.Int64Type, @@ -9735,31 +9712,6 @@ func (m *EditInstancePool) SetCustomTags(ctx context.Context, v map[string]types m.CustomTags = types.MapValueMust(t, vs) } -// GetNodeTypeFlexibility returns the value of the NodeTypeFlexibility field in EditInstancePool as -// a NodeTypeFlexibility value. -// If the field is unknown or null, the boolean return value is false. 
-func (m *EditInstancePool) GetNodeTypeFlexibility(ctx context.Context) (NodeTypeFlexibility, bool) { - var e NodeTypeFlexibility - if m.NodeTypeFlexibility.IsNull() || m.NodeTypeFlexibility.IsUnknown() { - return e, false - } - var v NodeTypeFlexibility - d := m.NodeTypeFlexibility.As(ctx, &v, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - }) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - return v, true -} - -// SetNodeTypeFlexibility sets the value of the NodeTypeFlexibility field in EditInstancePool. -func (m *EditInstancePool) SetNodeTypeFlexibility(ctx context.Context, v NodeTypeFlexibility) { - vs := v.ToObjectValue(ctx) - m.NodeTypeFlexibility = vs -} - type EditInstancePoolResponse struct { } diff --git a/internal/service/ml_tf/legacy_model.go b/internal/service/ml_tf/legacy_model.go index 37e0e6e036..fe4673055e 100755 --- a/internal/service/ml_tf/legacy_model.go +++ b/internal/service/ml_tf/legacy_model.go @@ -5350,12 +5350,27 @@ func (m DeleteWebhookResponse_SdkV2) Type(ctx context.Context) attr.Type { } type DeltaTableSource_SdkV2 struct { + // Schema of the resulting dataframe after transformations, in Spark + // StructType JSON format (from df.schema.json()). Required if + // transformation_sql is specified. Example: + // {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} + DataframeSchema types.String `tfsdk:"dataframe_schema"` // The entity columns of the Delta table. EntityColumns types.List `tfsdk:"entity_columns"` + // Single WHERE clause to filter delta table before applying + // transformations. Will be row-wise evaluated, so should only include + // conditionals and projections. + FilterCondition types.String `tfsdk:"filter_condition"` // The full three-part (catalog, schema, table) name of the Delta table. 
FullName types.String `tfsdk:"full_name"` // The timeseries column of the Delta table. TimeseriesColumn types.String `tfsdk:"timeseries_column"` + // A single SQL SELECT expression applied after filter_condition. Should + // contains all the columns needed (eg. "SELECT *, col_a + col_b AS col_c + // FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + + // col_b AS col_c") If transformation_sql is not provided, all columns of + // the delta table are present in the DataSource dataframe. + TransformationSql types.String `tfsdk:"transformation_sql"` } func (to *DeltaTableSource_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DeltaTableSource_SdkV2) { @@ -5365,9 +5380,12 @@ func (to *DeltaTableSource_SdkV2) SyncFieldsDuringRead(ctx context.Context, from } func (m DeltaTableSource_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["dataframe_schema"] = attrs["dataframe_schema"].SetOptional() attrs["entity_columns"] = attrs["entity_columns"].SetRequired() + attrs["filter_condition"] = attrs["filter_condition"].SetOptional() attrs["full_name"] = attrs["full_name"].SetRequired() attrs["timeseries_column"] = attrs["timeseries_column"].SetRequired() + attrs["transformation_sql"] = attrs["transformation_sql"].SetOptional() return attrs } @@ -5392,9 +5410,12 @@ func (m DeltaTableSource_SdkV2) ToObjectValue(ctx context.Context) basetypes.Obj return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "entity_columns": m.EntityColumns, - "full_name": m.FullName, - "timeseries_column": m.TimeseriesColumn, + "dataframe_schema": m.DataframeSchema, + "entity_columns": m.EntityColumns, + "filter_condition": m.FilterCondition, + "full_name": m.FullName, + "timeseries_column": m.TimeseriesColumn, + "transformation_sql": m.TransformationSql, }) } @@ -5402,11 +5423,14 @@ func (m DeltaTableSource_SdkV2) ToObjectValue(ctx context.Context) 
basetypes.Obj func (m DeltaTableSource_SdkV2) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ + "dataframe_schema": types.StringType, "entity_columns": basetypes.ListType{ ElemType: types.StringType, }, - "full_name": types.StringType, - "timeseries_column": types.StringType, + "filter_condition": types.StringType, + "full_name": types.StringType, + "timeseries_column": types.StringType, + "transformation_sql": types.StringType, }, } } diff --git a/internal/service/ml_tf/model.go b/internal/service/ml_tf/model.go index f6149b0826..2bfb797115 100755 --- a/internal/service/ml_tf/model.go +++ b/internal/service/ml_tf/model.go @@ -5270,12 +5270,27 @@ func (m DeleteWebhookResponse) Type(ctx context.Context) attr.Type { } type DeltaTableSource struct { + // Schema of the resulting dataframe after transformations, in Spark + // StructType JSON format (from df.schema.json()). Required if + // transformation_sql is specified. Example: + // {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]} + DataframeSchema types.String `tfsdk:"dataframe_schema"` // The entity columns of the Delta table. EntityColumns types.List `tfsdk:"entity_columns"` + // Single WHERE clause to filter delta table before applying + // transformations. Will be row-wise evaluated, so should only include + // conditionals and projections. + FilterCondition types.String `tfsdk:"filter_condition"` // The full three-part (catalog, schema, table) name of the Delta table. FullName types.String `tfsdk:"full_name"` // The timeseries column of the Delta table. TimeseriesColumn types.String `tfsdk:"timeseries_column"` + // A single SQL SELECT expression applied after filter_condition. Should + // contains all the columns needed (eg. 
"SELECT *, col_a + col_b AS col_c + // FROM x.y.z WHERE col_a > 0" would have `transformation_sql` "*, col_a + + // col_b AS col_c") If transformation_sql is not provided, all columns of + // the delta table are present in the DataSource dataframe. + TransformationSql types.String `tfsdk:"transformation_sql"` } func (to *DeltaTableSource) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DeltaTableSource) { @@ -5285,9 +5300,12 @@ func (to *DeltaTableSource) SyncFieldsDuringRead(ctx context.Context, from Delta } func (m DeltaTableSource) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["dataframe_schema"] = attrs["dataframe_schema"].SetOptional() attrs["entity_columns"] = attrs["entity_columns"].SetRequired() + attrs["filter_condition"] = attrs["filter_condition"].SetOptional() attrs["full_name"] = attrs["full_name"].SetRequired() attrs["timeseries_column"] = attrs["timeseries_column"].SetRequired() + attrs["transformation_sql"] = attrs["transformation_sql"].SetOptional() return attrs } @@ -5312,9 +5330,12 @@ func (m DeltaTableSource) ToObjectValue(ctx context.Context) basetypes.ObjectVal return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "entity_columns": m.EntityColumns, - "full_name": m.FullName, - "timeseries_column": m.TimeseriesColumn, + "dataframe_schema": m.DataframeSchema, + "entity_columns": m.EntityColumns, + "filter_condition": m.FilterCondition, + "full_name": m.FullName, + "timeseries_column": m.TimeseriesColumn, + "transformation_sql": m.TransformationSql, }) } @@ -5322,11 +5343,14 @@ func (m DeltaTableSource) ToObjectValue(ctx context.Context) basetypes.ObjectVal func (m DeltaTableSource) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ + "dataframe_schema": types.StringType, "entity_columns": basetypes.ListType{ ElemType: types.StringType, }, - "full_name": types.StringType, - 
"timeseries_column": types.StringType, + "filter_condition": types.StringType, + "full_name": types.StringType, + "timeseries_column": types.StringType, + "transformation_sql": types.StringType, }, } } diff --git a/internal/service/networking_tf/legacy_model.go b/internal/service/networking_tf/legacy_model.go index 9d709dd7b0..215aae7afb 100755 --- a/internal/service/networking_tf/legacy_model.go +++ b/internal/service/networking_tf/legacy_model.go @@ -93,7 +93,8 @@ func (m AzurePrivateEndpointInfo_SdkV2) Type(ctx context.Context) attr.Type { type CreateEndpointRequest_SdkV2 struct { Endpoint types.List `tfsdk:"endpoint"` - + // The parent resource name of the account under which the endpoint is + // created. Format: `accounts/{account_id}`. Parent types.String `tfsdk:"-"` } @@ -444,7 +445,8 @@ type ListEndpointsRequest_SdkV2 struct { PageSize types.Int64 `tfsdk:"-"` PageToken types.String `tfsdk:"-"` - + // The parent resource name of the account to list endpoints for. Format: + // `accounts/{account_id}`. Parent types.String `tfsdk:"-"` } diff --git a/internal/service/networking_tf/model.go b/internal/service/networking_tf/model.go index 2e846870fc..c00de9c3bd 100755 --- a/internal/service/networking_tf/model.go +++ b/internal/service/networking_tf/model.go @@ -92,7 +92,8 @@ func (m AzurePrivateEndpointInfo) Type(ctx context.Context) attr.Type { type CreateEndpointRequest struct { Endpoint types.Object `tfsdk:"endpoint"` - + // The parent resource name of the account under which the endpoint is + // created. Format: `accounts/{account_id}`. Parent types.String `tfsdk:"-"` } @@ -435,7 +436,8 @@ type ListEndpointsRequest struct { PageSize types.Int64 `tfsdk:"-"` PageToken types.String `tfsdk:"-"` - + // The parent resource name of the account to list endpoints for. Format: + // `accounts/{account_id}`. 
Parent types.String `tfsdk:"-"` } diff --git a/internal/service/pipelines_tf/legacy_model.go b/internal/service/pipelines_tf/legacy_model.go index d81278d28b..9d1e99f50e 100755 --- a/internal/service/pipelines_tf/legacy_model.go +++ b/internal/service/pipelines_tf/legacy_model.go @@ -3518,6 +3518,8 @@ type GetPipelineResponse_SdkV2 struct { CreatorUserName types.String `tfsdk:"creator_user_name"` // Serverless budget policy ID of this pipeline. EffectiveBudgetPolicyId types.String `tfsdk:"effective_budget_policy_id"` + // Publishing mode of the pipeline + EffectivePublishingMode types.String `tfsdk:"effective_publishing_mode"` // The health of a pipeline. Health types.String `tfsdk:"health"` // The last time the pipeline settings were modified or created. @@ -3600,6 +3602,7 @@ func (m GetPipelineResponse_SdkV2) ApplySchemaCustomizations(attrs map[string]tf attrs["cluster_id"] = attrs["cluster_id"].SetOptional() attrs["creator_user_name"] = attrs["creator_user_name"].SetOptional() attrs["effective_budget_policy_id"] = attrs["effective_budget_policy_id"].SetOptional() + attrs["effective_publishing_mode"] = attrs["effective_publishing_mode"].SetOptional() attrs["health"] = attrs["health"].SetOptional() attrs["last_modified"] = attrs["last_modified"].SetOptional() attrs["latest_updates"] = attrs["latest_updates"].SetOptional() @@ -3641,6 +3644,7 @@ func (m GetPipelineResponse_SdkV2) ToObjectValue(ctx context.Context) basetypes. 
"cluster_id": m.ClusterId, "creator_user_name": m.CreatorUserName, "effective_budget_policy_id": m.EffectiveBudgetPolicyId, + "effective_publishing_mode": m.EffectivePublishingMode, "health": m.Health, "last_modified": m.LastModified, "latest_updates": m.LatestUpdates, @@ -3661,6 +3665,7 @@ func (m GetPipelineResponse_SdkV2) Type(ctx context.Context) attr.Type { "cluster_id": types.StringType, "creator_user_name": types.StringType, "effective_budget_policy_id": types.StringType, + "effective_publishing_mode": types.StringType, "health": types.StringType, "last_modified": types.Int64Type, "latest_updates": basetypes.ListType{ @@ -8777,6 +8782,19 @@ type PipelinesEnvironment_SdkV2 struct { // dependency could be , , (WSFS or Volumes in Databricks), Dependencies types.List `tfsdk:"dependencies"` + // The environment version of the serverless Python environment used to + // execute customer Python code. Each environment version includes a + // specific Python version and a curated set of pre-installed libraries with + // defined versions, providing a stable and reproducible execution + // environment. + // + // Databricks supports a three-year lifecycle for each environment version. + // For available versions and their included packages, see + // https://docs.databricks.com/aws/en/release-notes/serverless/environment-version/ + // + // The value should be a string representing the environment version number, + // for example: `"4"`. 
+ EnvironmentVersion types.String `tfsdk:"environment_version"` } func (to *PipelinesEnvironment_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from PipelinesEnvironment_SdkV2) { @@ -8799,6 +8817,7 @@ func (to *PipelinesEnvironment_SdkV2) SyncFieldsDuringRead(ctx context.Context, func (m PipelinesEnvironment_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["dependencies"] = attrs["dependencies"].SetOptional() + attrs["environment_version"] = attrs["environment_version"].SetOptional() return attrs } @@ -8823,7 +8842,8 @@ func (m PipelinesEnvironment_SdkV2) ToObjectValue(ctx context.Context) basetypes return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "dependencies": m.Dependencies, + "dependencies": m.Dependencies, + "environment_version": m.EnvironmentVersion, }) } @@ -8834,6 +8854,7 @@ func (m PipelinesEnvironment_SdkV2) Type(ctx context.Context) attr.Type { "dependencies": basetypes.ListType{ ElemType: types.StringType, }, + "environment_version": types.StringType, }, } } @@ -10127,6 +10148,11 @@ type StartUpdate_SdkV2 struct { // Only replace_where flows may be specified. Flows not listed use their // original predicate. ReplaceWhereOverrides types.List `tfsdk:"replace_where_overrides"` + // A list of flows for which this update should reset the streaming + // checkpoint. This selection will not clear the data in the flow's target + // table. Flows in this list may also appear in refresh_selection and + // full_refresh_selection. + ResetCheckpointSelection types.List `tfsdk:"reset_checkpoint_selection"` // The information about the requested rewind operation. If specified this // is a rewind mode update. 
RewindSpec types.List `tfsdk:"rewind_spec"` @@ -10154,6 +10180,12 @@ func (to *StartUpdate_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, // set the resulting resource state to the empty list to match the planned value. to.ReplaceWhereOverrides = from.ReplaceWhereOverrides } + if !from.ResetCheckpointSelection.IsNull() && !from.ResetCheckpointSelection.IsUnknown() && to.ResetCheckpointSelection.IsNull() && len(from.ResetCheckpointSelection.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for ResetCheckpointSelection, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.ResetCheckpointSelection = from.ResetCheckpointSelection + } if !from.RewindSpec.IsNull() && !from.RewindSpec.IsUnknown() { if toRewindSpec, ok := to.GetRewindSpec(ctx); ok { if fromRewindSpec, ok := from.GetRewindSpec(ctx); ok { @@ -10184,6 +10216,12 @@ func (to *StartUpdate_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Star // set the resulting resource state to the empty list to match the planned value. to.ReplaceWhereOverrides = from.ReplaceWhereOverrides } + if !from.ResetCheckpointSelection.IsNull() && !from.ResetCheckpointSelection.IsUnknown() && to.ResetCheckpointSelection.IsNull() && len(from.ResetCheckpointSelection.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for ResetCheckpointSelection, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. 
+ to.ResetCheckpointSelection = from.ResetCheckpointSelection + } if !from.RewindSpec.IsNull() && !from.RewindSpec.IsUnknown() { if toRewindSpec, ok := to.GetRewindSpec(ctx); ok { if fromRewindSpec, ok := from.GetRewindSpec(ctx); ok { @@ -10201,6 +10239,7 @@ func (m StartUpdate_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.A attrs["parameters"] = attrs["parameters"].SetOptional() attrs["refresh_selection"] = attrs["refresh_selection"].SetOptional() attrs["replace_where_overrides"] = attrs["replace_where_overrides"].SetOptional() + attrs["reset_checkpoint_selection"] = attrs["reset_checkpoint_selection"].SetOptional() attrs["rewind_spec"] = attrs["rewind_spec"].SetOptional() attrs["rewind_spec"] = attrs["rewind_spec"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["validate_only"] = attrs["validate_only"].SetOptional() @@ -10218,11 +10257,12 @@ func (m StartUpdate_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.A // SDK values. 
func (m StartUpdate_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "full_refresh_selection": reflect.TypeOf(types.String{}), - "parameters": reflect.TypeOf(types.String{}), - "refresh_selection": reflect.TypeOf(types.String{}), - "replace_where_overrides": reflect.TypeOf(ReplaceWhereOverride_SdkV2{}), - "rewind_spec": reflect.TypeOf(RewindSpec_SdkV2{}), + "full_refresh_selection": reflect.TypeOf(types.String{}), + "parameters": reflect.TypeOf(types.String{}), + "refresh_selection": reflect.TypeOf(types.String{}), + "replace_where_overrides": reflect.TypeOf(ReplaceWhereOverride_SdkV2{}), + "reset_checkpoint_selection": reflect.TypeOf(types.String{}), + "rewind_spec": reflect.TypeOf(RewindSpec_SdkV2{}), } } @@ -10233,15 +10273,16 @@ func (m StartUpdate_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectVa return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "cause": m.Cause, - "full_refresh": m.FullRefresh, - "full_refresh_selection": m.FullRefreshSelection, - "parameters": m.Parameters, - "pipeline_id": m.PipelineId, - "refresh_selection": m.RefreshSelection, - "replace_where_overrides": m.ReplaceWhereOverrides, - "rewind_spec": m.RewindSpec, - "validate_only": m.ValidateOnly, + "cause": m.Cause, + "full_refresh": m.FullRefresh, + "full_refresh_selection": m.FullRefreshSelection, + "parameters": m.Parameters, + "pipeline_id": m.PipelineId, + "refresh_selection": m.RefreshSelection, + "replace_where_overrides": m.ReplaceWhereOverrides, + "reset_checkpoint_selection": m.ResetCheckpointSelection, + "rewind_spec": m.RewindSpec, + "validate_only": m.ValidateOnly, }) } @@ -10264,6 +10305,9 @@ func (m StartUpdate_SdkV2) Type(ctx context.Context) attr.Type { "replace_where_overrides": basetypes.ListType{ ElemType: ReplaceWhereOverride_SdkV2{}.Type(ctx), }, + "reset_checkpoint_selection": basetypes.ListType{ + ElemType: types.StringType, + }, "rewind_spec": 
basetypes.ListType{ ElemType: RewindSpec_SdkV2{}.Type(ctx), }, @@ -10376,6 +10420,32 @@ func (m *StartUpdate_SdkV2) SetReplaceWhereOverrides(ctx context.Context, v []Re m.ReplaceWhereOverrides = types.ListValueMust(t, vs) } +// GetResetCheckpointSelection returns the value of the ResetCheckpointSelection field in StartUpdate_SdkV2 as +// a slice of types.String values. +// If the field is unknown or null, the boolean return value is false. +func (m *StartUpdate_SdkV2) GetResetCheckpointSelection(ctx context.Context) ([]types.String, bool) { + if m.ResetCheckpointSelection.IsNull() || m.ResetCheckpointSelection.IsUnknown() { + return nil, false + } + var v []types.String + d := m.ResetCheckpointSelection.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetResetCheckpointSelection sets the value of the ResetCheckpointSelection field in StartUpdate_SdkV2. +func (m *StartUpdate_SdkV2) SetResetCheckpointSelection(ctx context.Context, v []types.String) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["reset_checkpoint_selection"] + t = t.(attr.TypeWithElementType).ElementType() + m.ResetCheckpointSelection = types.ListValueMust(t, vs) +} + // GetRewindSpec returns the value of the RewindSpec field in StartUpdate_SdkV2 as // a RewindSpec_SdkV2 value. // If the field is unknown or null, the boolean return value is false. diff --git a/internal/service/pipelines_tf/model.go b/internal/service/pipelines_tf/model.go index edacea2be5..2a1c9341af 100755 --- a/internal/service/pipelines_tf/model.go +++ b/internal/service/pipelines_tf/model.go @@ -3409,6 +3409,8 @@ type GetPipelineResponse struct { CreatorUserName types.String `tfsdk:"creator_user_name"` // Serverless budget policy ID of this pipeline. 
EffectiveBudgetPolicyId types.String `tfsdk:"effective_budget_policy_id"` + // Publishing mode of the pipeline + EffectivePublishingMode types.String `tfsdk:"effective_publishing_mode"` // The health of a pipeline. Health types.String `tfsdk:"health"` // The last time the pipeline settings were modified or created. @@ -3491,6 +3493,7 @@ func (m GetPipelineResponse) ApplySchemaCustomizations(attrs map[string]tfschema attrs["cluster_id"] = attrs["cluster_id"].SetOptional() attrs["creator_user_name"] = attrs["creator_user_name"].SetOptional() attrs["effective_budget_policy_id"] = attrs["effective_budget_policy_id"].SetOptional() + attrs["effective_publishing_mode"] = attrs["effective_publishing_mode"].SetOptional() attrs["health"] = attrs["health"].SetOptional() attrs["last_modified"] = attrs["last_modified"].SetOptional() attrs["latest_updates"] = attrs["latest_updates"].SetOptional() @@ -3530,6 +3533,7 @@ func (m GetPipelineResponse) ToObjectValue(ctx context.Context) basetypes.Object "cluster_id": m.ClusterId, "creator_user_name": m.CreatorUserName, "effective_budget_policy_id": m.EffectiveBudgetPolicyId, + "effective_publishing_mode": m.EffectivePublishingMode, "health": m.Health, "last_modified": m.LastModified, "latest_updates": m.LatestUpdates, @@ -3550,6 +3554,7 @@ func (m GetPipelineResponse) Type(ctx context.Context) attr.Type { "cluster_id": types.StringType, "creator_user_name": types.StringType, "effective_budget_policy_id": types.StringType, + "effective_publishing_mode": types.StringType, "health": types.StringType, "last_modified": types.Int64Type, "latest_updates": basetypes.ListType{ @@ -8540,6 +8545,19 @@ type PipelinesEnvironment struct { // dependency could be , , (WSFS or Volumes in Databricks), Dependencies types.List `tfsdk:"dependencies"` + // The environment version of the serverless Python environment used to + // execute customer Python code. 
Each environment version includes a + // specific Python version and a curated set of pre-installed libraries with + // defined versions, providing a stable and reproducible execution + // environment. + // + // Databricks supports a three-year lifecycle for each environment version. + // For available versions and their included packages, see + // https://docs.databricks.com/aws/en/release-notes/serverless/environment-version/ + // + // The value should be a string representing the environment version number, + // for example: `"4"`. + EnvironmentVersion types.String `tfsdk:"environment_version"` } func (to *PipelinesEnvironment) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from PipelinesEnvironment) { @@ -8562,6 +8580,7 @@ func (to *PipelinesEnvironment) SyncFieldsDuringRead(ctx context.Context, from P func (m PipelinesEnvironment) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["dependencies"] = attrs["dependencies"].SetOptional() + attrs["environment_version"] = attrs["environment_version"].SetOptional() return attrs } @@ -8586,7 +8605,8 @@ func (m PipelinesEnvironment) ToObjectValue(ctx context.Context) basetypes.Objec return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "dependencies": m.Dependencies, + "dependencies": m.Dependencies, + "environment_version": m.EnvironmentVersion, }) } @@ -8597,6 +8617,7 @@ func (m PipelinesEnvironment) Type(ctx context.Context) attr.Type { "dependencies": basetypes.ListType{ ElemType: types.StringType, }, + "environment_version": types.StringType, }, } } @@ -9866,6 +9887,11 @@ type StartUpdate struct { // Only replace_where flows may be specified. Flows not listed use their // original predicate. ReplaceWhereOverrides types.List `tfsdk:"replace_where_overrides"` + // A list of flows for which this update should reset the streaming + // checkpoint. 
This selection will not clear the data in the flow's target + // table. Flows in this list may also appear in refresh_selection and + // full_refresh_selection. + ResetCheckpointSelection types.List `tfsdk:"reset_checkpoint_selection"` // The information about the requested rewind operation. If specified this // is a rewind mode update. RewindSpec types.Object `tfsdk:"rewind_spec"` @@ -9893,6 +9919,12 @@ func (to *StartUpdate) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from // set the resulting resource state to the empty list to match the planned value. to.ReplaceWhereOverrides = from.ReplaceWhereOverrides } + if !from.ResetCheckpointSelection.IsNull() && !from.ResetCheckpointSelection.IsUnknown() && to.ResetCheckpointSelection.IsNull() && len(from.ResetCheckpointSelection.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for ResetCheckpointSelection, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.ResetCheckpointSelection = from.ResetCheckpointSelection + } if !from.RewindSpec.IsNull() && !from.RewindSpec.IsUnknown() { if toRewindSpec, ok := to.GetRewindSpec(ctx); ok { if fromRewindSpec, ok := from.GetRewindSpec(ctx); ok { @@ -9923,6 +9955,12 @@ func (to *StartUpdate) SyncFieldsDuringRead(ctx context.Context, from StartUpdat // set the resulting resource state to the empty list to match the planned value. to.ReplaceWhereOverrides = from.ReplaceWhereOverrides } + if !from.ResetCheckpointSelection.IsNull() && !from.ResetCheckpointSelection.IsUnknown() && to.ResetCheckpointSelection.IsNull() && len(from.ResetCheckpointSelection.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
+ // If a user specified a non-Null, empty list for ResetCheckpointSelection, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.ResetCheckpointSelection = from.ResetCheckpointSelection + } if !from.RewindSpec.IsNull() && !from.RewindSpec.IsUnknown() { if toRewindSpec, ok := to.GetRewindSpec(ctx); ok { if fromRewindSpec, ok := from.GetRewindSpec(ctx); ok { @@ -9940,6 +9978,7 @@ func (m StartUpdate) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu attrs["parameters"] = attrs["parameters"].SetOptional() attrs["refresh_selection"] = attrs["refresh_selection"].SetOptional() attrs["replace_where_overrides"] = attrs["replace_where_overrides"].SetOptional() + attrs["reset_checkpoint_selection"] = attrs["reset_checkpoint_selection"].SetOptional() attrs["rewind_spec"] = attrs["rewind_spec"].SetOptional() attrs["validate_only"] = attrs["validate_only"].SetOptional() attrs["pipeline_id"] = attrs["pipeline_id"].SetRequired() @@ -9956,11 +9995,12 @@ func (m StartUpdate) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu // SDK values. 
func (m StartUpdate) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "full_refresh_selection": reflect.TypeOf(types.String{}), - "parameters": reflect.TypeOf(types.String{}), - "refresh_selection": reflect.TypeOf(types.String{}), - "replace_where_overrides": reflect.TypeOf(ReplaceWhereOverride{}), - "rewind_spec": reflect.TypeOf(RewindSpec{}), + "full_refresh_selection": reflect.TypeOf(types.String{}), + "parameters": reflect.TypeOf(types.String{}), + "refresh_selection": reflect.TypeOf(types.String{}), + "replace_where_overrides": reflect.TypeOf(ReplaceWhereOverride{}), + "reset_checkpoint_selection": reflect.TypeOf(types.String{}), + "rewind_spec": reflect.TypeOf(RewindSpec{}), } } @@ -9971,15 +10011,16 @@ func (m StartUpdate) ToObjectValue(ctx context.Context) basetypes.ObjectValue { return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "cause": m.Cause, - "full_refresh": m.FullRefresh, - "full_refresh_selection": m.FullRefreshSelection, - "parameters": m.Parameters, - "pipeline_id": m.PipelineId, - "refresh_selection": m.RefreshSelection, - "replace_where_overrides": m.ReplaceWhereOverrides, - "rewind_spec": m.RewindSpec, - "validate_only": m.ValidateOnly, + "cause": m.Cause, + "full_refresh": m.FullRefresh, + "full_refresh_selection": m.FullRefreshSelection, + "parameters": m.Parameters, + "pipeline_id": m.PipelineId, + "refresh_selection": m.RefreshSelection, + "replace_where_overrides": m.ReplaceWhereOverrides, + "reset_checkpoint_selection": m.ResetCheckpointSelection, + "rewind_spec": m.RewindSpec, + "validate_only": m.ValidateOnly, }) } @@ -10002,6 +10043,9 @@ func (m StartUpdate) Type(ctx context.Context) attr.Type { "replace_where_overrides": basetypes.ListType{ ElemType: ReplaceWhereOverride{}.Type(ctx), }, + "reset_checkpoint_selection": basetypes.ListType{ + ElemType: types.StringType, + }, "rewind_spec": RewindSpec{}.Type(ctx), "validate_only": 
types.BoolType, }, @@ -10112,6 +10156,32 @@ func (m *StartUpdate) SetReplaceWhereOverrides(ctx context.Context, v []ReplaceW m.ReplaceWhereOverrides = types.ListValueMust(t, vs) } +// GetResetCheckpointSelection returns the value of the ResetCheckpointSelection field in StartUpdate as +// a slice of types.String values. +// If the field is unknown or null, the boolean return value is false. +func (m *StartUpdate) GetResetCheckpointSelection(ctx context.Context) ([]types.String, bool) { + if m.ResetCheckpointSelection.IsNull() || m.ResetCheckpointSelection.IsUnknown() { + return nil, false + } + var v []types.String + d := m.ResetCheckpointSelection.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetResetCheckpointSelection sets the value of the ResetCheckpointSelection field in StartUpdate. +func (m *StartUpdate) SetResetCheckpointSelection(ctx context.Context, v []types.String) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["reset_checkpoint_selection"] + t = t.(attr.TypeWithElementType).ElementType() + m.ResetCheckpointSelection = types.ListValueMust(t, vs) +} + // GetRewindSpec returns the value of the RewindSpec field in StartUpdate as // a RewindSpec value. // If the field is unknown or null, the boolean return value is false. diff --git a/internal/service/postgres_tf/legacy_model.go b/internal/service/postgres_tf/legacy_model.go index 15f6a6ef79..9043198287 100755 --- a/internal/service/postgres_tf/legacy_model.go +++ b/internal/service/postgres_tf/legacy_model.go @@ -557,6 +557,121 @@ func (m *CreateBranchRequest_SdkV2) SetBranch(ctx context.Context, v Branch_SdkV m.Branch = types.ListValueMust(t, vs) } +type CreateDatabaseRequest_SdkV2 struct { + // The desired specification of a Database. 
+ Database types.List `tfsdk:"database"` + // The ID to use for the Database, which will become the final component of + // the database's resource name. This ID becomes the database name in + // postgres. + // + // This value should be 4-63 characters, and only use characters available + // in DNS names, as defined by RFC-1123 + // + // If database_id is not specified in the request, it is generated + // automatically. + DatabaseId types.String `tfsdk:"-"` + // The Branch where this Database will be created. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"-"` +} + +func (to *CreateDatabaseRequest_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from CreateDatabaseRequest_SdkV2) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + // Recursively sync the fields of Database + toDatabase.SyncFieldsDuringCreateOrUpdate(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (to *CreateDatabaseRequest_SdkV2) SyncFieldsDuringRead(ctx context.Context, from CreateDatabaseRequest_SdkV2) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + toDatabase.SyncFieldsDuringRead(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (m CreateDatabaseRequest_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["database"] = attrs["database"].SetRequired() + attrs["database"] = attrs["database"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) + attrs["parent"] = attrs["parent"].SetRequired() + attrs["database_id"] = attrs["database_id"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in 
complex fields in CreateDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m CreateDatabaseRequest_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "database": reflect.TypeOf(Database_SdkV2{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, CreateDatabaseRequest_SdkV2 +// only implements ToObjectValue() and Type(). +func (m CreateDatabaseRequest_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "database": m.Database, + "database_id": m.DatabaseId, + "parent": m.Parent, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m CreateDatabaseRequest_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "database": basetypes.ListType{ + ElemType: Database_SdkV2{}.Type(ctx), + }, + "database_id": types.StringType, + "parent": types.StringType, + }, + } +} + +// GetDatabase returns the value of the Database field in CreateDatabaseRequest_SdkV2 as +// a Database_SdkV2 value. +// If the field is unknown or null, the boolean return value is false. 
+func (m *CreateDatabaseRequest_SdkV2) GetDatabase(ctx context.Context) (Database_SdkV2, bool) { + var e Database_SdkV2 + if m.Database.IsNull() || m.Database.IsUnknown() { + return e, false + } + var v []Database_SdkV2 + d := m.Database.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + if len(v) == 0 { + return e, false + } + return v[0], true +} + +// SetDatabase sets the value of the Database field in CreateDatabaseRequest_SdkV2. +func (m *CreateDatabaseRequest_SdkV2) SetDatabase(ctx context.Context, v Database_SdkV2) { + vs := []attr.Value{v.ToObjectValue(ctx)} + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["database"] + m.Database = types.ListValueMust(t, vs) +} + type CreateEndpointRequest_SdkV2 struct { // The Endpoint to create. Endpoint types.List `tfsdk:"endpoint"` @@ -886,6 +1001,187 @@ func (m *CreateRoleRequest_SdkV2) SetRole(ctx context.Context, v Role_SdkV2) { m.Role = types.ListValueMust(t, vs) } +// Database represents a Postgres database within a Branch. +type Database_SdkV2 struct { + // A timestamp indicating when the database was created. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The resource name of the database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"name"` + // The branch containing this database. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"parent"` + // The desired state of the Database. + Spec types.List `tfsdk:"spec"` + // The observed state of the Database. + Status types.List `tfsdk:"status"` + // A timestamp indicating when the database was last updated. 
+ UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` +} + +func (to *Database_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Database_SdkV2) { + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. + to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + // Recursively sync the fields of Spec + toSpec.SyncFieldsDuringCreateOrUpdate(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + // Recursively sync the fields of Status + toStatus.SyncFieldsDuringCreateOrUpdate(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } +} + +func (to *Database_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Database_SdkV2) { + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. 
+ to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + toSpec.SyncFieldsDuringRead(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + toStatus.SyncFieldsDuringRead(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } +} + +func (m Database_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["name"] = attrs["name"].SetOptional() + attrs["parent"] = attrs["parent"].SetComputed() + attrs["spec"] = attrs["spec"].SetOptional() + attrs["spec"] = attrs["spec"].SetComputed() + attrs["spec"] = attrs["spec"].(tfschema.ListNestedAttributeBuilder).AddPlanModifier(listplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["spec"] = attrs["spec"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) + attrs["status"] = attrs["status"].SetComputed() + attrs["status"] = attrs["status"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) + attrs["update_time"] = attrs["update_time"].SetComputed() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in Database. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. 
They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m Database_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "spec": reflect.TypeOf(DatabaseDatabaseSpec_SdkV2{}), + "status": reflect.TypeOf(DatabaseDatabaseStatus_SdkV2{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, Database_SdkV2 +// only implements ToObjectValue() and Type(). +func (m Database_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "create_time": m.CreateTime, + "name": m.Name, + "parent": m.Parent, + "spec": m.Spec, + "status": m.Status, + "update_time": m.UpdateTime, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m Database_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "create_time": timetypes.RFC3339{}.Type(ctx), + "name": types.StringType, + "parent": types.StringType, + "spec": basetypes.ListType{ + ElemType: DatabaseDatabaseSpec_SdkV2{}.Type(ctx), + }, + "status": basetypes.ListType{ + ElemType: DatabaseDatabaseStatus_SdkV2{}.Type(ctx), + }, + "update_time": timetypes.RFC3339{}.Type(ctx), + }, + } +} + +// GetSpec returns the value of the Spec field in Database_SdkV2 as +// a DatabaseDatabaseSpec_SdkV2 value. +// If the field is unknown or null, the boolean return value is false. 
+func (m *Database_SdkV2) GetSpec(ctx context.Context) (DatabaseDatabaseSpec_SdkV2, bool) { + var e DatabaseDatabaseSpec_SdkV2 + if m.Spec.IsNull() || m.Spec.IsUnknown() { + return e, false + } + var v []DatabaseDatabaseSpec_SdkV2 + d := m.Spec.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + if len(v) == 0 { + return e, false + } + return v[0], true +} + +// SetSpec sets the value of the Spec field in Database_SdkV2. +func (m *Database_SdkV2) SetSpec(ctx context.Context, v DatabaseDatabaseSpec_SdkV2) { + vs := []attr.Value{v.ToObjectValue(ctx)} + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["spec"] + m.Spec = types.ListValueMust(t, vs) +} + +// GetStatus returns the value of the Status field in Database_SdkV2 as +// a DatabaseDatabaseStatus_SdkV2 value. +// If the field is unknown or null, the boolean return value is false. +func (m *Database_SdkV2) GetStatus(ctx context.Context) (DatabaseDatabaseStatus_SdkV2, bool) { + var e DatabaseDatabaseStatus_SdkV2 + if m.Status.IsNull() || m.Status.IsUnknown() { + return e, false + } + var v []DatabaseDatabaseStatus_SdkV2 + d := m.Status.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + if len(v) == 0 { + return e, false + } + return v[0], true +} + +// SetStatus sets the value of the Status field in Database_SdkV2. +func (m *Database_SdkV2) SetStatus(ctx context.Context, v DatabaseDatabaseStatus_SdkV2) { + vs := []attr.Value{v.ToObjectValue(ctx)} + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["status"] + m.Status = types.ListValueMust(t, vs) +} + type DatabaseCredential_SdkV2 struct { // Timestamp in UTC of when this credential expires. ExpireTime timetypes.RFC3339 `tfsdk:"expire_time"` @@ -940,6 +1236,166 @@ func (m DatabaseCredential_SdkV2) Type(ctx context.Context) attr.Type { } } +type DatabaseDatabaseSpec_SdkV2 struct { + // The name of the Postgres database. 
+ // + // This expects a valid Postgres identifier as specified in the link below. + // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + // Required when creating the Database. + // + // To rename, pass a valid postgres identifier when updating the Database. + PostgresDatabase types.String `tfsdk:"postgres_database"` + // The name of the role that owns the database. Format: + // projects/{project_id}/branches/{branch_id}/roles/{role_id} + // + // To change the owner, pass valid existing Role name when updating the + // Database + // + // A database always has an owner. + Role types.String `tfsdk:"role"` +} + +func (to *DatabaseDatabaseSpec_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseDatabaseSpec_SdkV2) { +} + +func (to *DatabaseDatabaseSpec_SdkV2) SyncFieldsDuringRead(ctx context.Context, from DatabaseDatabaseSpec_SdkV2) { +} + +func (m DatabaseDatabaseSpec_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["postgres_database"] = attrs["postgres_database"].SetOptional() + attrs["role"] = attrs["role"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseDatabaseSpec. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. 
+func (m DatabaseDatabaseSpec_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseDatabaseSpec_SdkV2 +// only implements ToObjectValue() and Type(). +func (m DatabaseDatabaseSpec_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "postgres_database": m.PostgresDatabase, + "role": m.Role, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m DatabaseDatabaseSpec_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "postgres_database": types.StringType, + "role": types.StringType, + }, + } +} + +type DatabaseDatabaseStatus_SdkV2 struct { + // The name of the Postgres database. + PostgresDatabase types.String `tfsdk:"postgres_database"` + // The name of the role that owns the database. Format: + // projects/{project_id}/branches/{branch_id}/roles/{role_id} + Role types.String `tfsdk:"role"` +} + +func (to *DatabaseDatabaseStatus_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseDatabaseStatus_SdkV2) { +} + +func (to *DatabaseDatabaseStatus_SdkV2) SyncFieldsDuringRead(ctx context.Context, from DatabaseDatabaseStatus_SdkV2) { +} + +func (m DatabaseDatabaseStatus_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["postgres_database"] = attrs["postgres_database"].SetOptional() + attrs["role"] = attrs["role"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseDatabaseStatus. 
+// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m DatabaseDatabaseStatus_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseDatabaseStatus_SdkV2 +// only implements ToObjectValue() and Type(). +func (m DatabaseDatabaseStatus_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "postgres_database": m.PostgresDatabase, + "role": m.Role, + }) +} + +// Type implements basetypes.ObjectValuable. 
+func (m DatabaseDatabaseStatus_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "postgres_database": types.StringType, + "role": types.StringType, + }, + } +} + +type DatabaseOperationMetadata_SdkV2 struct { +} + +func (to *DatabaseOperationMetadata_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseOperationMetadata_SdkV2) { +} + +func (to *DatabaseOperationMetadata_SdkV2) SyncFieldsDuringRead(ctx context.Context, from DatabaseOperationMetadata_SdkV2) { +} + +func (m DatabaseOperationMetadata_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseOperationMetadata. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m DatabaseOperationMetadata_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseOperationMetadata_SdkV2 +// only implements ToObjectValue() and Type(). 
+func (m DatabaseOperationMetadata_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{}) +} + +// Type implements basetypes.ObjectValuable. +func (m DatabaseOperationMetadata_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{}, + } +} + // Databricks Error that is returned by all Databricks APIs. type DatabricksServiceExceptionWithDetailsProto_SdkV2 struct { Details types.List `tfsdk:"details"` @@ -1094,6 +1550,55 @@ func (m DeleteBranchRequest_SdkV2) Type(ctx context.Context) attr.Type { } } +type DeleteDatabaseRequest_SdkV2 struct { + // The resource name of the postgres database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` +} + +func (to *DeleteDatabaseRequest_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DeleteDatabaseRequest_SdkV2) { +} + +func (to *DeleteDatabaseRequest_SdkV2) SyncFieldsDuringRead(ctx context.Context, from DeleteDatabaseRequest_SdkV2) { +} + +func (m DeleteDatabaseRequest_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["name"] = attrs["name"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DeleteDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. 
+func (m DeleteDatabaseRequest_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DeleteDatabaseRequest_SdkV2 +// only implements ToObjectValue() and Type(). +func (m DeleteDatabaseRequest_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "name": m.Name, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m DeleteDatabaseRequest_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "name": types.StringType, + }, + } +} + type DeleteEndpointRequest_SdkV2 struct { // The full resource path of the endpoint to delete. Format: // projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -2317,6 +2822,55 @@ func (m GetBranchRequest_SdkV2) Type(ctx context.Context) attr.Type { } } +type GetDatabaseRequest_SdkV2 struct { + // The name of the Database to retrieve. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` +} + +func (to *GetDatabaseRequest_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from GetDatabaseRequest_SdkV2) { +} + +func (to *GetDatabaseRequest_SdkV2) SyncFieldsDuringRead(ctx context.Context, from GetDatabaseRequest_SdkV2) { +} + +func (m GetDatabaseRequest_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["name"] = attrs["name"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in GetDatabaseRequest. 
+// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m GetDatabaseRequest_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, GetDatabaseRequest_SdkV2 +// only implements ToObjectValue() and Type(). +func (m GetDatabaseRequest_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "name": m.Name, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m GetDatabaseRequest_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "name": types.StringType, + }, + } +} + type GetEndpointRequest_SdkV2 struct { // The full resource path of the endpoint to retrieve. Format: // projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -2763,6 +3317,161 @@ func (m *ListBranchesResponse_SdkV2) SetBranches(ctx context.Context, v []Branch m.Branches = types.ListValueMust(t, vs) } +type ListDatabasesRequest_SdkV2 struct { + // Upper bound for items returned. + PageSize types.Int64 `tfsdk:"-"` + // Pagination token to go to the next page of Databases. Requests first page + // if absent. + PageToken types.String `tfsdk:"-"` + // The Branch that owns this collection of databases. 
Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"-"` +} + +func (to *ListDatabasesRequest_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from ListDatabasesRequest_SdkV2) { +} + +func (to *ListDatabasesRequest_SdkV2) SyncFieldsDuringRead(ctx context.Context, from ListDatabasesRequest_SdkV2) { +} + +func (m ListDatabasesRequest_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["parent"] = attrs["parent"].SetRequired() + attrs["page_token"] = attrs["page_token"].SetOptional() + attrs["page_size"] = attrs["page_size"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ListDatabasesRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m ListDatabasesRequest_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ListDatabasesRequest_SdkV2 +// only implements ToObjectValue() and Type(). 
+func (m ListDatabasesRequest_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "page_size": m.PageSize, + "page_token": m.PageToken, + "parent": m.Parent, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m ListDatabasesRequest_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "page_size": types.Int64Type, + "page_token": types.StringType, + "parent": types.StringType, + }, + } +} + +type ListDatabasesResponse_SdkV2 struct { + // List of databases. + Databases types.List `tfsdk:"databases"` + // Pagination token to request the next page of databases. + NextPageToken types.String `tfsdk:"next_page_token"` +} + +func (to *ListDatabasesResponse_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from ListDatabasesResponse_SdkV2) { + if !from.Databases.IsNull() && !from.Databases.IsUnknown() && to.Databases.IsNull() && len(from.Databases.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Databases, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.Databases = from.Databases + } +} + +func (to *ListDatabasesResponse_SdkV2) SyncFieldsDuringRead(ctx context.Context, from ListDatabasesResponse_SdkV2) { + if !from.Databases.IsNull() && !from.Databases.IsUnknown() && to.Databases.IsNull() && len(from.Databases.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Databases, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. 
+ to.Databases = from.Databases + } +} + +func (m ListDatabasesResponse_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["databases"] = attrs["databases"].SetOptional() + attrs["next_page_token"] = attrs["next_page_token"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ListDatabasesResponse. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m ListDatabasesResponse_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "databases": reflect.TypeOf(Database_SdkV2{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ListDatabasesResponse_SdkV2 +// only implements ToObjectValue() and Type(). +func (m ListDatabasesResponse_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "databases": m.Databases, + "next_page_token": m.NextPageToken, + }) +} + +// Type implements basetypes.ObjectValuable. 
+func (m ListDatabasesResponse_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "databases": basetypes.ListType{ + ElemType: Database_SdkV2{}.Type(ctx), + }, + "next_page_token": types.StringType, + }, + } +} + +// GetDatabases returns the value of the Databases field in ListDatabasesResponse_SdkV2 as +// a slice of Database_SdkV2 values. +// If the field is unknown or null, the boolean return value is false. +func (m *ListDatabasesResponse_SdkV2) GetDatabases(ctx context.Context) ([]Database_SdkV2, bool) { + if m.Databases.IsNull() || m.Databases.IsUnknown() { + return nil, false + } + var v []Database_SdkV2 + d := m.Databases.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetDatabases sets the value of the Databases field in ListDatabasesResponse_SdkV2. +func (m *ListDatabasesResponse_SdkV2) SetDatabases(ctx context.Context, v []Database_SdkV2) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e.ToObjectValue(ctx)) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["databases"] + t = t.(attr.TypeWithElementType).ElementType() + m.Databases = types.ListValueMust(t, vs) +} + type ListEndpointsRequest_SdkV2 struct { // Upper bound for items returned. Cannot be negative. PageSize types.Int64 `tfsdk:"-"` @@ -3809,6 +4518,9 @@ type ProjectSpec_SdkV2 struct { // Human-readable project name. Length should be between 1 and 256 // characters. DisplayName types.String `tfsdk:"display_name"` + // Whether to enable PG native password login on all endpoints in this + // project. Defaults to true. + EnablePgNativeLogin types.Bool `tfsdk:"enable_pg_native_login"` // The number of seconds to retain the shared history for point in time // recovery for all branches in this project. Value should be between 0s and // 2592000s (up to 30 days). 
@@ -3858,6 +4570,7 @@ func (m ProjectSpec_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.A attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].SetOptional() attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["display_name"] = attrs["display_name"].SetOptional() + attrs["enable_pg_native_login"] = attrs["enable_pg_native_login"].SetOptional() attrs["history_retention_duration"] = attrs["history_retention_duration"].SetOptional() attrs["pg_version"] = attrs["pg_version"].SetOptional() attrs["pg_version"] = attrs["pg_version"].(tfschema.Int64AttributeBuilder).AddPlanModifier(int64planmodifier.RequiresReplace()).(tfschema.AttributeBuilder) @@ -3890,6 +4603,7 @@ func (m ProjectSpec_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectVa "custom_tags": m.CustomTags, "default_endpoint_settings": m.DefaultEndpointSettings, "display_name": m.DisplayName, + "enable_pg_native_login": m.EnablePgNativeLogin, "history_retention_duration": m.HistoryRetentionDuration, "pg_version": m.PgVersion, }) @@ -3907,6 +4621,7 @@ func (m ProjectSpec_SdkV2) Type(ctx context.Context) attr.Type { ElemType: ProjectDefaultEndpointSettings_SdkV2{}.Type(ctx), }, "display_name": types.StringType, + "enable_pg_native_login": types.BoolType, "history_retention_duration": timetypes.GoDuration{}.Type(ctx), "pg_version": types.Int64Type, }, @@ -3976,6 +4691,9 @@ type ProjectStatus_SdkV2 struct { DefaultEndpointSettings types.List `tfsdk:"default_endpoint_settings"` // The effective human-readable project name. DisplayName types.String `tfsdk:"display_name"` + // Whether to enable PG native password login on all endpoints in this + // project. + EnablePgNativeLogin types.Bool `tfsdk:"enable_pg_native_login"` // The effective number of seconds to retain the shared history for point in // time recovery. 
HistoryRetentionDuration timetypes.GoDuration `tfsdk:"history_retention_duration"` @@ -4029,6 +4747,7 @@ func (m ProjectStatus_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].SetComputed() attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["display_name"] = attrs["display_name"].SetComputed() + attrs["enable_pg_native_login"] = attrs["enable_pg_native_login"].SetComputed() attrs["history_retention_duration"] = attrs["history_retention_duration"].SetComputed() attrs["owner"] = attrs["owner"].SetComputed() attrs["pg_version"] = attrs["pg_version"].SetComputed() @@ -4063,6 +4782,7 @@ func (m ProjectStatus_SdkV2) ToObjectValue(ctx context.Context) basetypes.Object "custom_tags": m.CustomTags, "default_endpoint_settings": m.DefaultEndpointSettings, "display_name": m.DisplayName, + "enable_pg_native_login": m.EnablePgNativeLogin, "history_retention_duration": m.HistoryRetentionDuration, "owner": m.Owner, "pg_version": m.PgVersion, @@ -4083,6 +4803,7 @@ func (m ProjectStatus_SdkV2) Type(ctx context.Context) attr.Type { ElemType: ProjectDefaultEndpointSettings_SdkV2{}.Type(ctx), }, "display_name": types.StringType, + "enable_pg_native_login": types.BoolType, "history_retention_duration": timetypes.GoDuration{}.Type(ctx), "owner": types.StringType, "pg_version": types.Int64Type, @@ -4968,6 +5689,118 @@ func (m *UpdateBranchRequest_SdkV2) SetBranch(ctx context.Context, v Branch_SdkV m.Branch = types.ListValueMust(t, vs) } +type UpdateDatabaseRequest_SdkV2 struct { + // The Database to update. + // + // The database's `name` field is used to identify the database to update. + // Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Database types.List `tfsdk:"database"` + // The resource name of the database. 
Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` + // The list of fields to update. If unspecified, all fields will be updated + // when possible. + UpdateMask types.String `tfsdk:"-"` +} + +func (to *UpdateDatabaseRequest_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from UpdateDatabaseRequest_SdkV2) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + // Recursively sync the fields of Database + toDatabase.SyncFieldsDuringCreateOrUpdate(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (to *UpdateDatabaseRequest_SdkV2) SyncFieldsDuringRead(ctx context.Context, from UpdateDatabaseRequest_SdkV2) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + toDatabase.SyncFieldsDuringRead(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (m UpdateDatabaseRequest_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["database"] = attrs["database"].SetRequired() + attrs["database"] = attrs["database"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) + attrs["name"] = attrs["name"].SetRequired() + attrs["update_mask"] = attrs["update_mask"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in UpdateDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. 
The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m UpdateDatabaseRequest_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "database": reflect.TypeOf(Database_SdkV2{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, UpdateDatabaseRequest_SdkV2 +// only implements ToObjectValue() and Type(). +func (m UpdateDatabaseRequest_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "database": m.Database, + "name": m.Name, + "update_mask": m.UpdateMask, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m UpdateDatabaseRequest_SdkV2) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "database": basetypes.ListType{ + ElemType: Database_SdkV2{}.Type(ctx), + }, + "name": types.StringType, + "update_mask": types.StringType, + }, + } +} + +// GetDatabase returns the value of the Database field in UpdateDatabaseRequest_SdkV2 as +// a Database_SdkV2 value. +// If the field is unknown or null, the boolean return value is false. +func (m *UpdateDatabaseRequest_SdkV2) GetDatabase(ctx context.Context) (Database_SdkV2, bool) { + var e Database_SdkV2 + if m.Database.IsNull() || m.Database.IsUnknown() { + return e, false + } + var v []Database_SdkV2 + d := m.Database.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + if len(v) == 0 { + return e, false + } + return v[0], true +} + +// SetDatabase sets the value of the Database field in UpdateDatabaseRequest_SdkV2. 
+func (m *UpdateDatabaseRequest_SdkV2) SetDatabase(ctx context.Context, v Database_SdkV2) { + vs := []attr.Value{v.ToObjectValue(ctx)} + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["database"] + m.Database = types.ListValueMust(t, vs) +} + type UpdateEndpointRequest_SdkV2 struct { // The Endpoint to update. // diff --git a/internal/service/postgres_tf/model.go b/internal/service/postgres_tf/model.go index d5eb4508ca..beaba3d657 100755 --- a/internal/service/postgres_tf/model.go +++ b/internal/service/postgres_tf/model.go @@ -544,6 +544,117 @@ func (m *CreateBranchRequest) SetBranch(ctx context.Context, v Branch) { m.Branch = vs } +type CreateDatabaseRequest struct { + // The desired specification of a Database. + Database types.Object `tfsdk:"database"` + // The ID to use for the Database, which will become the final component of + // the database's resource name. This ID becomes the database name in + // postgres. + // + // This value should be 4-63 characters, and only use characters available + // in DNS names, as defined by RFC-1123 + // + // If database_id is not specified in the request, it is generated + // automatically. + DatabaseId types.String `tfsdk:"-"` + // The Branch where this Database will be created. 
Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"-"` +} + +func (to *CreateDatabaseRequest) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from CreateDatabaseRequest) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + // Recursively sync the fields of Database + toDatabase.SyncFieldsDuringCreateOrUpdate(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (to *CreateDatabaseRequest) SyncFieldsDuringRead(ctx context.Context, from CreateDatabaseRequest) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + toDatabase.SyncFieldsDuringRead(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (m CreateDatabaseRequest) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["database"] = attrs["database"].SetRequired() + attrs["parent"] = attrs["parent"].SetRequired() + attrs["database_id"] = attrs["database_id"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in CreateDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. 
+func (m CreateDatabaseRequest) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "database": reflect.TypeOf(Database{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, CreateDatabaseRequest +// only implements ToObjectValue() and Type(). +func (m CreateDatabaseRequest) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "database": m.Database, + "database_id": m.DatabaseId, + "parent": m.Parent, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m CreateDatabaseRequest) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "database": Database{}.Type(ctx), + "database_id": types.StringType, + "parent": types.StringType, + }, + } +} + +// GetDatabase returns the value of the Database field in CreateDatabaseRequest as +// a Database value. +// If the field is unknown or null, the boolean return value is false. +func (m *CreateDatabaseRequest) GetDatabase(ctx context.Context) (Database, bool) { + var e Database + if m.Database.IsNull() || m.Database.IsUnknown() { + return e, false + } + var v Database + d := m.Database.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetDatabase sets the value of the Database field in CreateDatabaseRequest. +func (m *CreateDatabaseRequest) SetDatabase(ctx context.Context, v Database) { + vs := v.ToObjectValue(ctx) + m.Database = vs +} + type CreateEndpointRequest struct { // The Endpoint to create. 
Endpoint types.Object `tfsdk:"endpoint"` @@ -861,6 +972,179 @@ func (m *CreateRoleRequest) SetRole(ctx context.Context, v Role) { m.Role = vs } +// Database represents a Postgres database within a Branch. +type Database struct { + // A timestamp indicating when the database was created. + CreateTime timetypes.RFC3339 `tfsdk:"create_time"` + // The resource name of the database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"name"` + // The branch containing this database. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"parent"` + // The desired state of the Database. + Spec types.Object `tfsdk:"spec"` + // The observed state of the Database. + Status types.Object `tfsdk:"status"` + // A timestamp indicating when the database was last updated. + UpdateTime timetypes.RFC3339 `tfsdk:"update_time"` +} + +func (to *Database) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Database) { + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. 
+ to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + // Recursively sync the fields of Spec + toSpec.SyncFieldsDuringCreateOrUpdate(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + // Recursively sync the fields of Status + toStatus.SyncFieldsDuringCreateOrUpdate(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } +} + +func (to *Database) SyncFieldsDuringRead(ctx context.Context, from Database) { + if !from.Spec.IsUnknown() && !from.Spec.IsNull() { + // Spec is an input only field and not returned by the service, so we keep the value from the prior state. + to.Spec = from.Spec + } + if !from.Spec.IsNull() && !from.Spec.IsUnknown() { + if toSpec, ok := to.GetSpec(ctx); ok { + if fromSpec, ok := from.GetSpec(ctx); ok { + toSpec.SyncFieldsDuringRead(ctx, fromSpec) + to.SetSpec(ctx, toSpec) + } + } + } + if !from.Status.IsNull() && !from.Status.IsUnknown() { + if toStatus, ok := to.GetStatus(ctx); ok { + if fromStatus, ok := from.GetStatus(ctx); ok { + toStatus.SyncFieldsDuringRead(ctx, fromStatus) + to.SetStatus(ctx, toStatus) + } + } + } +} + +func (m Database) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["create_time"] = attrs["create_time"].SetComputed() + attrs["name"] = attrs["name"].SetOptional() + attrs["parent"] = attrs["parent"].SetComputed() + attrs["spec"] = attrs["spec"].SetOptional() + attrs["spec"] = attrs["spec"].SetComputed() + attrs["spec"] = attrs["spec"].(tfschema.SingleNestedAttributeBuilder).AddPlanModifier(objectplanmodifier.UseStateForUnknown()).(tfschema.AttributeBuilder) + attrs["status"] = attrs["status"].SetComputed() + attrs["update_time"] = attrs["update_time"].SetComputed() + + return attrs +} 
+ +// GetComplexFieldTypes returns a map of the types of elements in complex fields in Database. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m Database) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "spec": reflect.TypeOf(DatabaseDatabaseSpec{}), + "status": reflect.TypeOf(DatabaseDatabaseStatus{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, Database +// only implements ToObjectValue() and Type(). +func (m Database) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "create_time": m.CreateTime, + "name": m.Name, + "parent": m.Parent, + "spec": m.Spec, + "status": m.Status, + "update_time": m.UpdateTime, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m Database) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "create_time": timetypes.RFC3339{}.Type(ctx), + "name": types.StringType, + "parent": types.StringType, + "spec": DatabaseDatabaseSpec{}.Type(ctx), + "status": DatabaseDatabaseStatus{}.Type(ctx), + "update_time": timetypes.RFC3339{}.Type(ctx), + }, + } +} + +// GetSpec returns the value of the Spec field in Database as +// a DatabaseDatabaseSpec value. 
+// If the field is unknown or null, the boolean return value is false. +func (m *Database) GetSpec(ctx context.Context) (DatabaseDatabaseSpec, bool) { + var e DatabaseDatabaseSpec + if m.Spec.IsNull() || m.Spec.IsUnknown() { + return e, false + } + var v DatabaseDatabaseSpec + d := m.Spec.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetSpec sets the value of the Spec field in Database. +func (m *Database) SetSpec(ctx context.Context, v DatabaseDatabaseSpec) { + vs := v.ToObjectValue(ctx) + m.Spec = vs +} + +// GetStatus returns the value of the Status field in Database as +// a DatabaseDatabaseStatus value. +// If the field is unknown or null, the boolean return value is false. +func (m *Database) GetStatus(ctx context.Context) (DatabaseDatabaseStatus, bool) { + var e DatabaseDatabaseStatus + if m.Status.IsNull() || m.Status.IsUnknown() { + return e, false + } + var v DatabaseDatabaseStatus + d := m.Status.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetStatus sets the value of the Status field in Database. +func (m *Database) SetStatus(ctx context.Context, v DatabaseDatabaseStatus) { + vs := v.ToObjectValue(ctx) + m.Status = vs +} + type DatabaseCredential struct { // Timestamp in UTC of when this credential expires. ExpireTime timetypes.RFC3339 `tfsdk:"expire_time"` @@ -915,6 +1199,166 @@ func (m DatabaseCredential) Type(ctx context.Context) attr.Type { } } +type DatabaseDatabaseSpec struct { + // The name of the Postgres database. + // + // This expects a valid Postgres identifier as specified in the link below. + // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + // Required when creating the Database. 
+ // + // To rename, pass a valid postgres identifier when updating the Database. + PostgresDatabase types.String `tfsdk:"postgres_database"` + // The name of the role that owns the database. Format: + // projects/{project_id}/branches/{branch_id}/roles/{role_id} + // + // To change the owner, pass valid existing Role name when updating the + // Database + // + // A database always has an owner. + Role types.String `tfsdk:"role"` +} + +func (to *DatabaseDatabaseSpec) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseDatabaseSpec) { +} + +func (to *DatabaseDatabaseSpec) SyncFieldsDuringRead(ctx context.Context, from DatabaseDatabaseSpec) { +} + +func (m DatabaseDatabaseSpec) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["postgres_database"] = attrs["postgres_database"].SetOptional() + attrs["role"] = attrs["role"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseDatabaseSpec. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m DatabaseDatabaseSpec) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseDatabaseSpec +// only implements ToObjectValue() and Type(). 
+func (m DatabaseDatabaseSpec) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "postgres_database": m.PostgresDatabase, + "role": m.Role, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m DatabaseDatabaseSpec) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "postgres_database": types.StringType, + "role": types.StringType, + }, + } +} + +type DatabaseDatabaseStatus struct { + // The name of the Postgres database. + PostgresDatabase types.String `tfsdk:"postgres_database"` + // The name of the role that owns the database. Format: + // projects/{project_id}/branches/{branch_id}/roles/{role_id} + Role types.String `tfsdk:"role"` +} + +func (to *DatabaseDatabaseStatus) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseDatabaseStatus) { +} + +func (to *DatabaseDatabaseStatus) SyncFieldsDuringRead(ctx context.Context, from DatabaseDatabaseStatus) { +} + +func (m DatabaseDatabaseStatus) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["postgres_database"] = attrs["postgres_database"].SetOptional() + attrs["role"] = attrs["role"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseDatabaseStatus. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. 
+func (m DatabaseDatabaseStatus) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseDatabaseStatus +// only implements ToObjectValue() and Type(). +func (m DatabaseDatabaseStatus) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "postgres_database": m.PostgresDatabase, + "role": m.Role, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m DatabaseDatabaseStatus) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "postgres_database": types.StringType, + "role": types.StringType, + }, + } +} + +type DatabaseOperationMetadata struct { +} + +func (to *DatabaseOperationMetadata) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DatabaseOperationMetadata) { +} + +func (to *DatabaseOperationMetadata) SyncFieldsDuringRead(ctx context.Context, from DatabaseOperationMetadata) { +} + +func (m DatabaseOperationMetadata) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DatabaseOperationMetadata. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. 
They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m DatabaseOperationMetadata) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DatabaseOperationMetadata +// only implements ToObjectValue() and Type(). +func (m DatabaseOperationMetadata) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{}) +} + +// Type implements basetypes.ObjectValuable. +func (m DatabaseOperationMetadata) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{}, + } +} + // Databricks Error that is returned by all Databricks APIs. type DatabricksServiceExceptionWithDetailsProto struct { Details types.List `tfsdk:"details"` @@ -1069,6 +1513,55 @@ func (m DeleteBranchRequest) Type(ctx context.Context) attr.Type { } } +type DeleteDatabaseRequest struct { + // The resource name of the postgres database. Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` +} + +func (to *DeleteDatabaseRequest) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from DeleteDatabaseRequest) { +} + +func (to *DeleteDatabaseRequest) SyncFieldsDuringRead(ctx context.Context, from DeleteDatabaseRequest) { +} + +func (m DeleteDatabaseRequest) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["name"] = attrs["name"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in DeleteDatabaseRequest. 
+// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m DeleteDatabaseRequest) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, DeleteDatabaseRequest +// only implements ToObjectValue() and Type(). +func (m DeleteDatabaseRequest) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "name": m.Name, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m DeleteDatabaseRequest) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "name": types.StringType, + }, + } +} + type DeleteEndpointRequest struct { // The full resource path of the endpoint to delete. Format: // projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -2264,6 +2757,55 @@ func (m GetBranchRequest) Type(ctx context.Context) attr.Type { } } +type GetDatabaseRequest struct { + // The name of the Database to retrieve. 
Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` +} + +func (to *GetDatabaseRequest) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from GetDatabaseRequest) { +} + +func (to *GetDatabaseRequest) SyncFieldsDuringRead(ctx context.Context, from GetDatabaseRequest) { +} + +func (m GetDatabaseRequest) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["name"] = attrs["name"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in GetDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m GetDatabaseRequest) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, GetDatabaseRequest +// only implements ToObjectValue() and Type(). +func (m GetDatabaseRequest) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "name": m.Name, + }) +} + +// Type implements basetypes.ObjectValuable. 
+func (m GetDatabaseRequest) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "name": types.StringType, + }, + } +} + type GetEndpointRequest struct { // The full resource path of the endpoint to retrieve. Format: // projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -2706,6 +3248,161 @@ func (m *ListBranchesResponse) SetBranches(ctx context.Context, v []Branch) { m.Branches = types.ListValueMust(t, vs) } +type ListDatabasesRequest struct { + // Upper bound for items returned. + PageSize types.Int64 `tfsdk:"-"` + // Pagination token to go to the next page of Databases. Requests first page + // if absent. + PageToken types.String `tfsdk:"-"` + // The Branch that owns this collection of databases. Format: + // projects/{project_id}/branches/{branch_id} + Parent types.String `tfsdk:"-"` +} + +func (to *ListDatabasesRequest) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from ListDatabasesRequest) { +} + +func (to *ListDatabasesRequest) SyncFieldsDuringRead(ctx context.Context, from ListDatabasesRequest) { +} + +func (m ListDatabasesRequest) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["parent"] = attrs["parent"].SetRequired() + attrs["page_token"] = attrs["page_token"].SetOptional() + attrs["page_size"] = attrs["page_size"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ListDatabasesRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. 
They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m ListDatabasesRequest) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ListDatabasesRequest +// only implements ToObjectValue() and Type(). +func (m ListDatabasesRequest) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "page_size": m.PageSize, + "page_token": m.PageToken, + "parent": m.Parent, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m ListDatabasesRequest) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "page_size": types.Int64Type, + "page_token": types.StringType, + "parent": types.StringType, + }, + } +} + +type ListDatabasesResponse struct { + // List of databases. + Databases types.List `tfsdk:"databases"` + // Pagination token to request the next page of databases. + NextPageToken types.String `tfsdk:"next_page_token"` +} + +func (to *ListDatabasesResponse) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from ListDatabasesResponse) { + if !from.Databases.IsNull() && !from.Databases.IsUnknown() && to.Databases.IsNull() && len(from.Databases.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Databases, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. 
+ to.Databases = from.Databases + } +} + +func (to *ListDatabasesResponse) SyncFieldsDuringRead(ctx context.Context, from ListDatabasesResponse) { + if !from.Databases.IsNull() && !from.Databases.IsUnknown() && to.Databases.IsNull() && len(from.Databases.Elements()) == 0 { + // The default representation of an empty list for TF autogenerated resources in the resource state is Null. + // If a user specified a non-Null, empty list for Databases, and the deserialized field value is Null, + // set the resulting resource state to the empty list to match the planned value. + to.Databases = from.Databases + } +} + +func (m ListDatabasesResponse) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["databases"] = attrs["databases"].SetOptional() + attrs["next_page_token"] = attrs["next_page_token"].SetOptional() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ListDatabasesResponse. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m ListDatabasesResponse) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "databases": reflect.TypeOf(Database{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, ListDatabasesResponse +// only implements ToObjectValue() and Type(). 
+func (m ListDatabasesResponse) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "databases": m.Databases, + "next_page_token": m.NextPageToken, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m ListDatabasesResponse) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "databases": basetypes.ListType{ + ElemType: Database{}.Type(ctx), + }, + "next_page_token": types.StringType, + }, + } +} + +// GetDatabases returns the value of the Databases field in ListDatabasesResponse as +// a slice of Database values. +// If the field is unknown or null, the boolean return value is false. +func (m *ListDatabasesResponse) GetDatabases(ctx context.Context) ([]Database, bool) { + if m.Databases.IsNull() || m.Databases.IsUnknown() { + return nil, false + } + var v []Database + d := m.Databases.ElementsAs(ctx, &v, true) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetDatabases sets the value of the Databases field in ListDatabasesResponse. +func (m *ListDatabasesResponse) SetDatabases(ctx context.Context, v []Database) { + vs := make([]attr.Value, 0, len(v)) + for _, e := range v { + vs = append(vs, e.ToObjectValue(ctx)) + } + t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["databases"] + t = t.(attr.TypeWithElementType).ElementType() + m.Databases = types.ListValueMust(t, vs) +} + type ListEndpointsRequest struct { // Upper bound for items returned. Cannot be negative. PageSize types.Int64 `tfsdk:"-"` @@ -3736,6 +4433,9 @@ type ProjectSpec struct { // Human-readable project name. Length should be between 1 and 256 // characters. DisplayName types.String `tfsdk:"display_name"` + // Whether to enable PG native password login on all endpoints in this + // project. Defaults to true. 
+ EnablePgNativeLogin types.Bool `tfsdk:"enable_pg_native_login"` // The number of seconds to retain the shared history for point in time // recovery for all branches in this project. Value should be between 0s and // 2592000s (up to 30 days). @@ -3784,6 +4484,7 @@ func (m ProjectSpec) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu attrs["custom_tags"] = attrs["custom_tags"].SetOptional() attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].SetOptional() attrs["display_name"] = attrs["display_name"].SetOptional() + attrs["enable_pg_native_login"] = attrs["enable_pg_native_login"].SetOptional() attrs["history_retention_duration"] = attrs["history_retention_duration"].SetOptional() attrs["pg_version"] = attrs["pg_version"].SetOptional() attrs["pg_version"] = attrs["pg_version"].(tfschema.Int64AttributeBuilder).AddPlanModifier(int64planmodifier.RequiresReplace()).(tfschema.AttributeBuilder) @@ -3816,6 +4517,7 @@ func (m ProjectSpec) ToObjectValue(ctx context.Context) basetypes.ObjectValue { "custom_tags": m.CustomTags, "default_endpoint_settings": m.DefaultEndpointSettings, "display_name": m.DisplayName, + "enable_pg_native_login": m.EnablePgNativeLogin, "history_retention_duration": m.HistoryRetentionDuration, "pg_version": m.PgVersion, }) @@ -3831,6 +4533,7 @@ func (m ProjectSpec) Type(ctx context.Context) attr.Type { }, "default_endpoint_settings": ProjectDefaultEndpointSettings{}.Type(ctx), "display_name": types.StringType, + "enable_pg_native_login": types.BoolType, "history_retention_duration": timetypes.GoDuration{}.Type(ctx), "pg_version": types.Int64Type, }, @@ -3899,6 +4602,9 @@ type ProjectStatus struct { DefaultEndpointSettings types.Object `tfsdk:"default_endpoint_settings"` // The effective human-readable project name. DisplayName types.String `tfsdk:"display_name"` + // Whether to enable PG native password login on all endpoints in this + // project. 
+ EnablePgNativeLogin types.Bool `tfsdk:"enable_pg_native_login"` // The effective number of seconds to retain the shared history for point in // time recovery. HistoryRetentionDuration timetypes.GoDuration `tfsdk:"history_retention_duration"` @@ -3951,6 +4657,7 @@ func (m ProjectStatus) ApplySchemaCustomizations(attrs map[string]tfschema.Attri attrs["custom_tags"] = attrs["custom_tags"].SetComputed() attrs["default_endpoint_settings"] = attrs["default_endpoint_settings"].SetComputed() attrs["display_name"] = attrs["display_name"].SetComputed() + attrs["enable_pg_native_login"] = attrs["enable_pg_native_login"].SetComputed() attrs["history_retention_duration"] = attrs["history_retention_duration"].SetComputed() attrs["owner"] = attrs["owner"].SetComputed() attrs["pg_version"] = attrs["pg_version"].SetComputed() @@ -3985,6 +4692,7 @@ func (m ProjectStatus) ToObjectValue(ctx context.Context) basetypes.ObjectValue "custom_tags": m.CustomTags, "default_endpoint_settings": m.DefaultEndpointSettings, "display_name": m.DisplayName, + "enable_pg_native_login": m.EnablePgNativeLogin, "history_retention_duration": m.HistoryRetentionDuration, "owner": m.Owner, "pg_version": m.PgVersion, @@ -4003,6 +4711,7 @@ func (m ProjectStatus) Type(ctx context.Context) attr.Type { }, "default_endpoint_settings": ProjectDefaultEndpointSettings{}.Type(ctx), "display_name": types.StringType, + "enable_pg_native_login": types.BoolType, "history_retention_duration": timetypes.GoDuration{}.Type(ctx), "owner": types.StringType, "pg_version": types.Int64Type, @@ -4871,6 +5580,114 @@ func (m *UpdateBranchRequest) SetBranch(ctx context.Context, v Branch) { m.Branch = vs } +type UpdateDatabaseRequest struct { + // The Database to update. + // + // The database's `name` field is used to identify the database to update. + // Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Database types.Object `tfsdk:"database"` + // The resource name of the database. 
Format: + // projects/{project_id}/branches/{branch_id}/databases/{database_id} + Name types.String `tfsdk:"-"` + // The list of fields to update. If unspecified, all fields will be updated + // when possible. + UpdateMask types.String `tfsdk:"-"` +} + +func (to *UpdateDatabaseRequest) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from UpdateDatabaseRequest) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + // Recursively sync the fields of Database + toDatabase.SyncFieldsDuringCreateOrUpdate(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (to *UpdateDatabaseRequest) SyncFieldsDuringRead(ctx context.Context, from UpdateDatabaseRequest) { + if !from.Database.IsNull() && !from.Database.IsUnknown() { + if toDatabase, ok := to.GetDatabase(ctx); ok { + if fromDatabase, ok := from.GetDatabase(ctx); ok { + toDatabase.SyncFieldsDuringRead(ctx, fromDatabase) + to.SetDatabase(ctx, toDatabase) + } + } + } +} + +func (m UpdateDatabaseRequest) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["database"] = attrs["database"].SetRequired() + attrs["name"] = attrs["name"].SetRequired() + attrs["update_mask"] = attrs["update_mask"].SetRequired() + + return attrs +} + +// GetComplexFieldTypes returns a map of the types of elements in complex fields in UpdateDatabaseRequest. +// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry +// the type information of their elements in the Go type system. This function provides a way to +// retrieve the type information of the elements in complex fields at runtime. The values of the map +// are the reflected types of the contained elements. 
They must be either primitive values from the +// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF +// SDK values. +func (m UpdateDatabaseRequest) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "database": reflect.TypeOf(Database{}), + } +} + +// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise +// interfere with how the plugin framework retrieves and sets values in state. Thus, UpdateDatabaseRequest +// only implements ToObjectValue() and Type(). +func (m UpdateDatabaseRequest) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + m.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "database": m.Database, + "name": m.Name, + "update_mask": m.UpdateMask, + }) +} + +// Type implements basetypes.ObjectValuable. +func (m UpdateDatabaseRequest) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "database": Database{}.Type(ctx), + "name": types.StringType, + "update_mask": types.StringType, + }, + } +} + +// GetDatabase returns the value of the Database field in UpdateDatabaseRequest as +// a Database value. +// If the field is unknown or null, the boolean return value is false. +func (m *UpdateDatabaseRequest) GetDatabase(ctx context.Context) (Database, bool) { + var e Database + if m.Database.IsNull() || m.Database.IsUnknown() { + return e, false + } + var v Database + d := m.Database.As(ctx, &v, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + }) + if d.HasError() { + panic(pluginfwcommon.DiagToString(d)) + } + return v, true +} + +// SetDatabase sets the value of the Database field in UpdateDatabaseRequest. 
+func (m *UpdateDatabaseRequest) SetDatabase(ctx context.Context, v Database) { + vs := v.ToObjectValue(ctx) + m.Database = vs +} + type UpdateEndpointRequest struct { // The Endpoint to update. // diff --git a/internal/service/sharing_tf/legacy_model.go b/internal/service/sharing_tf/legacy_model.go index 1c930a11fa..5a9d9d4530 100755 --- a/internal/service/sharing_tf/legacy_model.go +++ b/internal/service/sharing_tf/legacy_model.go @@ -5640,9 +5640,6 @@ type Table_SdkV2 struct { Comment types.String `tfsdk:"comment"` // The id of the table. Id types.String `tfsdk:"id"` - // Internal information for D2D sharing that should not be disclosed to - // external users. - InternalAttributes types.List `tfsdk:"internal_attributes"` // The catalog and schema of the materialized table MaterializationNamespace types.String `tfsdk:"materialization_namespace"` // The name of a materialized table. @@ -5660,15 +5657,6 @@ type Table_SdkV2 struct { } func (to *Table_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Table_SdkV2) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - // Recursively sync the fields of InternalAttributes - toInternalAttributes.SyncFieldsDuringCreateOrUpdate(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
// If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -5678,14 +5666,6 @@ func (to *Table_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from } func (to *Table_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Table_SdkV2) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - toInternalAttributes.SyncFieldsDuringRead(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. // If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -5697,8 +5677,6 @@ func (to *Table_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Table_SdkV func (m Table_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["comment"] = attrs["comment"].SetOptional() attrs["id"] = attrs["id"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["materialization_namespace"] = attrs["materialization_namespace"].SetOptional() attrs["materialized_table_name"] = attrs["materialized_table_name"].SetOptional() attrs["name"] = attrs["name"].SetOptional() @@ -5719,8 +5697,7 @@ func (m Table_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.Attribu // SDK values. 
func (m Table_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "internal_attributes": reflect.TypeOf(TableInternalAttributes_SdkV2{}), - "tags": reflect.TypeOf(catalog_tf.TagKeyValue_SdkV2{}), + "tags": reflect.TypeOf(catalog_tf.TagKeyValue_SdkV2{}), } } @@ -5733,7 +5710,6 @@ func (m Table_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { map[string]attr.Value{ "comment": m.Comment, "id": m.Id, - "internal_attributes": m.InternalAttributes, "materialization_namespace": m.MaterializationNamespace, "materialized_table_name": m.MaterializedTableName, "name": m.Name, @@ -5748,11 +5724,8 @@ func (m Table_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { func (m Table_SdkV2) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ - "comment": types.StringType, - "id": types.StringType, - "internal_attributes": basetypes.ListType{ - ElemType: TableInternalAttributes_SdkV2{}.Type(ctx), - }, + "comment": types.StringType, + "id": types.StringType, "materialization_namespace": types.StringType, "materialized_table_name": types.StringType, "name": types.StringType, @@ -5766,32 +5739,6 @@ func (m Table_SdkV2) Type(ctx context.Context) attr.Type { } } -// GetInternalAttributes returns the value of the InternalAttributes field in Table_SdkV2 as -// a TableInternalAttributes_SdkV2 value. -// If the field is unknown or null, the boolean return value is false. 
-func (m *Table_SdkV2) GetInternalAttributes(ctx context.Context) (TableInternalAttributes_SdkV2, bool) { - var e TableInternalAttributes_SdkV2 - if m.InternalAttributes.IsNull() || m.InternalAttributes.IsUnknown() { - return e, false - } - var v []TableInternalAttributes_SdkV2 - d := m.InternalAttributes.ElementsAs(ctx, &v, true) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - if len(v) == 0 { - return e, false - } - return v[0], true -} - -// SetInternalAttributes sets the value of the InternalAttributes field in Table_SdkV2. -func (m *Table_SdkV2) SetInternalAttributes(ctx context.Context, v TableInternalAttributes_SdkV2) { - vs := []attr.Value{v.ToObjectValue(ctx)} - t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["internal_attributes"] - m.InternalAttributes = types.ListValueMust(t, vs) -} - // GetTags returns the value of the Tags field in Table_SdkV2 as // a slice of catalog_tf.TagKeyValue_SdkV2 values. // If the field is unknown or null, the boolean return value is false. @@ -5818,137 +5765,6 @@ func (m *Table_SdkV2) SetTags(ctx context.Context, v []catalog_tf.TagKeyValue_Sd m.Tags = types.ListValueMust(t, vs) } -// Internal information for D2D sharing that should not be disclosed to external -// users. -type TableInternalAttributes_SdkV2 struct { - // Managed Delta Metadata location for foreign iceberg tables. - AuxiliaryManagedLocation types.String `tfsdk:"auxiliary_managed_location"` - // Storage locations of all table dependencies for shared views. Used on the - // recipient side for SEG (Secure Egress Gateway) whitelisting. - DependencyStorageLocations types.List `tfsdk:"dependency_storage_locations"` - // Whether the table has uniform enabled. 
- HasDeltaUniformIceberg types.Bool `tfsdk:"has_delta_uniform_iceberg"` - // Will be populated in the reconciliation response for VIEW and - // FOREIGN_TABLE, with the value of the parent UC entity's storage_location, - // following the same logic as getManagedEntityPath in - // CreateStagingTableHandler, which is used to store the materialized table - // for a shared VIEW/FOREIGN_TABLE for D2O queries. The value will be used - // on the recipient side to be whitelisted when SEG is enabled on the - // workspace of the recipient, to allow the recipient users to query this - // shared VIEW/FOREIGN_TABLE. - ParentStorageLocation types.String `tfsdk:"parent_storage_location"` - // The cloud storage location of a shard table with DIRECTORY_BASED_TABLE - // type. - StorageLocation types.String `tfsdk:"storage_location"` - // The type of the shared table. - Type_ types.String `tfsdk:"type"` - // The view definition of a shared view. DEPRECATED. - ViewDefinition types.String `tfsdk:"view_definition"` -} - -func (to *TableInternalAttributes_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from TableInternalAttributes_SdkV2) { - if !from.DependencyStorageLocations.IsNull() && !from.DependencyStorageLocations.IsUnknown() && to.DependencyStorageLocations.IsNull() && len(from.DependencyStorageLocations.Elements()) == 0 { - // The default representation of an empty list for TF autogenerated resources in the resource state is Null. - // If a user specified a non-Null, empty list for DependencyStorageLocations, and the deserialized field value is Null, - // set the resulting resource state to the empty list to match the planned value. 
- to.DependencyStorageLocations = from.DependencyStorageLocations - } -} - -func (to *TableInternalAttributes_SdkV2) SyncFieldsDuringRead(ctx context.Context, from TableInternalAttributes_SdkV2) { - if !from.DependencyStorageLocations.IsNull() && !from.DependencyStorageLocations.IsUnknown() && to.DependencyStorageLocations.IsNull() && len(from.DependencyStorageLocations.Elements()) == 0 { - // The default representation of an empty list for TF autogenerated resources in the resource state is Null. - // If a user specified a non-Null, empty list for DependencyStorageLocations, and the deserialized field value is Null, - // set the resulting resource state to the empty list to match the planned value. - to.DependencyStorageLocations = from.DependencyStorageLocations - } -} - -func (m TableInternalAttributes_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["auxiliary_managed_location"] = attrs["auxiliary_managed_location"].SetOptional() - attrs["dependency_storage_locations"] = attrs["dependency_storage_locations"].SetOptional() - attrs["has_delta_uniform_iceberg"] = attrs["has_delta_uniform_iceberg"].SetOptional() - attrs["parent_storage_location"] = attrs["parent_storage_location"].SetOptional() - attrs["storage_location"] = attrs["storage_location"].SetOptional() - attrs["type"] = attrs["type"].SetOptional() - attrs["view_definition"] = attrs["view_definition"].SetOptional() - - return attrs -} - -// GetComplexFieldTypes returns a map of the types of elements in complex fields in TableInternalAttributes. -// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry -// the type information of their elements in the Go type system. This function provides a way to -// retrieve the type information of the elements in complex fields at runtime. The values of the map -// are the reflected types of the contained elements. 
They must be either primitive values from the -// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF -// SDK values. -func (m TableInternalAttributes_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return map[string]reflect.Type{ - "dependency_storage_locations": reflect.TypeOf(types.String{}), - } -} - -// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise -// interfere with how the plugin framework retrieves and sets values in state. Thus, TableInternalAttributes_SdkV2 -// only implements ToObjectValue() and Type(). -func (m TableInternalAttributes_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { - return types.ObjectValueMust( - m.Type(ctx).(basetypes.ObjectType).AttrTypes, - map[string]attr.Value{ - "auxiliary_managed_location": m.AuxiliaryManagedLocation, - "dependency_storage_locations": m.DependencyStorageLocations, - "has_delta_uniform_iceberg": m.HasDeltaUniformIceberg, - "parent_storage_location": m.ParentStorageLocation, - "storage_location": m.StorageLocation, - "type": m.Type_, - "view_definition": m.ViewDefinition, - }) -} - -// Type implements basetypes.ObjectValuable. -func (m TableInternalAttributes_SdkV2) Type(ctx context.Context) attr.Type { - return types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "auxiliary_managed_location": types.StringType, - "dependency_storage_locations": basetypes.ListType{ - ElemType: types.StringType, - }, - "has_delta_uniform_iceberg": types.BoolType, - "parent_storage_location": types.StringType, - "storage_location": types.StringType, - "type": types.StringType, - "view_definition": types.StringType, - }, - } -} - -// GetDependencyStorageLocations returns the value of the DependencyStorageLocations field in TableInternalAttributes_SdkV2 as -// a slice of types.String values. -// If the field is unknown or null, the boolean return value is false. 
-func (m *TableInternalAttributes_SdkV2) GetDependencyStorageLocations(ctx context.Context) ([]types.String, bool) { - if m.DependencyStorageLocations.IsNull() || m.DependencyStorageLocations.IsUnknown() { - return nil, false - } - var v []types.String - d := m.DependencyStorageLocations.ElementsAs(ctx, &v, true) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - return v, true -} - -// SetDependencyStorageLocations sets the value of the DependencyStorageLocations field in TableInternalAttributes_SdkV2. -func (m *TableInternalAttributes_SdkV2) SetDependencyStorageLocations(ctx context.Context, v []types.String) { - vs := make([]attr.Value, 0, len(v)) - for _, e := range v { - vs = append(vs, e) - } - t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["dependency_storage_locations"] - t = t.(attr.TypeWithElementType).ElementType() - m.DependencyStorageLocations = types.ListValueMust(t, vs) -} - type UpdateProvider_SdkV2 struct { // Description about the provider. Comment types.String `tfsdk:"comment"` @@ -6521,9 +6337,6 @@ type Volume_SdkV2 struct { // shared_volume_id for recon to check if this volume is already in // recipient's DB or not. Id types.String `tfsdk:"id"` - // Internal attributes for D2D sharing that should not be disclosed to - // external users. - InternalAttributes types.List `tfsdk:"internal_attributes"` // The name of the volume. Name types.String `tfsdk:"name"` // The name of the schema that the volume belongs to. 
@@ -6537,15 +6350,6 @@ type Volume_SdkV2 struct { } func (to *Volume_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Volume_SdkV2) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - // Recursively sync the fields of InternalAttributes - toInternalAttributes.SyncFieldsDuringCreateOrUpdate(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. // If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -6555,14 +6359,6 @@ func (to *Volume_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from } func (to *Volume_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Volume_SdkV2) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - toInternalAttributes.SyncFieldsDuringRead(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
// If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -6574,8 +6370,6 @@ func (to *Volume_SdkV2) SyncFieldsDuringRead(ctx context.Context, from Volume_Sd func (m Volume_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["comment"] = attrs["comment"].SetOptional() attrs["id"] = attrs["id"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].(tfschema.ListNestedAttributeBuilder).AddValidator(listvalidator.SizeAtMost(1)).(tfschema.AttributeBuilder) attrs["name"] = attrs["name"].SetOptional() attrs["schema"] = attrs["schema"].SetOptional() attrs["share"] = attrs["share"].SetOptional() @@ -6594,8 +6388,7 @@ func (m Volume_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.Attrib // SDK values. func (m Volume_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "internal_attributes": reflect.TypeOf(VolumeInternalAttributes_SdkV2{}), - "tags": reflect.TypeOf(catalog_tf.TagKeyValue_SdkV2{}), + "tags": reflect.TypeOf(catalog_tf.TagKeyValue_SdkV2{}), } } @@ -6606,14 +6399,13 @@ func (m Volume_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "comment": m.Comment, - "id": m.Id, - "internal_attributes": m.InternalAttributes, - "name": m.Name, - "schema": m.Schema, - "share": m.Share, - "share_id": m.ShareId, - "tags": m.Tags, + "comment": m.Comment, + "id": m.Id, + "name": m.Name, + "schema": m.Schema, + "share": m.Share, + "share_id": m.ShareId, + "tags": m.Tags, }) } @@ -6621,11 +6413,8 @@ func (m Volume_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { func (m Volume_SdkV2) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: 
map[string]attr.Type{ - "comment": types.StringType, - "id": types.StringType, - "internal_attributes": basetypes.ListType{ - ElemType: VolumeInternalAttributes_SdkV2{}.Type(ctx), - }, + "comment": types.StringType, + "id": types.StringType, "name": types.StringType, "schema": types.StringType, "share": types.StringType, @@ -6637,32 +6426,6 @@ func (m Volume_SdkV2) Type(ctx context.Context) attr.Type { } } -// GetInternalAttributes returns the value of the InternalAttributes field in Volume_SdkV2 as -// a VolumeInternalAttributes_SdkV2 value. -// If the field is unknown or null, the boolean return value is false. -func (m *Volume_SdkV2) GetInternalAttributes(ctx context.Context) (VolumeInternalAttributes_SdkV2, bool) { - var e VolumeInternalAttributes_SdkV2 - if m.InternalAttributes.IsNull() || m.InternalAttributes.IsUnknown() { - return e, false - } - var v []VolumeInternalAttributes_SdkV2 - d := m.InternalAttributes.ElementsAs(ctx, &v, true) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - if len(v) == 0 { - return e, false - } - return v[0], true -} - -// SetInternalAttributes sets the value of the InternalAttributes field in Volume_SdkV2. -func (m *Volume_SdkV2) SetInternalAttributes(ctx context.Context, v VolumeInternalAttributes_SdkV2) { - vs := []attr.Value{v.ToObjectValue(ctx)} - t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["internal_attributes"] - m.InternalAttributes = types.ListValueMust(t, vs) -} - // GetTags returns the value of the Tags field in Volume_SdkV2 as // a slice of catalog_tf.TagKeyValue_SdkV2 values. // If the field is unknown or null, the boolean return value is false. @@ -6688,58 +6451,3 @@ func (m *Volume_SdkV2) SetTags(ctx context.Context, v []catalog_tf.TagKeyValue_S t = t.(attr.TypeWithElementType).ElementType() m.Tags = types.ListValueMust(t, vs) } - -// Internal information for D2D sharing that should not be disclosed to external -// users. 
-type VolumeInternalAttributes_SdkV2 struct { - // The cloud storage location of the volume - StorageLocation types.String `tfsdk:"storage_location"` - // The type of the shared volume. - Type_ types.String `tfsdk:"type"` -} - -func (to *VolumeInternalAttributes_SdkV2) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from VolumeInternalAttributes_SdkV2) { -} - -func (to *VolumeInternalAttributes_SdkV2) SyncFieldsDuringRead(ctx context.Context, from VolumeInternalAttributes_SdkV2) { -} - -func (m VolumeInternalAttributes_SdkV2) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["storage_location"] = attrs["storage_location"].SetOptional() - attrs["type"] = attrs["type"].SetOptional() - - return attrs -} - -// GetComplexFieldTypes returns a map of the types of elements in complex fields in VolumeInternalAttributes. -// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry -// the type information of their elements in the Go type system. This function provides a way to -// retrieve the type information of the elements in complex fields at runtime. The values of the map -// are the reflected types of the contained elements. They must be either primitive values from the -// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF -// SDK values. -func (m VolumeInternalAttributes_SdkV2) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return map[string]reflect.Type{} -} - -// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise -// interfere with how the plugin framework retrieves and sets values in state. Thus, VolumeInternalAttributes_SdkV2 -// only implements ToObjectValue() and Type(). 
-func (m VolumeInternalAttributes_SdkV2) ToObjectValue(ctx context.Context) basetypes.ObjectValue { - return types.ObjectValueMust( - m.Type(ctx).(basetypes.ObjectType).AttrTypes, - map[string]attr.Value{ - "storage_location": m.StorageLocation, - "type": m.Type_, - }) -} - -// Type implements basetypes.ObjectValuable. -func (m VolumeInternalAttributes_SdkV2) Type(ctx context.Context) attr.Type { - return types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "storage_location": types.StringType, - "type": types.StringType, - }, - } -} diff --git a/internal/service/sharing_tf/model.go b/internal/service/sharing_tf/model.go index 052e306515..e9d773380f 100755 --- a/internal/service/sharing_tf/model.go +++ b/internal/service/sharing_tf/model.go @@ -5587,9 +5587,6 @@ type Table struct { Comment types.String `tfsdk:"comment"` // The id of the table. Id types.String `tfsdk:"id"` - // Internal information for D2D sharing that should not be disclosed to - // external users. - InternalAttributes types.Object `tfsdk:"internal_attributes"` // The catalog and schema of the materialized table MaterializationNamespace types.String `tfsdk:"materialization_namespace"` // The name of a materialized table. @@ -5607,15 +5604,6 @@ type Table struct { } func (to *Table) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Table) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - // Recursively sync the fields of InternalAttributes - toInternalAttributes.SyncFieldsDuringCreateOrUpdate(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
// If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -5625,14 +5613,6 @@ func (to *Table) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Table) } func (to *Table) SyncFieldsDuringRead(ctx context.Context, from Table) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - toInternalAttributes.SyncFieldsDuringRead(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. // If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -5644,7 +5624,6 @@ func (to *Table) SyncFieldsDuringRead(ctx context.Context, from Table) { func (m Table) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["comment"] = attrs["comment"].SetOptional() attrs["id"] = attrs["id"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].SetOptional() attrs["materialization_namespace"] = attrs["materialization_namespace"].SetOptional() attrs["materialized_table_name"] = attrs["materialized_table_name"].SetOptional() attrs["name"] = attrs["name"].SetOptional() @@ -5665,8 +5644,7 @@ func (m Table) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuil // SDK values. 
func (m Table) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "internal_attributes": reflect.TypeOf(TableInternalAttributes{}), - "tags": reflect.TypeOf(catalog_tf.TagKeyValue{}), + "tags": reflect.TypeOf(catalog_tf.TagKeyValue{}), } } @@ -5679,7 +5657,6 @@ func (m Table) ToObjectValue(ctx context.Context) basetypes.ObjectValue { map[string]attr.Value{ "comment": m.Comment, "id": m.Id, - "internal_attributes": m.InternalAttributes, "materialization_namespace": m.MaterializationNamespace, "materialized_table_name": m.MaterializedTableName, "name": m.Name, @@ -5696,7 +5673,6 @@ func (m Table) Type(ctx context.Context) attr.Type { AttrTypes: map[string]attr.Type{ "comment": types.StringType, "id": types.StringType, - "internal_attributes": TableInternalAttributes{}.Type(ctx), "materialization_namespace": types.StringType, "materialized_table_name": types.StringType, "name": types.StringType, @@ -5710,31 +5686,6 @@ func (m Table) Type(ctx context.Context) attr.Type { } } -// GetInternalAttributes returns the value of the InternalAttributes field in Table as -// a TableInternalAttributes value. -// If the field is unknown or null, the boolean return value is false. -func (m *Table) GetInternalAttributes(ctx context.Context) (TableInternalAttributes, bool) { - var e TableInternalAttributes - if m.InternalAttributes.IsNull() || m.InternalAttributes.IsUnknown() { - return e, false - } - var v TableInternalAttributes - d := m.InternalAttributes.As(ctx, &v, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - }) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - return v, true -} - -// SetInternalAttributes sets the value of the InternalAttributes field in Table. 
-func (m *Table) SetInternalAttributes(ctx context.Context, v TableInternalAttributes) { - vs := v.ToObjectValue(ctx) - m.InternalAttributes = vs -} - // GetTags returns the value of the Tags field in Table as // a slice of catalog_tf.TagKeyValue values. // If the field is unknown or null, the boolean return value is false. @@ -5761,137 +5712,6 @@ func (m *Table) SetTags(ctx context.Context, v []catalog_tf.TagKeyValue) { m.Tags = types.ListValueMust(t, vs) } -// Internal information for D2D sharing that should not be disclosed to external -// users. -type TableInternalAttributes struct { - // Managed Delta Metadata location for foreign iceberg tables. - AuxiliaryManagedLocation types.String `tfsdk:"auxiliary_managed_location"` - // Storage locations of all table dependencies for shared views. Used on the - // recipient side for SEG (Secure Egress Gateway) whitelisting. - DependencyStorageLocations types.List `tfsdk:"dependency_storage_locations"` - // Whether the table has uniform enabled. - HasDeltaUniformIceberg types.Bool `tfsdk:"has_delta_uniform_iceberg"` - // Will be populated in the reconciliation response for VIEW and - // FOREIGN_TABLE, with the value of the parent UC entity's storage_location, - // following the same logic as getManagedEntityPath in - // CreateStagingTableHandler, which is used to store the materialized table - // for a shared VIEW/FOREIGN_TABLE for D2O queries. The value will be used - // on the recipient side to be whitelisted when SEG is enabled on the - // workspace of the recipient, to allow the recipient users to query this - // shared VIEW/FOREIGN_TABLE. - ParentStorageLocation types.String `tfsdk:"parent_storage_location"` - // The cloud storage location of a shard table with DIRECTORY_BASED_TABLE - // type. - StorageLocation types.String `tfsdk:"storage_location"` - // The type of the shared table. - Type_ types.String `tfsdk:"type"` - // The view definition of a shared view. DEPRECATED. 
- ViewDefinition types.String `tfsdk:"view_definition"` -} - -func (to *TableInternalAttributes) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from TableInternalAttributes) { - if !from.DependencyStorageLocations.IsNull() && !from.DependencyStorageLocations.IsUnknown() && to.DependencyStorageLocations.IsNull() && len(from.DependencyStorageLocations.Elements()) == 0 { - // The default representation of an empty list for TF autogenerated resources in the resource state is Null. - // If a user specified a non-Null, empty list for DependencyStorageLocations, and the deserialized field value is Null, - // set the resulting resource state to the empty list to match the planned value. - to.DependencyStorageLocations = from.DependencyStorageLocations - } -} - -func (to *TableInternalAttributes) SyncFieldsDuringRead(ctx context.Context, from TableInternalAttributes) { - if !from.DependencyStorageLocations.IsNull() && !from.DependencyStorageLocations.IsUnknown() && to.DependencyStorageLocations.IsNull() && len(from.DependencyStorageLocations.Elements()) == 0 { - // The default representation of an empty list for TF autogenerated resources in the resource state is Null. - // If a user specified a non-Null, empty list for DependencyStorageLocations, and the deserialized field value is Null, - // set the resulting resource state to the empty list to match the planned value. 
- to.DependencyStorageLocations = from.DependencyStorageLocations - } -} - -func (m TableInternalAttributes) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["auxiliary_managed_location"] = attrs["auxiliary_managed_location"].SetOptional() - attrs["dependency_storage_locations"] = attrs["dependency_storage_locations"].SetOptional() - attrs["has_delta_uniform_iceberg"] = attrs["has_delta_uniform_iceberg"].SetOptional() - attrs["parent_storage_location"] = attrs["parent_storage_location"].SetOptional() - attrs["storage_location"] = attrs["storage_location"].SetOptional() - attrs["type"] = attrs["type"].SetOptional() - attrs["view_definition"] = attrs["view_definition"].SetOptional() - - return attrs -} - -// GetComplexFieldTypes returns a map of the types of elements in complex fields in TableInternalAttributes. -// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry -// the type information of their elements in the Go type system. This function provides a way to -// retrieve the type information of the elements in complex fields at runtime. The values of the map -// are the reflected types of the contained elements. They must be either primitive values from the -// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF -// SDK values. -func (m TableInternalAttributes) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return map[string]reflect.Type{ - "dependency_storage_locations": reflect.TypeOf(types.String{}), - } -} - -// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise -// interfere with how the plugin framework retrieves and sets values in state. Thus, TableInternalAttributes -// only implements ToObjectValue() and Type(). 
-func (m TableInternalAttributes) ToObjectValue(ctx context.Context) basetypes.ObjectValue { - return types.ObjectValueMust( - m.Type(ctx).(basetypes.ObjectType).AttrTypes, - map[string]attr.Value{ - "auxiliary_managed_location": m.AuxiliaryManagedLocation, - "dependency_storage_locations": m.DependencyStorageLocations, - "has_delta_uniform_iceberg": m.HasDeltaUniformIceberg, - "parent_storage_location": m.ParentStorageLocation, - "storage_location": m.StorageLocation, - "type": m.Type_, - "view_definition": m.ViewDefinition, - }) -} - -// Type implements basetypes.ObjectValuable. -func (m TableInternalAttributes) Type(ctx context.Context) attr.Type { - return types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "auxiliary_managed_location": types.StringType, - "dependency_storage_locations": basetypes.ListType{ - ElemType: types.StringType, - }, - "has_delta_uniform_iceberg": types.BoolType, - "parent_storage_location": types.StringType, - "storage_location": types.StringType, - "type": types.StringType, - "view_definition": types.StringType, - }, - } -} - -// GetDependencyStorageLocations returns the value of the DependencyStorageLocations field in TableInternalAttributes as -// a slice of types.String values. -// If the field is unknown or null, the boolean return value is false. -func (m *TableInternalAttributes) GetDependencyStorageLocations(ctx context.Context) ([]types.String, bool) { - if m.DependencyStorageLocations.IsNull() || m.DependencyStorageLocations.IsUnknown() { - return nil, false - } - var v []types.String - d := m.DependencyStorageLocations.ElementsAs(ctx, &v, true) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - return v, true -} - -// SetDependencyStorageLocations sets the value of the DependencyStorageLocations field in TableInternalAttributes. 
-func (m *TableInternalAttributes) SetDependencyStorageLocations(ctx context.Context, v []types.String) { - vs := make([]attr.Value, 0, len(v)) - for _, e := range v { - vs = append(vs, e) - } - t := m.Type(ctx).(basetypes.ObjectType).AttrTypes["dependency_storage_locations"] - t = t.(attr.TypeWithElementType).ElementType() - m.DependencyStorageLocations = types.ListValueMust(t, vs) -} - type UpdateProvider struct { // Description about the provider. Comment types.String `tfsdk:"comment"` @@ -6456,9 +6276,6 @@ type Volume struct { // shared_volume_id for recon to check if this volume is already in // recipient's DB or not. Id types.String `tfsdk:"id"` - // Internal attributes for D2D sharing that should not be disclosed to - // external users. - InternalAttributes types.Object `tfsdk:"internal_attributes"` // The name of the volume. Name types.String `tfsdk:"name"` // The name of the schema that the volume belongs to. @@ -6472,15 +6289,6 @@ type Volume struct { } func (to *Volume) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Volume) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - // Recursively sync the fields of InternalAttributes - toInternalAttributes.SyncFieldsDuringCreateOrUpdate(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. 
// If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -6490,14 +6298,6 @@ func (to *Volume) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from Volum } func (to *Volume) SyncFieldsDuringRead(ctx context.Context, from Volume) { - if !from.InternalAttributes.IsNull() && !from.InternalAttributes.IsUnknown() { - if toInternalAttributes, ok := to.GetInternalAttributes(ctx); ok { - if fromInternalAttributes, ok := from.GetInternalAttributes(ctx); ok { - toInternalAttributes.SyncFieldsDuringRead(ctx, fromInternalAttributes) - to.SetInternalAttributes(ctx, toInternalAttributes) - } - } - } if !from.Tags.IsNull() && !from.Tags.IsUnknown() && to.Tags.IsNull() && len(from.Tags.Elements()) == 0 { // The default representation of an empty list for TF autogenerated resources in the resource state is Null. // If a user specified a non-Null, empty list for Tags, and the deserialized field value is Null, @@ -6509,7 +6309,6 @@ func (to *Volume) SyncFieldsDuringRead(ctx context.Context, from Volume) { func (m Volume) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { attrs["comment"] = attrs["comment"].SetOptional() attrs["id"] = attrs["id"].SetOptional() - attrs["internal_attributes"] = attrs["internal_attributes"].SetOptional() attrs["name"] = attrs["name"].SetOptional() attrs["schema"] = attrs["schema"].SetOptional() attrs["share"] = attrs["share"].SetOptional() @@ -6528,8 +6327,7 @@ func (m Volume) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBui // SDK values. 
func (m Volume) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "internal_attributes": reflect.TypeOf(VolumeInternalAttributes{}), - "tags": reflect.TypeOf(catalog_tf.TagKeyValue{}), + "tags": reflect.TypeOf(catalog_tf.TagKeyValue{}), } } @@ -6540,14 +6338,13 @@ func (m Volume) ToObjectValue(ctx context.Context) basetypes.ObjectValue { return types.ObjectValueMust( m.Type(ctx).(basetypes.ObjectType).AttrTypes, map[string]attr.Value{ - "comment": m.Comment, - "id": m.Id, - "internal_attributes": m.InternalAttributes, - "name": m.Name, - "schema": m.Schema, - "share": m.Share, - "share_id": m.ShareId, - "tags": m.Tags, + "comment": m.Comment, + "id": m.Id, + "name": m.Name, + "schema": m.Schema, + "share": m.Share, + "share_id": m.ShareId, + "tags": m.Tags, }) } @@ -6555,13 +6352,12 @@ func (m Volume) ToObjectValue(ctx context.Context) basetypes.ObjectValue { func (m Volume) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ - "comment": types.StringType, - "id": types.StringType, - "internal_attributes": VolumeInternalAttributes{}.Type(ctx), - "name": types.StringType, - "schema": types.StringType, - "share": types.StringType, - "share_id": types.StringType, + "comment": types.StringType, + "id": types.StringType, + "name": types.StringType, + "schema": types.StringType, + "share": types.StringType, + "share_id": types.StringType, "tags": basetypes.ListType{ ElemType: catalog_tf.TagKeyValue{}.Type(ctx), }, @@ -6569,31 +6365,6 @@ func (m Volume) Type(ctx context.Context) attr.Type { } } -// GetInternalAttributes returns the value of the InternalAttributes field in Volume as -// a VolumeInternalAttributes value. -// If the field is unknown or null, the boolean return value is false. 
-func (m *Volume) GetInternalAttributes(ctx context.Context) (VolumeInternalAttributes, bool) { - var e VolumeInternalAttributes - if m.InternalAttributes.IsNull() || m.InternalAttributes.IsUnknown() { - return e, false - } - var v VolumeInternalAttributes - d := m.InternalAttributes.As(ctx, &v, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - }) - if d.HasError() { - panic(pluginfwcommon.DiagToString(d)) - } - return v, true -} - -// SetInternalAttributes sets the value of the InternalAttributes field in Volume. -func (m *Volume) SetInternalAttributes(ctx context.Context, v VolumeInternalAttributes) { - vs := v.ToObjectValue(ctx) - m.InternalAttributes = vs -} - // GetTags returns the value of the Tags field in Volume as // a slice of catalog_tf.TagKeyValue values. // If the field is unknown or null, the boolean return value is false. @@ -6619,58 +6390,3 @@ func (m *Volume) SetTags(ctx context.Context, v []catalog_tf.TagKeyValue) { t = t.(attr.TypeWithElementType).ElementType() m.Tags = types.ListValueMust(t, vs) } - -// Internal information for D2D sharing that should not be disclosed to external -// users. -type VolumeInternalAttributes struct { - // The cloud storage location of the volume - StorageLocation types.String `tfsdk:"storage_location"` - // The type of the shared volume. 
- Type_ types.String `tfsdk:"type"` -} - -func (to *VolumeInternalAttributes) SyncFieldsDuringCreateOrUpdate(ctx context.Context, from VolumeInternalAttributes) { -} - -func (to *VolumeInternalAttributes) SyncFieldsDuringRead(ctx context.Context, from VolumeInternalAttributes) { -} - -func (m VolumeInternalAttributes) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["storage_location"] = attrs["storage_location"].SetOptional() - attrs["type"] = attrs["type"].SetOptional() - - return attrs -} - -// GetComplexFieldTypes returns a map of the types of elements in complex fields in VolumeInternalAttributes. -// Container types (types.Map, types.List, types.Set) and object types (types.Object) do not carry -// the type information of their elements in the Go type system. This function provides a way to -// retrieve the type information of the elements in complex fields at runtime. The values of the map -// are the reflected types of the contained elements. They must be either primitive values from the -// plugin framework type system (types.String{}, types.Bool{}, types.Int64{}, types.Float64{}) or TF -// SDK values. -func (m VolumeInternalAttributes) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return map[string]reflect.Type{} -} - -// TFSDK types cannot implement the ObjectValuable interface directly, as it would otherwise -// interfere with how the plugin framework retrieves and sets values in state. Thus, VolumeInternalAttributes -// only implements ToObjectValue() and Type(). -func (m VolumeInternalAttributes) ToObjectValue(ctx context.Context) basetypes.ObjectValue { - return types.ObjectValueMust( - m.Type(ctx).(basetypes.ObjectType).AttrTypes, - map[string]attr.Value{ - "storage_location": m.StorageLocation, - "type": m.Type_, - }) -} - -// Type implements basetypes.ObjectValuable. 
-func (m VolumeInternalAttributes) Type(ctx context.Context) attr.Type { - return types.ObjectType{ - AttrTypes: map[string]attr.Type{ - "storage_location": types.StringType, - "type": types.StringType, - }, - } -}