Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
### New Features and Improvements

* Added `expected_workspace_status` to `databricks_mws_workspaces` to support creating workspaces in provisioning status ([#5019](https://github.com/databricks/terraform-provider-databricks/pull/5019))
* Make `account_id` optional in `mws_*` resources ([#5133](https://github.com/databricks/terraform-provider-databricks/pull/5133))

### Bug Fixes

Expand Down
6 changes: 1 addition & 5 deletions docs/resources/mws_customer_managed_keys.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ resource "aws_kms_alias" "managed_services_customer_managed_key_alias" {
}

resource "databricks_mws_customer_managed_keys" "managed_services" {
account_id = var.databricks_account_id
aws_key_info {
key_arn = aws_kms_key.managed_services_customer_managed_key.arn
key_alias = aws_kms_alias.managed_services_customer_managed_key_alias.name
Expand All @@ -88,7 +87,6 @@ variable "cmek_resource_id" {
}

resource "databricks_mws_customer_managed_keys" "managed_services" {
account_id = var.databricks_account_id
gcp_key_info {
kms_key_id = var.cmek_resource_id
}
Expand Down Expand Up @@ -191,7 +189,6 @@ resource "aws_kms_alias" "storage_customer_managed_key_alias" {
}

resource "databricks_mws_customer_managed_keys" "storage" {
account_id = var.databricks_account_id
aws_key_info {
key_arn = aws_kms_key.storage_customer_managed_key.arn
key_alias = aws_kms_alias.storage_customer_managed_key_alias.name
Expand All @@ -213,7 +210,6 @@ variable "cmek_resource_id" {
}

resource "databricks_mws_customer_managed_keys" "storage" {
account_id = var.databricks_account_id
gcp_key_info {
kms_key_id = var.cmek_resource_id
}
Expand All @@ -228,7 +224,7 @@ The following arguments are required:

* `aws_key_info` - This field is a block and is documented below. This conflicts with `gcp_key_info`
* `gcp_key_info` - This field is a block and is documented below. This conflicts with `aws_key_info`
* `account_id` - Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/)
* `account_id` - (Optional) Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/). If not specified, it is taken from the provider configuration.
* `use_cases` - *(since v0.3.4)* List of use cases for which this key will be used. *If you've used the resource before, please add `use_cases = ["MANAGED_SERVICES"]` to keep the previous behaviour.* Possible values are:
* `MANAGED_SERVICES` - for encryption of the workspace objects (notebooks, secrets) that are stored in the control plane
* `STORAGE` - for encryption of the DBFS Storage & Cluster EBS Volumes
Expand Down
8 changes: 1 addition & 7 deletions docs/resources/mws_log_delivery.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ resource "time_sleep" "wait" {
}

resource "databricks_mws_credentials" "log_writer" {
account_id = var.databricks_account_id
credentials_name = "Usage Delivery"
role_arn = aws_iam_role.logdelivery.arn
depends_on = [
Expand All @@ -78,13 +77,11 @@ resource "databricks_mws_credentials" "log_writer" {
}

resource "databricks_mws_storage_configurations" "log_bucket" {
account_id = var.databricks_account_id
storage_configuration_name = "Usage Logs"
bucket_name = aws_s3_bucket.logdelivery.bucket
}

resource "databricks_mws_log_delivery" "usage_logs" {
account_id = var.databricks_account_id
credentials_id = databricks_mws_credentials.log_writer.credentials_id
storage_configuration_id = databricks_mws_storage_configurations.log_bucket.storage_configuration_id
delivery_path_prefix = "billable-usage"
Expand All @@ -94,7 +91,6 @@ resource "databricks_mws_log_delivery" "usage_logs" {
}

resource "databricks_mws_log_delivery" "audit_logs" {
account_id = var.databricks_account_id
credentials_id = databricks_mws_credentials.log_writer.credentials_id
storage_configuration_id = databricks_mws_storage_configurations.log_bucket.storage_configuration_id
delivery_path_prefix = "audit-logs"
Expand All @@ -112,7 +108,6 @@ Common processing scenario is to apply [cost allocation tags](https://docs.aws.a

```hcl
resource "databricks_mws_log_delivery" "usage_logs" {
account_id = var.databricks_account_id
credentials_id = databricks_mws_credentials.log_writer.credentials_id
storage_configuration_id = databricks_mws_storage_configurations.log_bucket.storage_configuration_id
delivery_path_prefix = "billable-usage"
Expand All @@ -128,7 +123,6 @@ JSON files with [static schema](https://docs.databricks.com/administration-guide

```hcl
resource "databricks_mws_log_delivery" "audit_logs" {
account_id = var.databricks_account_id
credentials_id = databricks_mws_credentials.log_writer.credentials_id
storage_configuration_id = databricks_mws_storage_configurations.log_bucket.storage_configuration_id
delivery_path_prefix = "audit-logs"
Expand All @@ -140,7 +134,7 @@ resource "databricks_mws_log_delivery" "audit_logs" {

## Argument reference

* `account_id` - Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/).
* `account_id` - (Optional) Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/). If not specified, it is taken from the provider configuration.
* `config_name` - The optional human-readable name of the log delivery configuration. Defaults to empty.
* `log_type` - The type of log delivery. `BILLABLE_USAGE` and `AUDIT_LOGS` are supported.
* `output_format` - The file type of log delivery. Currently `CSV` (for `BILLABLE_USAGE`) and `JSON` (for `AUDIT_LOGS`) are supported.
Expand Down
6 changes: 1 addition & 5 deletions docs/resources/mws_networks.md
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@ module "vpc" {

resource "databricks_mws_networks" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
network_name = "${local.prefix}-network"
security_group_ids = [module.vpc.default_security_group_id]
subnet_ids = module.vpc.private_subnets
Expand All @@ -88,7 +87,6 @@ In order to create a VPC [that leverages AWS PrivateLink](https://docs.databrick
```hcl
resource "databricks_mws_networks" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
network_name = "${local.prefix}-network"
security_group_ids = [module.vpc.default_security_group_id]
subnet_ids = module.vpc.private_subnets
Expand Down Expand Up @@ -137,7 +135,6 @@ resource "google_compute_router_nat" "nat" {
}

resource "databricks_mws_networks" "this" {
account_id = var.databricks_account_id
network_name = "test-demo-${random_string.suffix.result}"
gcp_network_info {
network_project_id = var.google_project
Expand All @@ -152,7 +149,6 @@ In order to create a VPC [that leverages GCP Private Service Connect](https://do

```hcl
resource "databricks_mws_networks" "this" {
account_id = var.databricks_account_id
network_name = "test-demo-${random_string.suffix.result}"
gcp_network_info {
network_project_id = var.google_project
Expand All @@ -179,7 +175,7 @@ Due to specifics of platform APIs, changing any attribute of network configurati

The following arguments are available:

* `account_id` - Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/)
* `account_id` - (Optional) Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/). If not specified, it is taken from the provider configuration.
* `network_name` - name under which this network is registered
* `vpc_id` - (AWS only) [aws_vpc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/vpc) id
* `subnet_ids` - (AWS only) ids of [aws_subnet](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/subnet)
Expand Down
1 change: 0 additions & 1 deletion docs/resources/mws_private_access_settings.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ It is strongly recommended that customers read the [Enable AWS Private Link](htt
```hcl
resource "databricks_mws_private_access_settings" "pas" {
provider = databricks.mws
account_id = var.databricks_account_id
private_access_settings_name = "Private Access Settings for ${local.prefix}"
region = var.region
public_access_enabled = true
Expand Down
3 changes: 1 addition & 2 deletions docs/resources/mws_storage_configurations.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@ resource "aws_s3_bucket_versioning" "root_versioning" {

resource "databricks_mws_storage_configurations" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
storage_configuration_name = "${var.prefix}-storage"
bucket_name = aws_s3_bucket.root_storage_bucket.bucket
}
Expand All @@ -43,7 +42,7 @@ resource "databricks_mws_storage_configurations" "this" {
The following arguments are available:

* `bucket_name` - name of AWS S3 bucket
* `account_id` - Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/)
* `account_id` - (Optional) Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/). If not specified, it is taken from the provider configuration.
* `storage_configuration_name` - name under which this storage configuration is stored

## Attribute Reference
Expand Down
8 changes: 1 addition & 7 deletions docs/resources/mws_vpc_endpoint.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,6 @@ Once you have created the necessary endpoints, you need to register each of them
```hcl
resource "databricks_mws_vpc_endpoint" "workspace" {
provider = databricks.mws
account_id = var.databricks_account_id
aws_vpc_endpoint_id = aws_vpc_endpoint.workspace.id
vpc_endpoint_name = "VPC Relay for ${module.vpc.vpc_id}"
region = var.region
Expand All @@ -82,7 +81,6 @@ resource "databricks_mws_vpc_endpoint" "workspace" {

resource "databricks_mws_vpc_endpoint" "relay" {
provider = databricks.mws
account_id = var.databricks_account_id
aws_vpc_endpoint_id = aws_vpc_endpoint.relay.id
vpc_endpoint_name = "VPC Relay for ${module.vpc.vpc_id}"
region = var.region
Expand All @@ -95,7 +93,6 @@ Typically the next steps after this would be to create a [databricks_mws_private
```hcl
resource "databricks_mws_workspaces" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
aws_region = var.region
workspace_name = local.prefix
credentials_id = databricks_mws_credentials.this.credentials_id
Expand Down Expand Up @@ -128,7 +125,6 @@ provider "databricks" {

resource "databricks_mws_vpc_endpoint" "workspace" {
provider = databricks.mws
account_id = var.databricks_account_id
vpc_endpoint_name = "PSC Rest API endpoint"
gcp_vpc_endpoint_info {
project_id = var.google_project
Expand All @@ -139,7 +135,6 @@ resource "databricks_mws_vpc_endpoint" "workspace" {

resource "databricks_mws_vpc_endpoint" "relay" {
provider = databricks.mws
account_id = var.databricks_account_id
vpc_endpoint_name = "PSC Relay endpoint"
gcp_vpc_endpoint_info {
project_id = var.google_project
Expand All @@ -154,7 +149,6 @@ Typically the next steps after this would be to create a [databricks_mws_private
```hcl
resource "databricks_mws_workspaces" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
workspace_name = "gcp workspace"
location = var.subnet_region
cloud_resource_container {
Expand All @@ -177,7 +171,7 @@ resource "databricks_mws_workspaces" "this" {

The following arguments are available:

* `account_id` - Account Id that could be found in the Accounts Console for [AWS](https://accounts.cloud.databricks.com/) or [GCP](https://accounts.gcp.databricks.com/)
* `account_id` - (Optional) Account Id that could be found in the Accounts Console for [AWS](https://accounts.cloud.databricks.com/) or [GCP](https://accounts.gcp.databricks.com/). If not specified, it is taken from the provider configuration.
* `aws_vpc_endpoint_id` - (AWS only) ID of configured [aws_vpc_endpoint](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/vpc_endpoint)
* `vpc_endpoint_name` - Name of VPC Endpoint in Databricks Account
* `region` - (AWS only) Region of AWS VPC
Expand Down
23 changes: 7 additions & 16 deletions docs/resources/mws_workspaces.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ To use serverless workspaces, you must enroll in the [Default Storage preview](h

```hcl
resource "databricks_mws_workspaces" "serverless_workspace" {
account_id = "" # Your Databricks account ID
workspace_name = "serverless-workspace"
aws_region = "us-east-1"
compute_mode = "SERVERLESS"
Expand All @@ -45,30 +44,28 @@ variable "databricks_account_id" {
}

provider "databricks" {
alias = "mws"
host = "https://accounts.cloud.databricks.com"
alias = "mws"
host = "https://accounts.cloud.databricks.com"
account_id = var.account_id
}

// register cross-account ARN
resource "databricks_mws_credentials" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
credentials_name = "${var.prefix}-creds"
role_arn = var.crossaccount_arn
}

// register root bucket
resource "databricks_mws_storage_configurations" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
storage_configuration_name = "${var.prefix}-storage"
bucket_name = var.root_bucket
}

// register VPC
resource "databricks_mws_networks" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
network_name = "${var.prefix}-network"
vpc_id = var.vpc_id
subnet_ids = var.subnets_private
Expand All @@ -78,7 +75,6 @@ resource "databricks_mws_networks" "this" {
// create workspace in given VPC with DBFS on root bucket
resource "databricks_mws_workspaces" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
workspace_name = var.prefix
aws_region = var.region

Expand Down Expand Up @@ -130,7 +126,6 @@ resource "aws_iam_role_policy" "this" {

resource "databricks_mws_credentials" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
credentials_name = "${local.prefix}-creds"
role_arn = aws_iam_role.cross_account_role.arn
}
Expand Down Expand Up @@ -180,14 +175,12 @@ resource "aws_s3_bucket_policy" "root_bucket_policy" {

resource "databricks_mws_storage_configurations" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
storage_configuration_name = "${local.prefix}-storage"
bucket_name = aws_s3_bucket.root_storage_bucket.bucket
}

resource "databricks_mws_workspaces" "this" {
provider = databricks.mws
account_id = var.databricks_account_id
workspace_name = local.prefix
aws_region = "us-east-1"

Expand Down Expand Up @@ -219,14 +212,14 @@ variable "databricks_google_service_account" {}
variable "google_project" {}

provider "databricks" {
alias = "mws"
host = "https://accounts.gcp.databricks.com"
alias = "mws"
host = "https://accounts.gcp.databricks.com"
account_id = var.account_id
}


// register VPC
resource "databricks_mws_networks" "this" {
account_id = var.databricks_account_id
network_name = "${var.prefix}-network"
gcp_network_info {
network_project_id = var.google_project
Expand All @@ -240,7 +233,6 @@ resource "databricks_mws_networks" "this" {

// create workspace in given VPC
resource "databricks_mws_workspaces" "this" {
account_id = var.databricks_account_id
workspace_name = var.prefix
location = var.subnet_region
cloud_resource_container {
Expand Down Expand Up @@ -274,7 +266,6 @@ data "google_client_config" "current" {

resource "databricks_mws_workspaces" "this" {
provider = databricks.accounts
account_id = var.databricks_account_id
workspace_name = var.prefix
location = data.google_client_config.current.region

Expand All @@ -292,7 +283,7 @@ resource "databricks_mws_workspaces" "this" {

The following arguments are available:

* `account_id` - Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/).
* `account_id` - (Optional) Account Id that could be found in the top right corner of [Accounts Console](https://accounts.cloud.databricks.com/). If not specified, it is taken from the provider configuration.
* `deployment_name` - (Optional) part of URL as in `https://<prefix>-<deployment-name>.cloud.databricks.com`. Deployment name cannot be used until a deployment name prefix is defined. Please contact your Databricks representative. Once a new deployment prefix is added/updated, it only will affect the new workspaces created.
* `workspace_name` - name of the workspace, will appear on UI.
* `network_id` - (Optional) `network_id` from [networks](mws_networks.md).
Expand Down
4 changes: 2 additions & 2 deletions mws/mws.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ type GcpNetworkInfo struct {

// Network is the object that contains all the information for BYOVPC
type Network struct {
AccountID string `json:"account_id" tf:"force_new"`
AccountID string `json:"account_id,omitempty" tf:"computed,force_new"`
NetworkID string `json:"network_id,omitempty" tf:"computed"`
NetworkName string `json:"network_name" tf:"force_new"`
VPCID string `json:"vpc_id,omitempty" tf:"force_new"`
Expand All @@ -100,7 +100,7 @@ type GcpVpcEndpointInfo struct {
type VPCEndpoint struct {
VPCEndpointID string `json:"vpc_endpoint_id,omitempty" tf:"computed"`
AwsVPCEndpointID string `json:"aws_vpc_endpoint_id,omitempty"`
AccountID string `json:"account_id,omitempty"`
AccountID string `json:"account_id,omitempty" tf:"computed,force_new"`
VPCEndpointName string `json:"vpc_endpoint_name"`
AwsVPCEndpointServiceID string `json:"aws_endpoint_service_id,omitempty" tf:"computed"`
AWSAccountID string `json:"aws_account_id,omitempty" tf:"computed"`
Expand Down
9 changes: 8 additions & 1 deletion mws/resource_mws_customer_managed_keys.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ type CustomerManagedKey struct {
CustomerManagedKeyID string `json:"customer_managed_key_id,omitempty" tf:"computed"`
AwsKeyInfo *AwsKeyInfo `json:"aws_key_info,omitempty" tf:"force_new,conflicts:gcp_key_info"`
GcpKeyInfo *GcpKeyInfo `json:"gcp_key_info,omitempty" tf:"force_new,conflicts:aws_key_info"`
AccountID string `json:"account_id" tf:"force_new"`
AccountID string `json:"account_id,omitempty" tf:"computed,force_new"`
CreationTime int64 `json:"creation_time,omitempty" tf:"computed"`
UseCases []string `json:"use_cases"`
}
Expand Down Expand Up @@ -78,6 +78,13 @@ func ResourceMwsCustomerManagedKeys() common.Resource {
Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
var cmk CustomerManagedKey
common.DataToStructPointer(d, s, &cmk)
if cmk.AccountID == "" {
if c.Config == nil || c.Config.AccountID == "" {
return fmt.Errorf("account_id is required in the provider block or in the resource")
}
cmk.AccountID = c.Config.AccountID
d.Set("account_id", cmk.AccountID)
}
customerManagedKeyData, err := NewCustomerManagedKeysAPI(ctx, c).Create(cmk)
if err != nil {
return err
Expand Down
Loading
Loading