diff --git a/.changelog/3832.txt b/.changelog/3832.txt new file mode 100644 index 0000000000..91690adfad --- /dev/null +++ b/.changelog/3832.txt @@ -0,0 +1,23 @@ +```release-note:new-datasource +mongodbatlas_stream_workspace +``` + +```release-note:new-datasource +mongodbatlas_stream_workspaces +``` + +```release-note:new-resource +mongodbatlas_stream_workspace +``` + +```release-note:note +data-source/mongodbatlas_stream_instance: Deprecates the `mongodbatlas_stream_instance` datasource, use `mongodbatlas_stream_workspace` +``` + +```release-note:note +data-source/mongodbatlas_stream_instances: Deprecates the `mongodbatlas_stream_instances` datasource, use `mongodbatlas_stream_workspaces` +``` + +```release-note:note +resource/mongodbatlas_stream_instance: Deprecates the `mongodbatlas_stream_instance` resource, use `mongodbatlas_stream_workspace` +``` diff --git a/docs/data-sources/stream_instance.md b/docs/data-sources/stream_instance.md index 715cb6ecb5..5ea6733efa 100644 --- a/docs/data-sources/stream_instance.md +++ b/docs/data-sources/stream_instance.md @@ -4,8 +4,28 @@ subcategory: "Streams" # Data Source: mongodbatlas_stream_instance +~> **DEPRECATED:** This data source is deprecated. Please use [`mongodbatlas_stream_workspace`](stream_workspace) instead. + `mongodbatlas_stream_instance` describes a stream instance. +## Migration to stream_workspace + +To migrate from `mongodbatlas_stream_instance` to `mongodbatlas_stream_workspace`, update your data source configuration. 
See [Migration Guide: Stream Instance to Stream Workspace](../guides/stream-instance-to-stream-workspace-migration-guide) for step-by-step instructions and examples: + +```terraform +# New (recommended) +data "mongodbatlas_stream_workspace" "example" { + project_id = "" + workspace_name = "" # Changed from instance_name +} + +# Old (deprecated) +data "mongodbatlas_stream_instance" "example" { + project_id = "" + instance_name = "" +} +``` + ## Example Usage ```terraform diff --git a/docs/data-sources/stream_instances.md b/docs/data-sources/stream_instances.md index df30fff15d..9a7db0417b 100644 --- a/docs/data-sources/stream_instances.md +++ b/docs/data-sources/stream_instances.md @@ -4,8 +4,26 @@ subcategory: "Streams" # Data Source: mongodbatlas_stream_instances +~> **DEPRECATED:** This data source is deprecated. Please use [`mongodbatlas_stream_workspaces`](stream_workspaces.md) instead. + `mongodbatlas_stream_instances` describes the stream instances defined in a project. +## Migration to stream_workspaces + +To migrate from `mongodbatlas_stream_instances` to `mongodbatlas_stream_workspaces`, update your data source configuration. See [Migration Guide: Stream Instance to Stream Workspace](../guides/stream-instance-to-stream-workspace-migration-guide) for step-by-step instructions and examples: + +```terraform +# New (recommended) +data "mongodbatlas_stream_workspaces" "example" { + project_id = "" +} + +# Old (deprecated) +data "mongodbatlas_stream_instances" "example" { + project_id = "" +} +``` + ## Example Usage ```terraform diff --git a/docs/data-sources/stream_workspace.md b/docs/data-sources/stream_workspace.md new file mode 100644 index 0000000000..8588c279f9 --- /dev/null +++ b/docs/data-sources/stream_workspace.md @@ -0,0 +1,45 @@ +--- +subcategory: "Streams" +--- + +# Data Source: mongodbatlas_stream_workspace + +`mongodbatlas_stream_workspace` describes a stream workspace that contains configurations for stream processing. 
+
+~> **NOTE:** Use this data source for new configurations instead of `mongodbatlas_stream_instance`.
+
+## Example Usage
+
+```terraform
+data "mongodbatlas_stream_workspace" "example" {
+  project_id     = ""
+  workspace_name = ""
+}
+```
+
+## Migration from stream_instance
+
+If you're migrating from the deprecated `mongodbatlas_stream_instance` data source, see the [Migration Guide: Stream Instance to Stream Workspace](../guides/stream-instance-to-stream-workspace-migration-guide) for step-by-step instructions and examples.
+
+## Argument Reference
+
+* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
+* `workspace_name` - (Required) Label that identifies the stream workspace.
+
+## Attributes Reference
+
+* `data_process_region` - Defines the cloud service provider and region where MongoDB Cloud performs stream processing. See [data process region](#data-process-region).
+* `hostnames` - List that contains the hostnames assigned to the stream workspace.
+* `stream_config` - Defines the configuration options for an Atlas Stream Processing Instance. See [stream config](#stream-config)
+
+
+### Data Process Region
+
+* `cloud_provider` - Label that identifies the cloud service provider where MongoDB Cloud performs stream processing. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+* `region` - Name of the cloud provider region hosting Atlas Stream Processing. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+
+### Stream Config
+
+* `tier` - Selected tier for the Stream Workspace. Configures Memory / VCPU allowances. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+ +To learn more, see: [MongoDB Atlas API - Stream Workspace](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) Documentation. diff --git a/docs/data-sources/stream_workspaces.md b/docs/data-sources/stream_workspaces.md new file mode 100644 index 0000000000..da468ce7d3 --- /dev/null +++ b/docs/data-sources/stream_workspaces.md @@ -0,0 +1,55 @@ +--- +subcategory: "Streams" +--- + +# Data Source: mongodbatlas_stream_workspaces + +`mongodbatlas_stream_workspaces` describes the stream workspaces defined in a project. + +~> **NOTE:** Use this data source for new configurations instead of `mongodbatlas_stream_instances`. + +## Example Usage + +```terraform +data "mongodbatlas_stream_workspaces" "test" { + project_id = "" +} +``` + +## Migration from stream_instances + +If you're migrating from the deprecated `mongodbatlas_stream_instances` data source, see the [Migration Guide: Stream Instance to Stream Workspace](../guides/stream-instance-to-stream-workspace-migration-guide) for step-by-step instructions and examples. + +## Argument Reference + +* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. + +* `page_num` - (Optional) Number of the page that displays the current set of the total objects that the response returns. Defaults to `1`. +* `items_per_page` - (Optional) Number of items that the response returns per page, up to a maximum of `500`. Defaults to `100`. + + +## Attributes Reference + +In addition to all arguments above, it also exports the following attributes: + +* `results` - A list where each element contains a Stream Workspace. +* `total_count` - Count of the total number of items in the result set. The count might be greater than the number of objects in the results array if the entire result set is paginated. + +### Stream Workspace + +* `project_id` - Unique 24-hexadecimal digit string that identifies your project. 
+* `workspace_name` - Label that identifies the stream workspace.
+* `data_process_region` - Defines the cloud service provider and region where MongoDB Cloud performs stream processing. See [data process region](#data-process-region).
+* `hostnames` - List that contains the hostnames assigned to the stream workspace.
+* `stream_config` - Defines the configuration options for an Atlas Stream Processing Instance. See [stream config](#stream-config)
+
+### Data Process Region
+
+* `cloud_provider` - Label that identifies the cloud service provider where MongoDB Cloud performs stream processing. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+* `region` - Name of the cloud provider region hosting Atlas Stream Processing. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+
+### Stream Config
+
+* `tier` - Selected tier for the Stream Workspace. Configures Memory / VCPU allowances. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values.
+
+To learn more, see: [MongoDB Atlas API - Stream Workspace](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) Documentation.
diff --git a/docs/guides/stream-instance-to-stream-workspace-migration-guide.md b/docs/guides/stream-instance-to-stream-workspace-migration-guide.md new file mode 100644 index 0000000000..c18fd93572 --- /dev/null +++ b/docs/guides/stream-instance-to-stream-workspace-migration-guide.md @@ -0,0 +1,212 @@ +--- +page_title: "Migration Guide: Stream Instance to Stream Workspace" +--- + +# Migration Guide: Stream Instance to Stream Workspace + +**Objective**: This guide explains how to replace the deprecated `mongodbatlas_stream_instance` resource with the `mongodbatlas_stream_workspace` resource. For data source migrations, refer to the [output changes](#output-changes) section. + +## Why do we have both `mongodbatlas_stream_instance` and `mongodbatlas_stream_workspace` resources? + +Both `mongodbatlas_stream_instance` and `mongodbatlas_stream_workspace` resources currently allow customers to manage MongoDB Atlas Stream Processing workspaces. Initially, only `mongodbatlas_stream_instance` existed. However, MongoDB Atlas has evolved its terminology and API to use "workspace" instead of "instance" for stream processing environments. To align with this change and provide a clearer, more consistent naming convention, we created the `mongodbatlas_stream_workspace` resource as a direct replacement. + +## If I am using `mongodbatlas_stream_instance`, why should I move to `mongodbatlas_stream_workspace`? + +The `mongodbatlas_stream_workspace` resource provides the exact same functionality as `mongodbatlas_stream_instance` but with updated terminology that aligns with MongoDB Atlas's current naming conventions. This change provides: + +1. **Consistent Terminology**: Aligns with MongoDB Atlas's current documentation and UI terminology +2. **Future-Proof**: New stream processing features will be developed using the workspace terminology +3. 
**Clearer Intent**: The term "workspace" better describes the stream processing environment + +To maintain consistency with MongoDB Atlas's terminology and ensure you're using the most current resource names, we recommend migrating to `mongodbatlas_stream_workspace`. The `mongodbatlas_stream_instance` resource is deprecated and will be removed in future major versions of the provider. + +## How should I move to `mongodbatlas_stream_workspace`? + +To move from `mongodbatlas_stream_instance` to `mongodbatlas_stream_workspace` we offer two alternatives: +1. [(Recommended) Use the `moved` block](#migration-using-the-moved-block-recommended) +2. [Manually use the import command with the `mongodbatlas_stream_workspace` resource](#migration-using-import) + +### Best Practices Before Migrating + +Before doing any migration, create a backup of your [Terraform state file](https://developer.hashicorp.com/terraform/cli/commands/state). + +## Migration using the Moved block (recommended) + +This is our recommended method to migrate from `mongodbatlas_stream_instance` to `mongodbatlas_stream_workspace`. The [moved block](https://developer.hashicorp.com/terraform/language/moved) is a Terraform feature that allows to move between resource types. It's conceptually similar to running `removed` and `import` commands separately but it brings the convenience of doing it in one step. + +**Prerequisites:** + - Terraform version 1.8 or later is required, more information in the [State Move page](https://developer.hashicorp.com/terraform/plugin/framework/resources/state-move). + - MongoDB Atlas Provider version 2.1 or later is required. + +The basic experience when using the `moved` block is as follows: +1. Before starting, run `terraform plan` to make sure that there are no planned changes. +2. Add the `mongodbatlas_stream_workspace` resource definition. +3. Comment out or delete the `mongodbatlas_stream_instance` resource definition. +4. 
Update the references from your previous stream instance resource: `mongodbatlas_stream_instance.this.XXXX` to the new `mongodbatlas_stream_workspace.this.XXX`. + - Change `instance_name` to `workspace_name` in your references + - Double check [output-changes](#output-changes) to ensure the underlying configuration stays unchanged. +5. Add the `moved` block to your configuration file, e.g.: +```terraform +moved { + from = mongodbatlas_stream_instance.this + to = mongodbatlas_stream_workspace.this +} +``` +6. Run `terraform plan` and make sure that there are no planned changes, only the moved block should be shown. This is an example output of a successful plan: +```text + # mongodbatlas_stream_instance.this has moved to mongodbatlas_stream_workspace.this + resource "mongodbatlas_stream_workspace" "this" { + workspace_name = "my-workspace" + # (6 unchanged attributes hidden) + } + + Plan: 0 to add, 0 to change, 0 to destroy. +``` + +7. Run `terraform apply` to apply the changes. The `mongodbatlas_stream_instance` resource will be removed from the Terraform state and the `mongodbatlas_stream_workspace` resource will be added. +8. Hashicorp recommends to keep the move block in your configuration file to help track the migrations, however you can delete the `moved` block from your configuration file without any adverse impact. + +## Migration using import + +**Note**: We recommend the [`moved` block](#migration-using-the-moved-block-recommended) method as it's more convenient and less error-prone. + +This method uses [Terraform native tools](https://developer.hashicorp.com/terraform/language/import/generating-configuration) and works if you: +1. Have an existing stream workspace without any Terraform configuration and want to import and manage it with Terraform. +2. Have existing `mongodbatlas_stream_instance` resource(s) but you can't use the [recommended approach](#migration-using-the-moved-block-recommended). + +The process works as follow: +1. 
If you have an existing `mongodbatlas_stream_instance` resource, remove it from your configuration and delete it from the state file, e.g.: `terraform state rm mongodbatlas_stream_instance.this`. +2. Find the import IDs of the stream workspaces you want to migrate: `{PROJECT_ID}-{WORKSPACE_NAME}`, such as `664619d870c247237f4b86a6-my-workspace` +3. Import it using the `terraform import` command, e.g.: `terraform import mongodbatlas_stream_workspace.this 664619d870c247237f4b86a6-my-workspace`. +4. Run `terraform plan -generate-config-out=stream_workspace.tf`. This should generate a `stream_workspace.tf` file. +5. Update the references from your previous stream instance resource: `mongodbatlas_stream_instance.this.XXXX` to the new `mongodbatlas_stream_workspace.this.XXX`. + - Change `instance_name` to `workspace_name` in your references + - Double check [output-changes](#output-changes) to ensure the underlying configuration stays unchanged. +6. Run `terraform apply`. You should see the resource(s) imported. + +## Main Changes Between `mongodbatlas_stream_instance` and `mongodbatlas_stream_workspace` + +The primary change is the field name for identifying the stream processing workspace: + +1. **Field Name**: `instance_name` is replaced with `workspace_name` +2. **Resource Name**: `mongodbatlas_stream_instance` becomes `mongodbatlas_stream_workspace` +3. **Data Source Names**: + - `mongodbatlas_stream_instance` becomes `mongodbatlas_stream_workspace` + - `mongodbatlas_stream_instances` becomes `mongodbatlas_stream_workspaces` + +All other functionality remains identical. 
+ +### Example 1: Old Configuration (`mongodbatlas_stream_instance`) + +```terraform +resource "mongodbatlas_stream_instance" "this" { + project_id = var.project_id + instance_name = "my-stream-workspace" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} + +data "mongodbatlas_stream_instance" "this" { + project_id = var.project_id + instance_name = mongodbatlas_stream_instance.this.instance_name +} + +data "mongodbatlas_stream_instances" "all" { + project_id = var.project_id +} +``` + +### Example 2: New Configuration (`mongodbatlas_stream_workspace`) + +```terraform +resource "mongodbatlas_stream_workspace" "this" { + project_id = var.project_id + workspace_name = "my-stream-workspace" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} + +data "mongodbatlas_stream_workspace" "this" { + project_id = var.project_id + workspace_name = mongodbatlas_stream_workspace.this.workspace_name +} + +data "mongodbatlas_stream_workspaces" "all" { + project_id = var.project_id +} +``` + +### Output Changes + +The only change in outputs is the field name: +- **Field Name Change**: + - Before: `mongodbatlas_stream_instance.this.instance_name` + - After: `mongodbatlas_stream_workspace.this.workspace_name` + +All other attributes (`project_id`, `data_process_region`, `stream_config`, `hostnames`, etc.) remain exactly the same. 
+ +## Complete Migration Example with Moved Block + +Here's a complete example showing the migration process: + +### Step 1: Original Configuration +```terraform +resource "mongodbatlas_stream_instance" "example" { + project_id = var.project_id + instance_name = "my-workspace" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} + +output "stream_hostnames" { + value = mongodbatlas_stream_instance.example.hostnames +} +``` + +### Step 2: Add Moved Block and New Resource +```terraform +# Add the moved block +moved { + from = mongodbatlas_stream_instance.example + to = mongodbatlas_stream_workspace.example +} + +# Replace with new resource (note: instance_name becomes workspace_name) +resource "mongodbatlas_stream_workspace" "example" { + project_id = var.project_id + workspace_name = "my-workspace" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} + +# Update output references +output "stream_hostnames" { + value = mongodbatlas_stream_workspace.example.hostnames +} +``` + +### Step 3: Apply and Clean Up +After running `terraform apply`, you can optionally remove the `moved` block: + +```terraform +resource "mongodbatlas_stream_workspace" "example" { + project_id = var.project_id + workspace_name = "my-workspace" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} + +output "stream_hostnames" { + value = mongodbatlas_stream_workspace.example.hostnames +} +``` diff --git a/docs/resources/stream_instance.md b/docs/resources/stream_instance.md index 45a84650d4..be9e091081 100644 --- a/docs/resources/stream_instance.md +++ b/docs/resources/stream_instance.md @@ -4,8 +4,17 @@ subcategory: "Streams" # Resource: mongodbatlas_stream_instance +~> **DEPRECATED:** This resource is deprecated. Please use [`mongodbatlas_stream_workspace`](stream_workspace) instead. + `mongodbatlas_stream_instance` provides a Stream Instance resource. 
The resource lets you create, edit, and delete stream instances in a project. +## Migration to stream_workspace + +`mongodbatlas_stream_instance` resources can be moved to `mongodbatlas_stream_workspace` in MongoDB Atlas Provider v2.2.0 and later with Terraform v1.8 and later. + +More information about moving resources can be found in our [Migration Guide](https://registry.terraform.io/providers/mongodb/mongodbatlas/latest/docs/guides/stream-instance-to-stream-workspace-migration-guide) and in the Terraform documentation [here](https://developer.hashicorp.com/terraform/language/moved) and [here](https://developer.hashicorp.com/terraform/language/modules/develop/refactoring). + + ## Example Usage ```terraform @@ -20,7 +29,8 @@ resource "mongodbatlas_stream_instance" "test" { ``` ### Further Examples -- [Atlas Stream Instance](https://github.com/mongodb/terraform-provider-mongodbatlas/tree/v2.1.0/examples/mongodbatlas_stream_instance) +- [Atlas Stream Instance (Deprecated)](https://github.com/mongodb/terraform-provider-mongodbatlas/tree/master/examples/mongodbatlas_stream_instance) +- [Atlas Stream Workspace (Recommended)](https://github.com/mongodb/terraform-provider-mongodbatlas/tree/master/examples/mongodbatlas_stream_workspace) ## Argument Reference diff --git a/docs/resources/stream_workspace.md b/docs/resources/stream_workspace.md new file mode 100644 index 0000000000..38bf0625b9 --- /dev/null +++ b/docs/resources/stream_workspace.md @@ -0,0 +1,77 @@ +--- +subcategory: "Streams" +--- + +# Resource: mongodbatlas_stream_workspace + +`mongodbatlas_stream_workspace` provides a Stream Workspace resource. The resource lets you create, edit, and delete stream workspaces in a project. + +~> **NOTE:** This resource is an alias for `mongodbatlas_stream_instance`. Use this resource for new configurations. 
+ +## Example Usage + +```terraform +resource "mongodbatlas_stream_workspace" "test" { + project_id = var.project_id + workspace_name = "WorkspaceName" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} +``` + +### Further Examples +- [Atlas Stream Workspace](https://github.com/mongodb/terraform-provider-mongodbatlas/tree/master/examples/mongodbatlas_stream_workspace) + +## Migration from stream_instance + +To migrate from `mongodbatlas_stream_instance` to `mongodbatlas_stream_workspace`, use the following `moved` block: + +```terraform +moved { + from = mongodbatlas_stream_instance.example + to = mongodbatlas_stream_workspace.example +} + +resource "mongodbatlas_stream_workspace" "example" { + project_id = var.project_id + workspace_name = "WorkspaceName" # Changed from instance_name + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } +} +``` + +## Argument Reference + +* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. +* `workspace_name` - (Required) Label that identifies the stream workspace. +* `data_process_region` - (Required) Cloud service provider and region where MongoDB Cloud performs stream processing. See [data process region](#data-process-region). +* `stream_config` - (Optional) Configuration options for an Atlas Stream Processing Instance. See [stream config](#stream-config) + + +### Data Process Region + +* `cloud_provider` - (Required) Label that identifies the cloud service provider where MongoDB Cloud performs stream processing. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values. +* `region` - (Required) Name of the cloud provider region hosting Atlas Stream Processing. 
The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values. + +### Stream Config + +* `tier` - (Required) Selected tier for the Stream Instance. Configures Memory / VCPU allowances. The [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/creategroupstreamworkspace) describes the valid values. + + +## Attributes Reference + +In addition to all arguments above, the following attributes are exported: + +* `hostnames` - List that contains the hostnames assigned to the stream workspace. + +## Import + +You can import stream workspace resource using the project ID and workspace name, in the format `PROJECT_ID-WORKSPACE_NAME`. For example: + +``` +$ terraform import mongodbatlas_stream_workspace.test 650972848269185c55f40ca1-WorkspaceName +``` diff --git a/examples/migrate_stream_instance_to_stream_workspace/README.md b/examples/migrate_stream_instance_to_stream_workspace/README.md new file mode 100644 index 0000000000..255fe231e6 --- /dev/null +++ b/examples/migrate_stream_instance_to_stream_workspace/README.md @@ -0,0 +1,87 @@ +# Migration from `mongodbatlas_stream_instance` to `mongodbatlas_stream_workspace` + +This example demonstrates how to migrate a `mongodbatlas_stream_instance` resource to `mongodbatlas_stream_workspace` using the `moved` block. For more details, please refer to the [Migration Guide: Stream Instance to Stream Workspace](https://registry.terraform.io/providers/mongodb/mongodbatlas/latest/docs/guides/stream-instance-to-stream-workspace-migration-guide). + +## Overview + +The `mongodbatlas_stream_workspace` resource is the new preferred way to manage MongoDB Atlas Stream Processing instances. It provides the same functionality as `mongodbatlas_stream_instance` but uses updated terminology that aligns with the MongoDB Atlas UI. 
+ +**Key Changes:** +- Resource name: `mongodbatlas_stream_instance` → `mongodbatlas_stream_workspace` +- Field name: `instance_name` → `workspace_name` +- All other fields remain the same + +## Migration Steps + +This example shows the complete migration process: + +1. [Create the `mongodbatlas_stream_instance`](#step-1-create-the-stream-instance) (skip if you already have one) +2. [Add the `mongodbatlas_stream_workspace` configuration](#step-2-add-stream-workspace-configuration) +3. [Add the moved block](#step-3-add-moved-block) +4. [Perform the migration](#step-4-perform-the-migration) +5. [Clean up](#step-5-clean-up) + +## Step 1: Create the Stream Instance + +**Note**: Skip this step if you already have a `mongodbatlas_stream_instance` resource. + +1. Uncomment the code in [stream_instance.tf](stream_instance.tf) +2. Comment the code in [stream_workspace.tf](stream_workspace.tf) +3. Create a `terraform.tfvars` file: +```terraform +project_id = "your-project-id" +workspace_name = "my-stream-workspace" +``` +4. Run `terraform init && terraform apply` + +## Step 2: Add Stream Workspace Configuration + +1. Comment out the `mongodbatlas_stream_instance` in [stream_instance.tf](stream_instance.tf) +2. Uncomment the `mongodbatlas_stream_workspace` in [stream_workspace.tf](stream_workspace.tf) +3. Update any references from `mongodbatlas_stream_instance.example` to `mongodbatlas_stream_workspace.example` + +## Step 3: Add Moved Block + +The moved block in [stream_workspace.tf](stream_workspace.tf) tells Terraform to migrate the state: + +```terraform +moved { + from = mongodbatlas_stream_instance.example + to = mongodbatlas_stream_workspace.example +} +``` + +## Step 4: Perform the Migration + +1. Run `terraform validate` to ensure there are no configuration errors +2. 
Run `terraform plan` - you should see: + ``` + Terraform will perform the following actions: + # mongodbatlas_stream_instance.example has moved to mongodbatlas_stream_workspace.example + resource "mongodbatlas_stream_workspace" "example" { + workspace_name = "my-stream-workspace" + # (other unchanged attributes hidden) + } + + Plan: 0 to add, 0 to change, 0 to destroy. + ``` +3. Run `terraform apply` and type `yes` to confirm the migration + +## Step 5: Clean Up + +After successful migration: +1. Remove the commented `mongodbatlas_stream_instance` resource from [stream_instance.tf](stream_instance.tf) +2. The `moved` block can be kept for historical reference or removed after the migration is complete + +## Troubleshooting + +- **Reference errors**: Ensure all references are updated from `mongodbatlas_stream_instance.example` to `mongodbatlas_stream_workspace.example` +- **Field name errors**: Make sure to use `workspace_name` instead of `instance_name` in the new resource +- **Plan changes**: If you see unexpected changes, verify that all field values match between the old and new resources + +## Next Steps + +After migration, you can: +- Update any related resources (stream connections, processors) to reference the new workspace +- Use the new `mongodbatlas_stream_workspace` data source for lookups +- Refer to the [stream workspace documentation](https://registry.terraform.io/providers/mongodb/mongodbatlas/latest/docs/resources/stream_workspace) for additional configuration options diff --git a/examples/migrate_stream_instance_to_stream_workspace/stream_instance.tf b/examples/migrate_stream_instance_to_stream_workspace/stream_instance.tf new file mode 100644 index 0000000000..74f83c0a9c --- /dev/null +++ b/examples/migrate_stream_instance_to_stream_workspace/stream_instance.tf @@ -0,0 +1,12 @@ +# BEFORE: Original stream instance resource (comment out after migration) +# resource "mongodbatlas_stream_instance" "example" { +# project_id = var.project_id +# 
instance_name = var.workspace_name +# data_process_region = { +# region = "VIRGINIA_USA" +# cloud_provider = "AWS" +# } +# stream_config = { +# tier = "SP30" +# } +# } diff --git a/examples/migrate_stream_instance_to_stream_workspace/stream_workspace.tf b/examples/migrate_stream_instance_to_stream_workspace/stream_workspace.tf new file mode 100644 index 0000000000..703701c2a4 --- /dev/null +++ b/examples/migrate_stream_instance_to_stream_workspace/stream_workspace.tf @@ -0,0 +1,18 @@ +# AFTER: New stream workspace resource +resource "mongodbatlas_stream_workspace" "example" { + project_id = var.project_id + workspace_name = var.workspace_name # Note: instance_name -> workspace_name + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } + stream_config = { + tier = "SP30" + } +} + +# Moved block to migrate from stream_instance to stream_workspace +moved { + from = mongodbatlas_stream_instance.example + to = mongodbatlas_stream_workspace.example +} diff --git a/examples/migrate_stream_instance_to_stream_workspace/variables.tf b/examples/migrate_stream_instance_to_stream_workspace/variables.tf new file mode 100644 index 0000000000..27f0e3fe63 --- /dev/null +++ b/examples/migrate_stream_instance_to_stream_workspace/variables.tf @@ -0,0 +1,9 @@ +variable "project_id" { + description = "The MongoDB Atlas project ID" + type = string +} + +variable "workspace_name" { + description = "The name of the stream workspace (formerly instance_name)" + type = string +} diff --git a/examples/migrate_stream_instance_to_stream_workspace/versions.tf b/examples/migrate_stream_instance_to_stream_workspace/versions.tf new file mode 100644 index 0000000000..97084d2410 --- /dev/null +++ b/examples/migrate_stream_instance_to_stream_workspace/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_version = ">= 1.8" + required_providers { + mongodbatlas = { + source = "mongodb/mongodbatlas" + version = "~> 2.1" + } + } +} diff --git 
a/examples/mongodbatlas_stream_instance/README.md b/examples/mongodbatlas_stream_instance/README.md index bfc8fb370d..48ac74a09e 100644 --- a/examples/mongodbatlas_stream_instance/README.md +++ b/examples/mongodbatlas_stream_instance/README.md @@ -1,7 +1,13 @@ # MongoDB Atlas Provider - Atlas Stream Instance defined in a Project +> **DEPRECATED:** This example uses the deprecated `mongodbatlas_stream_instance` resource. Please use the [`mongodbatlas_stream_workspace`](../mongodbatlas_stream_workspace/) example instead. + This example shows how to use Atlas Stream Instances in Terraform. It also creates a project, which is a prerequisite. +## Migration to stream_workspace + +To migrate to the new `mongodbatlas_stream_workspace` resource, see the [stream_workspace example](../mongodbatlas_stream_workspace/) which demonstrates the updated syntax. + You must set the following variables: - `atlas_client_id`: MongoDB Atlas Service Account Client ID diff --git a/examples/mongodbatlas_stream_instance/main.tf b/examples/mongodbatlas_stream_instance/main.tf index 03926bea81..6bb111879e 100644 --- a/examples/mongodbatlas_stream_instance/main.tf +++ b/examples/mongodbatlas_stream_instance/main.tf @@ -1,10 +1,20 @@ +# DEPRECATED: This example uses the deprecated mongodbatlas_stream_instance resource. +# For new deployments, use mongodbatlas_stream_workspace instead. +# See ../mongodbatlas_stream_workspace/ for the updated example. 
+ resource "mongodbatlas_project" "example" { name = "project-name" org_id = var.org_id } +# Add this moved block to migrate from stream_instance to stream_workspace: +# moved { +# from = mongodbatlas_stream_instance.example +# to = mongodbatlas_stream_workspace.example +# } + resource "mongodbatlas_stream_instance" "example" { - project_id = mongodbatlas_project.example + project_id = mongodbatlas_project.example.id instance_name = "InstanceName" data_process_region = { region = "VIRGINIA_USA" diff --git a/examples/mongodbatlas_stream_workspace/README.md b/examples/mongodbatlas_stream_workspace/README.md new file mode 100644 index 0000000000..0b3541eef9 --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/README.md @@ -0,0 +1,15 @@ +# MongoDB Atlas Provider - Atlas Stream Workspace defined in a Project + +This example shows how to use Atlas Stream Workspaces in Terraform. It also creates a project, which is a prerequisite. + +You must set the following variables: + +- `atlas_client_id`: MongoDB Atlas Service Account Client ID. +- `atlas_client_secret`: MongoDB Atlas Service Account Client Secret. +- `org_id`: Unique 24-hexadecimal digit string that identifies the Organization that must contain the project. + +To learn more, see the [Stream Workspace Documentation](https://www.mongodb.com/docs/atlas/atlas-sp/manage-processing-instance/#configure-a-stream-processing-instance). + +## Migration from stream_instance + +See the [Migration Guide: Stream Instance to Stream Workspace](../migrate_stream_instance_to_stream_workspace/) for step-by-step instructions and examples for migrating from the deprecated `mongodbatlas_stream_instance` resource. 
diff --git a/examples/mongodbatlas_stream_workspace/main.tf b/examples/mongodbatlas_stream_workspace/main.tf new file mode 100644 index 0000000000..d174ccc5f8 --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/main.tf @@ -0,0 +1,25 @@ +resource "mongodbatlas_project" "example" { + name = "project-name" + org_id = var.org_id +} + +resource "mongodbatlas_stream_workspace" "example" { + project_id = mongodbatlas_project.example.id + workspace_name = "WorkspaceName" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } + stream_config = { + tier = "SP30" + } +} + +data "mongodbatlas_stream_workspace" "example" { + project_id = mongodbatlas_project.example.id + workspace_name = mongodbatlas_stream_workspace.example.workspace_name +} + +data "mongodbatlas_stream_workspaces" "example" { + project_id = mongodbatlas_project.example.id +} diff --git a/examples/mongodbatlas_stream_workspace/outputs.tf b/examples/mongodbatlas_stream_workspace/outputs.tf new file mode 100644 index 0000000000..054365fcae --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/outputs.tf @@ -0,0 +1,25 @@ +# Resource outputs +output "stream_workspace_id" { + description = "The stream workspace ID" + value = mongodbatlas_stream_workspace.example.id +} + +output "stream_workspace_hostnames" { + description = "The stream workspace hostnames" + value = mongodbatlas_stream_workspace.example.hostnames +} + +# Data source outputs +output "workspace_from_data_source" { + description = "Stream workspace details from data source" + value = { + id = data.mongodbatlas_stream_workspace.example.id + workspace_name = data.mongodbatlas_stream_workspace.example.workspace_name + hostnames = data.mongodbatlas_stream_workspace.example.hostnames + } +} + +output "total_workspaces_count" { + description = "Total number of stream workspaces in the project" + value = length(data.mongodbatlas_stream_workspaces.example.results) +} diff --git 
a/examples/mongodbatlas_stream_workspace/provider.tf b/examples/mongodbatlas_stream_workspace/provider.tf new file mode 100644 index 0000000000..edb52cb687 --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/provider.tf @@ -0,0 +1,4 @@ +provider "mongodbatlas" { + client_id = var.atlas_client_id + client_secret = var.atlas_client_secret +} diff --git a/examples/mongodbatlas_stream_workspace/variables.tf b/examples/mongodbatlas_stream_workspace/variables.tf new file mode 100644 index 0000000000..dbb044d777 --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/variables.tf @@ -0,0 +1,15 @@ +variable "atlas_client_id" { + description = "MongoDB Atlas Service Account Client ID" + type = string + default = "" +} +variable "atlas_client_secret" { + description = "MongoDB Atlas Service Account Client Secret" + type = string + sensitive = true + default = "" +} +variable "org_id" { + description = "Unique 24-hexadecimal digit string that identifies your Atlas Organization" + type = string +} diff --git a/examples/mongodbatlas_stream_workspace/versions.tf b/examples/mongodbatlas_stream_workspace/versions.tf new file mode 100644 index 0000000000..0fe79cfac9 --- /dev/null +++ b/examples/mongodbatlas_stream_workspace/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + mongodbatlas = { + source = "mongodb/mongodbatlas" + } + } + required_version = ">= 1.0" +} diff --git a/internal/provider/provider.go b/internal/provider/provider.go index 46bb9a7e0c..e16e6e2096 100644 --- a/internal/provider/provider.go +++ b/internal/provider/provider.go @@ -45,6 +45,7 @@ import ( "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streaminstance" "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streamprivatelinkendpoint" "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streamprocessor" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streamworkspace" 
"github.com/mongodb/terraform-provider-mongodbatlas/internal/service/teamprojectassignment" "github.com/mongodb/terraform-provider-mongodbatlas/version" ) @@ -271,6 +272,8 @@ func (p *MongodbtlasProvider) DataSources(context.Context) []func() datasource.D pushbasedlogexport.DataSource, streaminstance.DataSource, streaminstance.PluralDataSource, + streamworkspace.DataSource, + streamworkspace.PluralDataSource, streamconnection.DataSource, streamconnection.PluralDataSource, controlplaneipaddresses.DataSource, @@ -314,6 +317,7 @@ func (p *MongodbtlasProvider) Resources(context.Context) []func() resource.Resou searchdeployment.Resource, pushbasedlogexport.Resource, streaminstance.Resource, + streamworkspace.Resource, streamconnection.Resource, streamprocessor.Resource, encryptionatrestprivateendpoint.Resource, diff --git a/internal/service/streaminstance/data_source_stream_instances.go b/internal/service/streaminstance/data_source_stream_instances.go index 2afd363c80..788b77480e 100644 --- a/internal/service/streaminstance/data_source_stream_instances.go +++ b/internal/service/streaminstance/data_source_stream_instances.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant" "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" "go.mongodb.org/atlas-sdk/v20250312009/admin" @@ -31,6 +32,7 @@ func (d *streamInstancesDS) Schema(ctx context.Context, req datasource.SchemaReq RequiredFields: []string{"project_id"}, HasLegacyFields: true, }) + resp.Schema.DeprecationMessage = fmt.Sprintf(constant.DeprecationNextMajorWithReplacementGuide, "data source", "mongodbatlas_stream_workspaces", "https://registry.terraform.io/providers/mongodb/mongodbatlas/latest/docs/guides/stream-instance-to-stream-workspace-migration-guide") 
} func (d *streamInstancesDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { diff --git a/internal/service/streaminstance/resource_schema.go b/internal/service/streaminstance/resource_schema.go index 974ca06364..07acfeffed 100644 --- a/internal/service/streaminstance/resource_schema.go +++ b/internal/service/streaminstance/resource_schema.go @@ -2,16 +2,19 @@ package streaminstance import ( "context" + "fmt" "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant" ) func ResourceSchema(ctx context.Context) schema.Schema { return schema.Schema{ + DeprecationMessage: fmt.Sprintf(constant.DeprecationNextMajorWithReplacementGuide, "resource", "mongodbatlas_stream_workspace", "https://registry.terraform.io/providers/mongodb/mongodbatlas/latest/docs/guides/stream-instance-to-stream-workspace-migration-guide"), Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, diff --git a/internal/service/streamworkspace/data_source.go b/internal/service/streamworkspace/data_source.go new file mode 100644 index 0000000000..8d54e76b8a --- /dev/null +++ b/internal/service/streamworkspace/data_source.go @@ -0,0 +1,59 @@ +package streamworkspace + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streaminstance" +) + +var _ datasource.DataSource = &streamsWorkspaceDS{} +var _ 
datasource.DataSourceWithConfigure = &streamsWorkspaceDS{} + +func DataSource() datasource.DataSource { + return &streamsWorkspaceDS{ + DSCommon: config.DSCommon{ + DataSourceName: streamsWorkspaceName, + }, + } +} + +type streamsWorkspaceDS struct { + config.DSCommon +} + +func (d *streamsWorkspaceDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = conversion.DataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.DataSourceSchemaRequest{ + RequiredFields: []string{"project_id", "workspace_name"}, + }) +} + +func (d *streamsWorkspaceDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var streamsWorkspaceConfig TFModel + resp.Diagnostics.Append(req.Config.Get(ctx, &streamsWorkspaceConfig)...) + if resp.Diagnostics.HasError() { + return + } + + connV2 := d.Client.AtlasV2 + projectID := streamsWorkspaceConfig.ProjectID.ValueString() + workspaceName := streamsWorkspaceConfig.WorkspaceName.ValueString() + apiResp, _, err := connV2.StreamsApi.GetStreamWorkspace(ctx, projectID, workspaceName).Execute() + if err != nil { + resp.Diagnostics.AddError("error fetching resource", err.Error()) + return + } + + newInstanceModel, diags := streaminstance.NewTFStreamInstance(ctx, apiResp) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + var newWorkspaceModel TFModel + newWorkspaceModel.FromInstanceModel(newInstanceModel) + + resp.Diagnostics.Append(resp.State.Set(ctx, newWorkspaceModel)...) 
+} diff --git a/internal/service/streamworkspace/data_source_test.go b/internal/service/streamworkspace/data_source_test.go new file mode 100644 index 0000000000..7c857c05b0 --- /dev/null +++ b/internal/service/streamworkspace/data_source_test.go @@ -0,0 +1,48 @@ +package streamworkspace_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc" +) + +const ( + region = "VIRGINIA_USA" + cloudProvider = "AWS" +) + +func TestAccStreamsWorkspaceDS_basic(t *testing.T) { + var ( + dataSourceName = "data.mongodbatlas_stream_workspace.test" + projectID = acc.ProjectIDExecution(t) + workspaceName = acc.RandomName() + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: acc.CheckDestroyStreamInstance, + Steps: []resource.TestStep{ + { + Config: streamsWorkspaceDataSourceConfig(projectID, workspaceName, region, cloudProvider), + Check: resource.ComposeAggregateTestCheckFunc( + streamsWorkspaceAttributeChecks(dataSourceName, workspaceName, region, cloudProvider), + resource.TestCheckResourceAttr(dataSourceName, "stream_config.tier", "SP30"), + ), + }, + }, + }) +} + +func streamsWorkspaceDataSourceConfig(projectID, workspaceName, region, cloudProvider string) string { + return fmt.Sprintf(` + %s + + data "mongodbatlas_stream_workspace" "test" { + project_id = mongodbatlas_stream_workspace.test.project_id + workspace_name = mongodbatlas_stream_workspace.test.workspace_name + } + `, streamsWorkspaceConfig(projectID, workspaceName, region, cloudProvider)) +} diff --git a/internal/service/streamworkspace/main_test.go b/internal/service/streamworkspace/main_test.go new file mode 100644 index 0000000000..b497136d03 --- /dev/null +++ b/internal/service/streamworkspace/main_test.go @@ -0,0 +1,15 @@ +package streamworkspace_test + +import ( + "os" 
+ "testing" + + "github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc" +) + +func TestMain(m *testing.M) { + cleanup := acc.SetupSharedResources() + exitCode := m.Run() + cleanup() + os.Exit(exitCode) +} diff --git a/internal/service/streamworkspace/model.go b/internal/service/streamworkspace/model.go new file mode 100644 index 0000000000..d0205d007e --- /dev/null +++ b/internal/service/streamworkspace/model.go @@ -0,0 +1,59 @@ +package streamworkspace + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streaminstance" + "go.mongodb.org/atlas-sdk/v20250312009/admin" +) + +// newStreamWorkspaceCreateReq creates an API request for creating a stream workspace +func newStreamWorkspaceCreateReq(ctx context.Context, plan *TFModel) (*admin.StreamsTenant, diag.Diagnostics) { + dataProcessRegion := &TFWorkspaceProcessRegionSpecModel{} + if diags := plan.DataProcessRegion.As(ctx, dataProcessRegion, basetypes.ObjectAsOptions{}); diags.HasError() { + return nil, diags + } + streamTenant := &admin.StreamsTenant{ + GroupId: plan.ProjectID.ValueStringPointer(), + Name: plan.WorkspaceName.ValueStringPointer(), + DataProcessRegion: &admin.StreamsDataProcessRegion{ + CloudProvider: dataProcessRegion.CloudProvider.ValueString(), + Region: dataProcessRegion.Region.ValueString(), + }, + } + if !plan.StreamConfig.IsNull() && !plan.StreamConfig.IsUnknown() { + streamConfig := new(TFWorkspaceStreamConfigModel) + if diags := plan.StreamConfig.As(ctx, streamConfig, basetypes.ObjectAsOptions{}); diags.HasError() { + return nil, diags + } + streamTenant.StreamConfig = &admin.StreamConfig{ + Tier: streamConfig.Tier.ValueStringPointer(), + } + } + return streamTenant, nil +} + +// newStreamWorkspaceUpdateReq creates an API request for updating a stream workspace +func newStreamWorkspaceUpdateReq(ctx 
context.Context, plan *TFModel) (*admin.StreamsDataProcessRegion, diag.Diagnostics) { + dataProcessRegion := &TFWorkspaceProcessRegionSpecModel{} + if diags := plan.DataProcessRegion.As(ctx, dataProcessRegion, basetypes.ObjectAsOptions{}); diags.HasError() { + return nil, diags + } + return &admin.StreamsDataProcessRegion{ + CloudProvider: dataProcessRegion.CloudProvider.ValueString(), + Region: dataProcessRegion.Region.ValueString(), + }, nil +} + +// FromInstanceModel populates this workspace model from a TFStreamInstanceModel +// This eliminates the need for conversion functions by directly updating fields +func (m *TFModel) FromInstanceModel(instanceModel *streaminstance.TFStreamInstanceModel) { + m.ID = instanceModel.ID + m.WorkspaceName = instanceModel.InstanceName // Map instance_name to workspace_name + m.ProjectID = instanceModel.ProjectID + m.DataProcessRegion = instanceModel.DataProcessRegion + m.StreamConfig = instanceModel.StreamConfig + m.Hostnames = instanceModel.Hostnames +} diff --git a/internal/service/streamworkspace/move_state.go b/internal/service/streamworkspace/move_state.go new file mode 100644 index 0000000000..69f7f5f7ea --- /dev/null +++ b/internal/service/streamworkspace/move_state.go @@ -0,0 +1,148 @@ +package streamworkspace + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-go/tfprotov6" + "github.com/hashicorp/terraform-plugin-go/tftypes" + + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/schemafunc" +) + +// MoveState is used with moved block to upgrade from stream_instance to stream_workspace +func (r *rs) MoveState(context.Context) []resource.StateMover { + return []resource.StateMover{{StateMover: stateMover}} +} + +func 
stateMover(ctx context.Context, req resource.MoveStateRequest, resp *resource.MoveStateResponse) { + if req.SourceTypeName != "mongodbatlas_stream_instance" || !strings.HasSuffix(req.SourceProviderAddress, "/mongodbatlas") { + return + } + + // Extract all fields from source state to preserve values during move + stateAttrs := map[string]tftypes.Type{ + "project_id": tftypes.String, + "instance_name": tftypes.String, + "data_process_region": tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "cloud_provider": tftypes.String, + "region": tftypes.String, + }, + }, + "stream_config": tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "tier": tftypes.String, + }, + }, + "hostnames": tftypes.List{ + ElementType: tftypes.String, + }, + } + + rawStateValue, err := req.SourceRawState.UnmarshalWithOpts(tftypes.Object{ + AttributeTypes: stateAttrs, + }, tfprotov6.UnmarshalOpts{ValueFromJSONOpts: tftypes.ValueFromJSONOpts{IgnoreUndefinedAttributes: true}}) + if err != nil { + resp.Diagnostics.AddError("Unable to Unmarshal state", err.Error()) + return + } + + var stateObj map[string]tftypes.Value + if err := rawStateValue.As(&stateObj); err != nil { + resp.Diagnostics.AddError("Unable to Parse state", err.Error()) + return + } + + projectID := schemafunc.GetAttrFromStateObj[string](stateObj, "project_id") + instanceName := schemafunc.GetAttrFromStateObj[string](stateObj, "instance_name") + + if !conversion.IsStringPresent(projectID) || !conversion.IsStringPresent(instanceName) { + resp.Diagnostics.AddError("Unable to read project_id or instance_name from state", + fmt.Sprintf("project_id: %s, instance_name: %s", conversion.SafeString(projectID), conversion.SafeString(instanceName))) + return + } + + // Create model with actual values from source state + model := &TFModel{ + ID: types.StringNull(), // Will be computed during read + ProjectID: types.StringPointerValue(projectID), + WorkspaceName: types.StringPointerValue(instanceName), // Map instance_name 
to workspace_name + } + + // Extract and preserve data_process_region if present + if dataProcessRegionVal, exists := stateObj["data_process_region"]; exists && !dataProcessRegionVal.IsNull() { + var regionObj map[string]tftypes.Value + if err := dataProcessRegionVal.As(®ionObj); err == nil { + cloudProvider := schemafunc.GetAttrFromStateObj[string](regionObj, "cloud_provider") + region := schemafunc.GetAttrFromStateObj[string](regionObj, "region") + + objValue, diags := types.ObjectValue(map[string]attr.Type{ + "cloud_provider": types.StringType, + "region": types.StringType, + }, map[string]attr.Value{ + "cloud_provider": types.StringPointerValue(cloudProvider), + "region": types.StringPointerValue(region), + }) + if !diags.HasError() { + model.DataProcessRegion = objValue + } + } + } + if model.DataProcessRegion.IsNull() { + model.DataProcessRegion = types.ObjectNull(map[string]attr.Type{ + "cloud_provider": types.StringType, + "region": types.StringType, + }) + } + + // Extract and preserve stream_config if present + if streamConfigVal, exists := stateObj["stream_config"]; exists && !streamConfigVal.IsNull() { + var configObj map[string]tftypes.Value + if err := streamConfigVal.As(&configObj); err == nil { + tier := schemafunc.GetAttrFromStateObj[string](configObj, "tier") + + objValue, diags := types.ObjectValue(map[string]attr.Type{ + "tier": types.StringType, + }, map[string]attr.Value{ + "tier": types.StringPointerValue(tier), + }) + if !diags.HasError() { + model.StreamConfig = objValue + } + } + } + if model.StreamConfig.IsNull() { + model.StreamConfig = types.ObjectNull(map[string]attr.Type{ + "tier": types.StringType, + }) + } + + // Extract and preserve hostnames if present + if hostnamesVal, exists := stateObj["hostnames"]; exists && !hostnamesVal.IsNull() { + var hostnamesList []tftypes.Value + if err := hostnamesVal.As(&hostnamesList); err == nil { + var hostnames []string + for _, hostnameVal := range hostnamesList { + var hostname string + if err 
:= hostnameVal.As(&hostname); err == nil { + hostnames = append(hostnames, hostname) + } + } + listValue, diags := types.ListValueFrom(ctx, types.StringType, hostnames) + if !diags.HasError() { + model.Hostnames = listValue + } + } + } + if model.Hostnames.IsNull() { + model.Hostnames = types.ListNull(types.StringType) + } + + resp.Diagnostics.Append(resp.TargetState.Set(ctx, model)...) +} diff --git a/internal/service/streamworkspace/move_state_test.go b/internal/service/streamworkspace/move_state_test.go new file mode 100644 index 0000000000..66b07d71a8 --- /dev/null +++ b/internal/service/streamworkspace/move_state_test.go @@ -0,0 +1,60 @@ +package streamworkspace_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + + "github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc" +) + +func TestAccStreamWorkspace_moveInstance(t *testing.T) { + var ( + projectID = acc.ProjectIDExecution(t) + workspaceName = acc.RandomName() + ) + + resource.ParallelTest(t, resource.TestCase{ + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.SkipBelow(tfversion.Version1_8_0), // moved blocks require Terraform 1.8+ + }, + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: acc.CheckDestroyStreamInstance, + Steps: []resource.TestStep{ + { + Config: configMoveToStreamWorkspace(projectID, workspaceName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("mongodbatlas_stream_workspace.test", "project_id", projectID), + resource.TestCheckResourceAttr("mongodbatlas_stream_workspace.test", "workspace_name", workspaceName), + resource.TestCheckResourceAttrSet("mongodbatlas_stream_workspace.test", "data_process_region.cloud_provider"), + resource.TestCheckResourceAttrSet("mongodbatlas_stream_workspace.test", "data_process_region.region"), + 
resource.TestCheckResourceAttrSet("mongodbatlas_stream_workspace.test", "stream_config.tier"), + ), + }, + }, + }) +} + +func configMoveToStreamWorkspace(projectID, workspaceName string) string { + return fmt.Sprintf(` +moved { + from = mongodbatlas_stream_instance.test + to = mongodbatlas_stream_workspace.test +} + +resource "mongodbatlas_stream_workspace" "test" { + project_id = "%s" + workspace_name = "%s" + data_process_region = { + region = "VIRGINIA_USA" + cloud_provider = "AWS" + } + stream_config = { + tier = "SP30" + } +} +`, projectID, workspaceName) +} diff --git a/internal/service/streamworkspace/plural_data_source.go b/internal/service/streamworkspace/plural_data_source.go new file mode 100644 index 0000000000..a8083815b2 --- /dev/null +++ b/internal/service/streamworkspace/plural_data_source.go @@ -0,0 +1,107 @@ +package streamworkspace + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streaminstance" + "go.mongodb.org/atlas-sdk/v20250312009/admin" +) + +var _ datasource.DataSource = &streamsWorkspacesDS{} +var _ datasource.DataSourceWithConfigure = &streamsWorkspacesDS{} + +func PluralDataSource() datasource.DataSource { + return &streamsWorkspacesDS{ + DSCommon: config.DSCommon{ + DataSourceName: fmt.Sprintf("%ss", streamsWorkspaceName), + }, + } +} + +type streamsWorkspacesDS struct { + config.DSCommon +} + +func (d *streamsWorkspacesDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = conversion.PluralDataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.PluralDataSourceSchemaRequest{ + RequiredFields: 
[]string{"project_id"}, + HasLegacyFields: true, + }) +} + +func (d *streamsWorkspacesDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var streamsWorkspacesConfig TFStreamsWorkspacesModel + resp.Diagnostics.Append(req.Config.Get(ctx, &streamsWorkspacesConfig)...) + if resp.Diagnostics.HasError() { + return + } + + connV2 := d.Client.AtlasV2 + projectID := streamsWorkspacesConfig.ProjectID.ValueString() + itemsPerPage := streamsWorkspacesConfig.ItemsPerPage.ValueInt64Pointer() + pageNum := streamsWorkspacesConfig.PageNum.ValueInt64Pointer() + apiResp, _, err := connV2.StreamsApi.ListStreamWorkspacesWithParams(ctx, &admin.ListStreamWorkspacesApiParams{ + GroupId: projectID, + ItemsPerPage: conversion.Int64PtrToIntPtr(itemsPerPage), + PageNum: conversion.Int64PtrToIntPtr(pageNum), + }).Execute() + if err != nil { + resp.Diagnostics.AddError("error fetching results", err.Error()) + return + } + + newStreamsWorkspacesModel, diags := NewTFStreamsWorkspaces(ctx, &streamsWorkspacesConfig, apiResp) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, newStreamsWorkspacesModel)...) 
+} + +type TFStreamsWorkspacesModel struct { + ID types.String `tfsdk:"id"` + ProjectID types.String `tfsdk:"project_id"` + Results []TFModel `tfsdk:"results"` + PageNum types.Int64 `tfsdk:"page_num"` + ItemsPerPage types.Int64 `tfsdk:"items_per_page"` + TotalCount types.Int64 `tfsdk:"total_count"` +} + +func NewTFStreamsWorkspaces(ctx context.Context, streamsWorkspacesConfig *TFStreamsWorkspacesModel, apiResp *admin.PaginatedApiStreamsTenant) (*TFStreamsWorkspacesModel, diag.Diagnostics) { + var diags diag.Diagnostics + + // Convert the stream instances response to stream instances model first + instancesModel := &streaminstance.TFStreamInstancesModel{ + ID: streamsWorkspacesConfig.ID, + ProjectID: streamsWorkspacesConfig.ProjectID, + PageNum: streamsWorkspacesConfig.PageNum, + ItemsPerPage: streamsWorkspacesConfig.ItemsPerPage, + } + + newInstancesModel, instanceDiags := streaminstance.NewTFStreamInstances(ctx, instancesModel, apiResp) + if instanceDiags.HasError() { + diags.Append(instanceDiags...) 
+ return nil, diags + } + + // Convert each instance result to workspace result + workspaceResults := make([]TFModel, len(newInstancesModel.Results)) + for i := range newInstancesModel.Results { + workspaceResults[i].FromInstanceModel(&newInstancesModel.Results[i]) + } + + return &TFStreamsWorkspacesModel{ + ID: newInstancesModel.ID, + ProjectID: newInstancesModel.ProjectID, + Results: workspaceResults, + PageNum: newInstancesModel.PageNum, + ItemsPerPage: newInstancesModel.ItemsPerPage, + TotalCount: newInstancesModel.TotalCount, + }, diags +} diff --git a/internal/service/streamworkspace/plural_data_source_test.go b/internal/service/streamworkspace/plural_data_source_test.go new file mode 100644 index 0000000000..c99722dcde --- /dev/null +++ b/internal/service/streamworkspace/plural_data_source_test.go @@ -0,0 +1,83 @@ +package streamworkspace_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc" +) + +func TestAccStreamsWorkspacesDS_basic(t *testing.T) { + var ( + dataSourceName = "data.mongodbatlas_stream_workspaces.test" + projectID = acc.ProjectIDExecution(t) + workspaceName = acc.RandomName() + ) + + checks := []resource.TestCheckFunc{ + resource.TestCheckResourceAttrSet(dataSourceName, "project_id"), + resource.TestCheckResourceAttrSet(dataSourceName, "results.#"), + resource.TestCheckResourceAttrSet(dataSourceName, "results.0.workspace_name"), + resource.TestCheckResourceAttrSet(dataSourceName, "results.0.data_process_region.region"), + resource.TestCheckResourceAttrSet(dataSourceName, "results.0.data_process_region.cloud_provider"), + resource.TestCheckResourceAttrSet(dataSourceName, "results.0.hostnames.#"), + } + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: acc.CheckDestroyStreamInstance, + Steps: 
[]resource.TestStep{ + { + Config: streamsWorkspacesDataSourceConfig(projectID, workspaceName, region, cloudProvider), + Check: resource.ComposeAggregateTestCheckFunc(checks...), + }, + }, + }) +} + +func TestAccStreamsWorkspacesDS_withPageConfig(t *testing.T) { + var ( + dataSourceName = "data.mongodbatlas_stream_workspaces.test" + projectID = acc.ProjectIDExecution(t) + workspaceName = acc.RandomName() + pageNumber = 1000 // high page number so no results are returned + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(t) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: acc.CheckDestroyStreamInstance, + Steps: []resource.TestStep{ + { + Config: streamsWorkspacesWithPageAttrDataSourceConfig(projectID, workspaceName, region, cloudProvider, pageNumber), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "results.#", "0"), + ), + }, + }, + }) +} + +func streamsWorkspacesDataSourceConfig(projectID, workspaceName, region, cloudProvider string) string { + return fmt.Sprintf(` + %s + + data "mongodbatlas_stream_workspaces" "test" { + project_id = mongodbatlas_stream_workspace.test.project_id + } + `, streamsWorkspaceConfig(projectID, workspaceName, region, cloudProvider)) +} + +func streamsWorkspacesWithPageAttrDataSourceConfig(projectID, workspaceName, region, cloudProvider string, pageNum int) string { + return fmt.Sprintf(` + %s + + data "mongodbatlas_stream_workspaces" "test" { + project_id = mongodbatlas_stream_workspace.test.project_id + page_num = %d + items_per_page = 1 + } + `, streamsWorkspaceConfig(projectID, workspaceName, region, cloudProvider), pageNum) +} diff --git a/internal/service/streamworkspace/resource.go b/internal/service/streamworkspace/resource.go new file mode 100644 index 0000000000..bec614579d --- /dev/null +++ b/internal/service/streamworkspace/resource.go @@ -0,0 +1,175 @@ +package streamworkspace + +import ( + "context" + 
"errors" + "regexp" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + + "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/validate" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/config" + "github.com/mongodb/terraform-provider-mongodbatlas/internal/service/streaminstance" +) + +var _ resource.ResourceWithConfigure = &rs{} +var _ resource.ResourceWithImportState = &rs{} +var _ resource.ResourceWithMoveState = &rs{} + +const streamsWorkspaceName = "stream_workspace" + +func Resource() resource.Resource { + return &rs{ + RSCommon: config.RSCommon{ + ResourceName: streamsWorkspaceName, + }, + } +} + +type rs struct { + config.RSCommon +} + +func (r *rs) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = ResourceSchema(ctx) +} + +func (r *rs) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan TFModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + connV2 := r.Client.AtlasV2 + projectID := plan.ProjectID.ValueString() + streamWorkspaceReq, diags := newStreamWorkspaceCreateReq(ctx, &plan) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + apiResp, _, err := connV2.StreamsApi.CreateStreamWorkspace(ctx, projectID, streamWorkspaceReq).Execute() + if err != nil { + resp.Diagnostics.AddError("error creating resource", err.Error()) + return + } + + newInstanceModel, diags := streaminstance.NewTFStreamInstance(ctx, apiResp) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + // Convert back to workspace model + var newWorkspaceModel TFModel + newWorkspaceModel.FromInstanceModel(newInstanceModel) + + resp.Diagnostics.Append(resp.State.Set(ctx, newWorkspaceModel)...) 
+}
+
+func (r *rs) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // Read refreshes state from the API; a 404 removes the resource from state.
+	var streamsWorkspaceState TFModel
+	resp.Diagnostics.Append(req.State.Get(ctx, &streamsWorkspaceState)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	connV2 := r.Client.AtlasV2
+	projectID := streamsWorkspaceState.ProjectID.ValueString()
+	workspaceName := streamsWorkspaceState.WorkspaceName.ValueString()
+	apiResp, getResp, err := connV2.StreamsApi.GetStreamWorkspace(ctx, projectID, workspaceName).Execute()
+	if err != nil {
+		if validate.StatusNotFound(getResp) {
+			resp.State.RemoveResource(ctx) // deleted outside Terraform; drop from state
+			return
+		}
+		resp.Diagnostics.AddError("error fetching resource", err.Error())
+		return
+	}
+
+	newInstanceModel, diags := streaminstance.NewTFStreamInstance(ctx, apiResp) // reuse the stream_instance API-to-model mapping
+	if diags.HasError() {
+		resp.Diagnostics.Append(diags...)
+		return
+	}
+
+	// Convert back to workspace model
+	var newWorkspaceModel TFModel
+	newWorkspaceModel.FromInstanceModel(newInstanceModel)
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, newWorkspaceModel)...)
+}
+
+func (r *rs) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // Update pushes planned changes and rebuilds state from the API response.
+	var plan TFModel
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	connV2 := r.Client.AtlasV2
+	projectID := plan.ProjectID.ValueString()
+	workspaceName := plan.WorkspaceName.ValueString()
+	streamWorkspaceReq, diags := newStreamWorkspaceUpdateReq(ctx, &plan)
+	if diags.HasError() {
+		resp.Diagnostics.Append(diags...)
+		return
+	}
+	apiResp, _, err := connV2.StreamsApi.UpdateStreamWorkspace(ctx, projectID, workspaceName, streamWorkspaceReq).Execute()
+	if err != nil {
+		resp.Diagnostics.AddError("error updating resource", err.Error())
+		return
+	}
+
+	newInstanceModel, diags := streaminstance.NewTFStreamInstance(ctx, apiResp) // reuse the stream_instance API-to-model mapping
+	if diags.HasError() {
+		resp.Diagnostics.Append(diags...)
+		return
+	}
+
+	var newWorkspaceModel TFModel
+	newWorkspaceModel.FromInstanceModel(newInstanceModel) // convert back to workspace model
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, newWorkspaceModel)...)
+}
+
+func (r *rs) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // Delete removes the workspace in Atlas; state removal is handled by the framework on success.
+	var streamsWorkspaceState *TFModel
+	resp.Diagnostics.Append(req.State.Get(ctx, &streamsWorkspaceState)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	connV2 := r.Client.AtlasV2
+	projectID := streamsWorkspaceState.ProjectID.ValueString()
+	workspaceName := streamsWorkspaceState.WorkspaceName.ValueString()
+	if _, err := connV2.StreamsApi.DeleteStreamWorkspace(ctx, projectID, workspaceName).Execute(); err != nil {
+		resp.Diagnostics.AddError("error during resource delete", err.Error())
+		return
+	}
+}
+
+func (r *rs) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { // ImportState accepts IDs of the form {project_id}-{workspace_name}.
+	projectID, workspaceName, err := splitStreamsWorkspaceImportID(req.ID)
+	if err != nil {
+		resp.Diagnostics.AddError("error splitting streams workspace import ID", err.Error())
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...)
+	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("workspace_name"), workspaceName)...) // remaining attributes are filled by the subsequent Read
+}
+
+func splitStreamsWorkspaceImportID(id string) (projectID, workspaceName string, err error) { // parses {project_id}-{workspace_name}; project ID is exactly 24 hex chars
+	var re = regexp.MustCompile(`(?s)^([0-9a-fA-F]{24})-(.*)$`) // (?s) lets '.' match any char, so hyphens in the workspace name are fine
+	parts := re.FindStringSubmatch(id)
+
+	if len(parts) != 3 {
+		err = errors.New("use the format {project_id}-{workspace_name}")
+		return
+	}
+
+	projectID, workspaceName = parts[1], parts[2]
+	return
+}
diff --git a/internal/service/streamworkspace/resource_schema.go b/internal/service/streamworkspace/resource_schema.go
new file mode 100644
index 0000000000..ea256d935e
--- /dev/null
+++ b/internal/service/streamworkspace/resource_schema.go
@@ -0,0 +1,75 @@
+package streamworkspace
+
+import (
+	"context"
+
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func ResourceSchema(ctx context.Context) schema.Schema { // ResourceSchema defines the Terraform schema for mongodbatlas_stream_workspace.
+	return schema.Schema{
+		Attributes: map[string]schema.Attribute{
+			"id": schema.StringAttribute{
+				Computed: true,
+			},
+			"workspace_name": schema.StringAttribute{
+				Required: true,
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.RequiresReplace(), // renaming forces recreation
+				},
+			},
+			"project_id": schema.StringAttribute{
+				Required: true,
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.RequiresReplace(), // cannot move a workspace between projects in place
+				},
+			},
+			"data_process_region": schema.SingleNestedAttribute{
+				Required: true,
+				Attributes: map[string]schema.Attribute{
+					"cloud_provider": schema.StringAttribute{
+						Required: true,
+					},
+					"region": schema.StringAttribute{
+						Required: true,
+					},
+				},
+			},
+			"hostnames": schema.ListAttribute{ // server-assigned connection hostnames
+				ElementType: types.StringType,
+				Computed:    true,
+			},
+			"stream_config": schema.SingleNestedAttribute{ // Optional+Computed: API supplies a default when omitted
+				Optional: true,
+				Computed: true,
+				Attributes: map[string]schema.Attribute{
+					"tier": schema.StringAttribute{
+						Optional: true,
+						Computed: true,
},
+				},
+			},
+		},
+	}
+}
+
+type TFModel struct { // state/plan model for mongodbatlas_stream_workspace
+	ID                types.String `tfsdk:"id"`
+	WorkspaceName     types.String `tfsdk:"workspace_name"` // Only difference from TFStreamInstanceModel
+	ProjectID         types.String `tfsdk:"project_id"`
+	DataProcessRegion types.Object `tfsdk:"data_process_region"`
+	StreamConfig      types.Object `tfsdk:"stream_config"`
+	Hostnames         types.List   `tfsdk:"hostnames"`
+}
+
+type TFWorkspaceProcessRegionSpecModel struct { // typed view of the data_process_region object
+	CloudProvider types.String `tfsdk:"cloud_provider"`
+	Region        types.String `tfsdk:"region"`
+}
+
+type TFWorkspaceStreamConfigModel struct { // typed view of the stream_config object
+	Tier types.String `tfsdk:"tier"`
+}
diff --git a/internal/service/streamworkspace/resource_test.go b/internal/service/streamworkspace/resource_test.go
new file mode 100644
index 0000000000..81cf67b45e
--- /dev/null
+++ b/internal/service/streamworkspace/resource_test.go
@@ -0,0 +1,130 @@
+package streamworkspace_test
+
+import (
+	"context"
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-plugin-testing/terraform"
+	"github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc"
+)
+
+func TestAccStreamWorkspaceRS_basic(t *testing.T) { // create without stream_config, then round-trip via import
+	var (
+		resourceName  = "mongodbatlas_stream_workspace.test"
+		projectID     = acc.ProjectIDExecution(t)
+		workspaceName = acc.RandomName()
+	)
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                 func() { acc.PreCheckBasic(t) },
+		ProtoV6ProviderFactories: acc.TestAccProviderV6Factories,
+		CheckDestroy:             acc.CheckDestroyStreamInstance, // Reuse the same destroy check
+		Steps: []resource.TestStep{
+			{
+				Config: streamsWorkspaceConfig(projectID, workspaceName, region, cloudProvider),
+				Check: resource.ComposeAggregateTestCheckFunc(
+					streamsWorkspaceAttributeChecks(resourceName, workspaceName, region, cloudProvider),
+					resource.TestCheckResourceAttr(resourceName, "stream_config.tier", "SP30"), // presumably the API's computed default tier — config omits stream_config; confirm
+				),
+			},
+			{
+				ResourceName: resourceName,
+				ImportStateIdFunc:
checkStreamsWorkspaceImportStateIDFunc(resourceName),
+				ImportState:       true,
+				ImportStateVerify: true,
+			},
+		},
+	})
+}
+
+func TestAccStreamWorkspaceRS_withStreamConfig(t *testing.T) { // create with an explicit stream_config tier
+	var (
+		resourceName  = "mongodbatlas_stream_workspace.test"
+		projectID     = acc.ProjectIDExecution(t)
+		workspaceName = acc.RandomName()
+	)
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                 func() { acc.PreCheckBasic(t) },
+		ProtoV6ProviderFactories: acc.TestAccProviderV6Factories,
+		CheckDestroy:             acc.CheckDestroyStreamInstance, // Reuse the same destroy check
+		Steps: []resource.TestStep{
+			{
+				Config: streamsWorkspaceConfigWithStreamConfig(projectID, workspaceName, region, cloudProvider),
+				Check: resource.ComposeAggregateTestCheckFunc(
+					streamsWorkspaceAttributeChecks(resourceName, workspaceName, region, cloudProvider),
+					resource.TestCheckResourceAttr(resourceName, "stream_config.tier", "SP30"),
+				),
+			},
+		},
+	})
+}
+
+func streamsWorkspaceAttributeChecks(resourceName, workspaceName, region, cloudProvider string) resource.TestCheckFunc { // common assertions shared by both acceptance tests
+	return resource.ComposeAggregateTestCheckFunc(
+		checkStreamsWorkspaceExists(resourceName),
+		resource.TestCheckResourceAttrSet(resourceName, "project_id"),
+		resource.TestCheckResourceAttr(resourceName, "workspace_name", workspaceName),
+		resource.TestCheckResourceAttr(resourceName, "data_process_region.region", region),
+		resource.TestCheckResourceAttr(resourceName, "data_process_region.cloud_provider", cloudProvider),
+		resource.TestCheckResourceAttrSet(resourceName, "hostnames.#"),
+	)
+}
+
+func checkStreamsWorkspaceExists(resourceName string) resource.TestCheckFunc { // confirms the workspace exists in Atlas via a direct API call
+	return func(s *terraform.State) error {
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return fmt.Errorf("not found: %s", resourceName)
+		}
+		if rs.Primary.ID == "" {
+			return fmt.Errorf("no ID is set")
+		}
+		projectID := rs.Primary.Attributes["project_id"]
+		workspaceName := rs.Primary.Attributes["workspace_name"]
+		_, _, err :=
acc.ConnV2().StreamsApi.GetStreamWorkspace(context.Background(), projectID, workspaceName).Execute()
+		if err != nil {
+			return fmt.Errorf("stream workspace (%s:%s) does not exist: %s", projectID, workspaceName, err)
+		}
+		return nil
+	}
+}
+
+func checkStreamsWorkspaceImportStateIDFunc(resourceName string) resource.ImportStateIdFunc { // builds the {project_id}-{workspace_name} import ID from state
+	return func(s *terraform.State) (string, error) {
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return "", fmt.Errorf("not found: %s", resourceName)
+		}
+		return fmt.Sprintf("%s-%s", rs.Primary.Attributes["project_id"], rs.Primary.Attributes["workspace_name"]), nil
+	}
+}
+
+func streamsWorkspaceConfig(projectID, workspaceName, region, cloudProvider string) string { // minimal HCL config; stream_config omitted so the API computes it
+	return fmt.Sprintf(`
+	resource "mongodbatlas_stream_workspace" "test" {
+		project_id = %[1]q
+		workspace_name = %[2]q
+		data_process_region = {
+			region = %[3]q
+			cloud_provider = %[4]q
+		}
+	}
+	`, projectID, workspaceName, region, cloudProvider)
+}
+
+func streamsWorkspaceConfigWithStreamConfig(projectID, workspaceName, region, cloudProvider string) string { // HCL config with an explicit SP30 stream_config block
+	return fmt.Sprintf(`
+	resource "mongodbatlas_stream_workspace" "test" {
+		project_id = %[1]q
+		workspace_name = %[2]q
+		data_process_region = {
+			region = %[3]q
+			cloud_provider = %[4]q
+		}
+		stream_config = {
+			tier = "SP30"
+		}
+	}
+	`, projectID, workspaceName, region, cloudProvider)
+}