Commit c93207c

chore: Upgrades datalake_pipeline resource to auto-generated SDK (#1911)
* rename
* change region to avoid out of capacity errors
* plural data source doesn't depend on the resources, so we can ensure both resources are created before the data source is executed
* Revert "change region to avoid out of capacity errors" (this reverts commit 7c78823)
* connv2 in tests
* data sources, read, import
* data source runs
* delete
* create
* migration test
1 parent: 34c8eb1 · commit: c93207c

12 files changed: +328, -315 lines

.github/workflows/migration-tests.yml (+27, -1)

```diff
@@ -65,6 +65,7 @@ jobs:
       network: ${{ steps.filter.outputs.network == 'true' || env.mustTrigger == 'true' }}
       encryption: ${{ steps.filter.outputs.encryption == 'true' || env.mustTrigger == 'true' }}
       serverless: ${{ steps.filter.outputs.serverless == 'true' || env.mustTrigger == 'true' }}
+      data_lake: ${{ steps.filter.outputs.data_lake == 'true' || env.mustTrigger == 'true' }}
     steps:
       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd
@@ -129,6 +130,8 @@ jobs:
           - 'internal/service/serverlessinstance/*.go'
           - 'internal/service/privatelinkendpointserverless/*.go'
           - 'internal/service/privatelinkendpointserviceserverless/*.go'
+          data_lake:
+            - 'internal/service/datalakepipeline/*.go'
 
   project:
     needs: [ change-detection, get-provider-version ]
@@ -435,4 +438,27 @@ jobs:
       MONGODB_ATLAS_LAST_VERSION: ${{ needs.get-provider-version.outputs.provider_version }}
       TEST_REGEX: "^TestAccMigrationServerless"
     run: make testacc
-
+  data_lake:
+    needs: [ change-detection, get-provider-version ]
+    if: ${{ needs.change-detection.outputs.data_lake == 'true' || inputs.test_group == 'data_lake' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version-file: 'go.mod'
+      - uses: hashicorp/setup-terraform@v3
+        with:
+          terraform_version: ${{ env.terraform_version }}
+          terraform_wrapper: false
+      - name: Migration Tests
+        env:
+          MONGODB_ATLAS_PUBLIC_KEY: ${{ secrets.MONGODB_ATLAS_PUBLIC_KEY_CLOUD_DEV }}
+          MONGODB_ATLAS_PRIVATE_KEY: ${{ secrets.MONGODB_ATLAS_PRIVATE_KEY_CLOUD_DEV }}
+          MONGODB_ATLAS_ORG_ID: ${{ vars.MONGODB_ATLAS_ORG_ID_CLOUD_DEV }}
+          MONGODB_ATLAS_BASE_URL: ${{ vars.MONGODB_ATLAS_BASE_URL }}
+          MONGODB_ATLAS_LAST_VERSION: ${{ needs.get-provider-version.outputs.provider_version }}
+          TEST_REGEX: "^TestAccMigrationDataLake"
+        run: make testacc
```

internal/service/datalakepipeline/data_source_data_lake_pipeline.go (+31, -38)

```diff
@@ -8,12 +8,11 @@ import (
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
-	matlas "go.mongodb.org/atlas/mongodbatlas"
 )
 
 func DataSource() *schema.Resource {
 	return &schema.Resource{
-		ReadContext: dataSourceMongoDBAtlasDataLakePipelineRead,
+		ReadContext: dataSourceRead,
 		Schema: map[string]*schema.Schema{
 			"project_id": {
 				Type: schema.TypeString,
@@ -119,13 +118,13 @@ func DataSource() *schema.Resource {
 					},
 				},
 			},
-			"snapshots":           dataSourceSchemaDataLakePipelineSnapshots(),
-			"ingestion_schedules": dataSourceSchemaDataLakePipelineIngestionSchedules(),
+			"snapshots":           dataSourceSchemaSnapshots(),
+			"ingestion_schedules": dataSourceSchemaIngestionSchedules(),
 		},
 	}
 }
 
-func dataSourceSchemaDataLakePipelineIngestionSchedules() *schema.Schema {
+func dataSourceSchemaIngestionSchedules() *schema.Schema {
 	return &schema.Schema{
 		Type:     schema.TypeSet,
 		Computed: true,
@@ -156,7 +155,7 @@ func dataSourceSchemaDataLakePipelineIngestionSchedules() *schema.Schema {
 	}
 }
 
-func dataSourceSchemaDataLakePipelineSnapshots() *schema.Schema {
+func dataSourceSchemaSnapshots() *schema.Schema {
 	return &schema.Schema{
 		Type:     schema.TypeSet,
 		Computed: true,
@@ -222,73 +221,67 @@ func dataSourceSchemaDataLakePipelineSnapshots() *schema.Schema {
 	}
 }
 
-func dataSourceMongoDBAtlasDataLakePipelineRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	conn := meta.(*config.MongoDBClient).Atlas
+func dataSourceRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
+	connV2 := meta.(*config.MongoDBClient).AtlasV2
 	projectID := d.Get("project_id").(string)
 	name := d.Get("name").(string)
 
-	dataLakePipeline, _, err := conn.DataLakePipeline.Get(ctx, projectID, name)
+	pipeline, _, err := connV2.DataLakePipelinesApi.GetPipeline(ctx, projectID, name).Execute()
 	if err != nil {
 		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRead, name, err))
 	}
 
-	snapshots, _, err := conn.DataLakePipeline.ListSnapshots(ctx, projectID, name, nil)
+	snapshots, _, err := connV2.DataLakePipelinesApi.ListPipelineSnapshots(ctx, projectID, name).Execute()
 	if err != nil {
 		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRead, name, err))
 	}
 
-	ingestionSchedules, _, err := conn.DataLakePipeline.ListIngestionSchedules(ctx, projectID, name)
+	ingestionSchedules, _, err := connV2.DataLakePipelinesApi.ListPipelineSchedules(ctx, projectID, name).Execute()
 	if err != nil {
 		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRead, name, err))
 	}
 
-	return setDataLakeResourceData(d, dataLakePipeline, snapshots, ingestionSchedules)
-}
+	pipelineName := pipeline.GetName()
 
-func setDataLakeResourceData(
-	d *schema.ResourceData,
-	pipeline *matlas.DataLakePipeline,
-	snapshots *matlas.DataLakePipelineSnapshotsResponse,
-	ingestionSchedules []*matlas.DataLakePipelineIngestionSchedule) diag.Diagnostics {
-	if err := d.Set("id", pipeline.ID); err != nil {
-		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "id", pipeline.Name, err))
+	if err := d.Set("id", pipeline.GetId()); err != nil {
+		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "id", pipelineName, err))
 	}
 
-	if err := d.Set("state", pipeline.State); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "state", pipeline.Name, err))
+	if err := d.Set("state", pipeline.GetState()); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "state", pipelineName, err))
 	}
 
-	if err := d.Set("created_date", pipeline.CreatedDate); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "created_date", pipeline.Name, err))
+	if err := d.Set("created_date", conversion.TimePtrToStringPtr(pipeline.CreatedDate)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "created_date", pipelineName, err))
 	}
 
-	if err := d.Set("last_updated_date", pipeline.LastUpdatedDate); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "last_updated_date", pipeline.Name, err))
+	if err := d.Set("last_updated_date", conversion.TimePtrToStringPtr(pipeline.LastUpdatedDate)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "last_updated_date", pipelineName, err))
 	}
 
-	if err := d.Set("sink", flattenDataLakePipelineSink(pipeline.Sink)); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "sink", pipeline.Name, err))
+	if err := d.Set("sink", flattenSink(pipeline.Sink)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "sink", pipelineName, err))
 	}
 
-	if err := d.Set("source", flattenDataLakePipelineSource(pipeline.Source)); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "source", pipeline.Name, err))
+	if err := d.Set("source", flattenSource(pipeline.Source)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "source", pipelineName, err))
 	}
 
-	if err := d.Set("transformations", flattenDataLakePipelineTransformations(pipeline.Transformations)); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "transformations", pipeline.Name, err))
+	if err := d.Set("transformations", flattenTransformations(pipeline.GetTransformations())); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "transformations", pipelineName, err))
 	}
 
-	if err := d.Set("snapshots", flattenDataLakePipelineSnapshots(snapshots.Results)); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "snapshots", pipeline.Name, err))
+	if err := d.Set("snapshots", flattenSnapshots(snapshots.GetResults())); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "snapshots", pipelineName, err))
 	}
 
-	if err := d.Set("ingestion_schedules", flattenDataLakePipelineIngestionSchedules(ingestionSchedules)); err != nil {
-		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "ingestion_schedules", pipeline.Name, err))
+	if err := d.Set("ingestion_schedules", flattenIngestionSchedules(ingestionSchedules)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineSetting, "ingestion_schedules", pipelineName, err))
 	}
 
 	d.SetId(conversion.EncodeStateID(map[string]string{
-		"project_id": pipeline.GroupID,
-		"name":       pipeline.Name,
+		"project_id": pipeline.GetGroupId(),
+		"name":       pipelineName,
 	}))
 
 	return nil
```
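A knock-on effect of the SDK swap is visible in the date fields above: the generated models carry CreatedDate and LastUpdatedDate as *time.Time rather than strings, so the reads now route them through conversion.TimePtrToStringPtr before calling d.Set. A plausible shape for that helper, assuming RFC 3339 output (an illustrative sketch, not the provider's actual source):

```go
package conversion

import "time"

// TimePtrToStringPtr, as sketched here, keeps nil as nil and formats any
// non-nil time as an RFC 3339 string pointer for the schema layer.
func TimePtrToStringPtr(t *time.Time) *string {
	if t == nil {
		return nil
	}
	s := t.UTC().Format(time.RFC3339)
	return &s
}
```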

internal/service/datalakepipeline/data_source_data_lake_pipeline_run.go (+20, -21)

```diff
@@ -9,14 +9,14 @@ import (
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
-	matlas "go.mongodb.org/atlas/mongodbatlas"
+	"go.mongodb.org/atlas-sdk/v20231115005/admin"
 )
 
 const errorDataLakePipelineRunRead = "error reading MongoDB Atlas DataLake Run (%s): %s"
 
 func DataSourceRun() *schema.Resource {
 	return &schema.Resource{
-		ReadContext: dataSourceMongoDBAtlasDataLakeRunRead,
+		ReadContext: dataSourceRunRead,
 		Schema: map[string]*schema.Schema{
 			"project_id": {
 				Type: schema.TypeString,
@@ -87,13 +87,13 @@ func DataSourceRun() *schema.Resource {
 	}
 }
 
-func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	conn := meta.(*config.MongoDBClient).Atlas
+func dataSourceRunRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
+	connV2 := meta.(*config.MongoDBClient).AtlasV2
 	projectID := d.Get("project_id").(string)
 	name := d.Get("pipeline_name").(string)
 	pipelineRunID := d.Get("pipeline_run_id").(string)
 
-	dataLakeRun, resp, err := conn.DataLakePipeline.GetRun(ctx, projectID, name, pipelineRunID)
+	run, resp, err := connV2.DataLakePipelinesApi.GetPipelineRun(ctx, projectID, name, pipelineRunID).Execute()
 	if err != nil {
 		if resp != nil && resp.StatusCode == http.StatusNotFound {
 			d.SetId("")
@@ -103,47 +103,47 @@ func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.Resour
 		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunRead, name, err))
 	}
 
-	if err := d.Set("id", dataLakeRun.ID); err != nil {
+	if err := d.Set("id", run.GetId()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "hostnames", name, err))
 	}
 
-	if err := d.Set("project_id", dataLakeRun.GroupID); err != nil {
+	if err := d.Set("project_id", run.GetGroupId()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "state", name, err))
 	}
 
-	if err := d.Set("created_date", dataLakeRun.CreatedDate); err != nil {
+	if err := d.Set("created_date", conversion.TimePtrToStringPtr(run.CreatedDate)); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_databases", name, err))
 	}
 
-	if err := d.Set("last_updated_date", dataLakeRun.LastUpdatedDate); err != nil {
+	if err := d.Set("last_updated_date", conversion.TimePtrToStringPtr(run.LastUpdatedDate)); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_databases", name, err))
 	}
 
-	if err := d.Set("state", dataLakeRun.State); err != nil {
+	if err := d.Set("state", run.GetState()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_databases", name, err))
 	}
 
-	if err := d.Set("phase", dataLakeRun.Phase); err != nil {
+	if err := d.Set("phase", run.GetPhase()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_databases", name, err))
 	}
 
-	if err := d.Set("pipeline_id", dataLakeRun.PipelineID); err != nil {
+	if err := d.Set("pipeline_id", run.GetPipelineId()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_stores", name, err))
 	}
 
-	if err := d.Set("dataset_name", dataLakeRun.DatasetName); err != nil {
+	if err := d.Set("dataset_name", run.GetDatasetName()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_stores", name, err))
 	}
 
-	if err := d.Set("snapshot_id", dataLakeRun.SnapshotID); err != nil {
+	if err := d.Set("snapshot_id", run.GetSnapshotId()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_stores", name, err))
 	}
 
-	if err := d.Set("backup_frequency_type", dataLakeRun.BackupFrequencyType); err != nil {
+	if err := d.Set("backup_frequency_type", run.GetBackupFrequencyType()); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_stores", name, err))
 	}
 
-	if err := d.Set("stats", flattenDataLakePipelineRunStats(dataLakeRun.Stats)); err != nil {
+	if err := d.Set("stats", flattenRunStats(run.Stats)); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "storage_stores", name, err))
 	}
 
@@ -156,15 +156,14 @@ func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.Resour
 	return nil
 }
 
-func flattenDataLakePipelineRunStats(datalakeRunStats *matlas.DataLakePipelineRunStats) []map[string]any {
-	if datalakeRunStats == nil {
+func flattenRunStats(stats *admin.PipelineRunStats) []map[string]any {
+	if stats == nil {
 		return nil
 	}
-
 	maps := make([]map[string]any, 1)
 	maps[0] = map[string]any{
-		"bytes_exported": datalakeRunStats.BytesExported,
-		"num_docs":       datalakeRunStats.NumDocs,
+		"bytes_exported": stats.GetBytesExported(),
+		"num_docs":       stats.GetNumDocs(),
 	}
 	return maps
 }
```
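The switch from field reads (dataLakeRun.ID, datalakeRunStats.NumDocs) to getter calls (run.GetId(), stats.GetNumDocs()) is more than a rename: openapi-generator-style Go models keep optional fields as pointers and emit getters that tolerate nil, which is why the new code needs fewer explicit nil checks. An illustrative sketch of that getter shape (not copied from the SDK):

```go
package example

// exampleRun mimics how the generated models handle optional fields:
// a pointer field plus a getter that guards against nil.
type exampleRun struct {
	State *string
}

// GetState returns the zero value when the receiver or the field is nil,
// mirroring the getters the auto-generated SDK emits.
func (r *exampleRun) GetState() string {
	if r == nil || r.State == nil {
		return ""
	}
	return *r.State
}
```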

internal/service/datalakepipeline/data_source_data_lake_pipeline_run_test.go (+2, -2)

```diff
@@ -23,7 +23,7 @@ func TestAccDataLakeRunDS_basic(t *testing.T) {
 		ProtoV6ProviderFactories: acc.TestAccProviderV6Factories,
 		Steps: []resource.TestStep{
 			{
-				Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID),
+				Config: configRunDS(projectID, pipelineName, runID),
 				Check: resource.ComposeTestCheckFunc(
 					resource.TestCheckResourceAttrSet(dataSourceName, "project_id"),
 					resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName),
@@ -38,7 +38,7 @@ func TestAccDataLakeRunDS_basic(t *testing.T) {
 	})
 }
 
-func testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID string) string {
+func configRunDS(projectID, pipelineName, runID string) string {
 	return fmt.Sprintf(`
 
 data "mongodbatlas_data_lake_pipeline_run" "test" {
```

internal/service/datalakepipeline/data_source_data_lake_pipeline_runs.go (+18, -25)

```diff
@@ -7,15 +7,16 @@ import (
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/id"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
-	matlas "go.mongodb.org/atlas/mongodbatlas"
+	"go.mongodb.org/atlas-sdk/v20231115005/admin"
 )
 
 const errorDataLakePipelineRunList = "error reading MongoDB Atlas DataLake Runs (%s): %s"
 
 func PluralDataSourceRun() *schema.Resource {
 	return &schema.Resource{
-		ReadContext: dataSourceMongoDBAtlasDataLakeRunsRead,
+		ReadContext: dataSourcePluralRunRead,
 		Schema: map[string]*schema.Schema{
 			"project_id": {
 				Type: schema.TypeString,
@@ -90,46 +91,38 @@ func PluralDataSourceRun() *schema.Resource {
 	}
 }
 
-func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	conn := meta.(*config.MongoDBClient).Atlas
+func dataSourcePluralRunRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
+	connV2 := meta.(*config.MongoDBClient).AtlasV2
 	projectID := d.Get("project_id").(string)
 	name := d.Get("pipeline_name").(string)
-
-	dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name)
+	runs, _, err := connV2.DataLakePipelinesApi.ListPipelineRuns(ctx, projectID, name).Execute()
 	if err != nil {
 		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, projectID, err))
 	}
-
-	if err := d.Set("results", flattenDataLakePipelineRunResult(dataLakeRuns.Results)); err != nil {
+	if err := d.Set("results", flattenRunResults(runs.GetResults())); err != nil {
 		return diag.FromErr(fmt.Errorf(ErrorDataLakeSetting, "results", projectID, err))
 	}
-
 	d.SetId(id.UniqueId())
-
 	return nil
 }
 
-func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePipelineRun) []map[string]any {
-	var results []map[string]any
-
+func flattenRunResults(datalakePipelineRuns []admin.IngestionPipelineRun) []map[string]any {
 	if len(datalakePipelineRuns) == 0 {
-		return results
+		return nil
 	}
-
-	results = make([]map[string]any, len(datalakePipelineRuns))
+	results := make([]map[string]any, len(datalakePipelineRuns))
 
 	for k, run := range datalakePipelineRuns {
 		results[k] = map[string]any{
-			"id":                    run.ID,
-			"created_date":          run.CreatedDate,
-			"last_updated_date":     run.LastUpdatedDate,
-			"state":                 run.State,
-			"pipeline_id":           run.PipelineID,
-			"snapshot_id":           run.SnapshotID,
-			"backup_frequency_type": run.BackupFrequencyType,
-			"stats":                 flattenDataLakePipelineRunStats(run.Stats),
+			"id":                    run.GetId(),
+			"created_date":          conversion.TimePtrToStringPtr(run.CreatedDate),
+			"last_updated_date":     conversion.TimePtrToStringPtr(run.LastUpdatedDate),
+			"state":                 run.GetState(),
+			"pipeline_id":           run.GetPipelineId(),
+			"snapshot_id":           run.GetSnapshotId(),
+			"backup_frequency_type": run.GetBackupFrequencyType(),
+			"stats":                 flattenRunStats(run.Stats),
 		}
 	}
-
 	return results
}
```
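One behavioral nuance in the rewrite: the old flattenDataLakePipelineRunResult returned a declared-but-nil slice for empty input, while flattenRunResults returns nil explicitly; for a computed results list the schema layer treats both the same. A minimal test sketch of that contract (hypothetical, not part of this commit):

```go
package datalakepipeline

import (
	"testing"

	"go.mongodb.org/atlas-sdk/v20231115005/admin"
)

func TestFlattenRunResults_emptyInput(t *testing.T) {
	// Nil and empty inputs should both flatten to nil.
	if got := flattenRunResults(nil); got != nil {
		t.Fatalf("expected nil for nil input, got %v", got)
	}
	if got := flattenRunResults([]admin.IngestionPipelineRun{}); got != nil {
		t.Fatalf("expected nil for empty input, got %v", got)
	}
}
```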
