Skip to content

Commit e93e8fb

Browse files
committed
Update gcp and aws existing bucket data sources to align with the azure data source.
Use common.RemoteStorage to initialize the data sources. Use rclone to verify storage during Read. Remove aws s3 client mocks and tests that rely on them.
1 parent 9cc100e commit e93e8fb

File tree

11 files changed

+86
-179
lines changed

11 files changed

+86
-179
lines changed

Makefile

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,5 +25,3 @@ sweep:
2525

2626
testacc:
2727
TF_ACC=1 go test ./... -v ${TESTARGS} -timeout 120m
28-
29-
generate:

task/aws/resources/data_source_bucket.go

Lines changed: 23 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -3,65 +3,60 @@ package resources
33
import (
44
"context"
55
"fmt"
6-
"strings"
76

87
"github.com/aws/aws-sdk-go-v2/aws"
9-
"github.com/aws/aws-sdk-go-v2/service/s3"
108

119
"terraform-provider-iterative/task/common"
10+
"terraform-provider-iterative/task/common/machine"
1211
)
1312

1413
// NewExistingS3Bucket returns a new data source referring to a pre-allocated
1514
// S3 bucket.
16-
func NewExistingS3Bucket(client S3Client, credentials aws.Credentials, storageParams common.RemoteStorage) *ExistingS3Bucket {
15+
func NewExistingS3Bucket(credentials aws.Credentials, storageParams common.RemoteStorage) *ExistingS3Bucket {
1716
return &ExistingS3Bucket{
18-
client: client,
1917
credentials: credentials,
2018
params: storageParams,
2119
}
2220
}
2321

2422
// ExistingS3Bucket identifies an existing S3 bucket.
2523
type ExistingS3Bucket struct {
26-
client S3Client
2724
credentials aws.Credentials
2825

2926
params common.RemoteStorage
3027
}
3128

3229
// Read verifies the specified S3 bucket is accessible.
3330
func (b *ExistingS3Bucket) Read(ctx context.Context) error {
34-
input := s3.HeadBucketInput{
35-
Bucket: aws.String(b.params.Container),
36-
}
37-
if _, err := b.client.HeadBucket(ctx, &input); err != nil {
38-
if errorCodeIs(err, errNotFound) {
39-
return common.NotFoundError
40-
}
41-
return err
31+
err := machine.CheckStorage(ctx, b.connection())
32+
if err != nil {
33+
return fmt.Errorf("failed to verify existing s3 bucket: %w", err)
4234
}
4335
return nil
4436
}
4537

38+
func (b *ExistingS3Bucket) connection() machine.RcloneConnection {
39+
region := b.params.Config["region"]
40+
return machine.RcloneConnection{
41+
Backend: machine.RcloneBackendS3,
42+
Container: b.params.Container,
43+
Path: b.params.Path,
44+
Config: map[string]string{
45+
"provider": "AWS",
46+
"region": region,
47+
"access_key_id": b.credentials.AccessKeyID,
48+
"secret_access_key": b.credentials.SecretAccessKey,
49+
"session_token": b.credentials.SessionToken,
50+
},
51+
}
52+
}
53+
4654
// ConnectionString implements common.StorageCredentials.
4755
// The method returns the rclone connection string for the specific bucket.
4856
func (b *ExistingS3Bucket) ConnectionString(ctx context.Context) (string, error) {
49-
region := b.params.Config["region"]
50-
connectionString := fmt.Sprintf(
51-
":s3,provider=AWS,region=%s,access_key_id=%s,secret_access_key=%s,session_token=%s:%s/%s",
52-
region,
53-
b.credentials.AccessKeyID,
54-
b.credentials.SecretAccessKey,
55-
b.credentials.SessionToken,
56-
b.params.Container,
57-
strings.TrimPrefix(b.params.Path, "/"))
58-
return connectionString, nil
57+
connection := b.connection()
58+
return connection.String(), nil
5959
}
6060

6161
// build-time check to ensure ExistingS3Bucket implements common.StorageCredentials.
6262
var _ common.StorageCredentials = (*ExistingS3Bucket)(nil)
63-
64-
// S3Client defines the functions of the AWS S3 API used.
65-
type S3Client interface {
66-
HeadBucket(context.Context, *s3.HeadBucketInput, ...func(*s3.Options)) (*s3.HeadBucketOutput, error)
67-
}

task/aws/resources/data_source_bucket_test.go

Lines changed: 2 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,9 @@ import (
55
"testing"
66

77
"github.com/aws/aws-sdk-go-v2/aws"
8-
"github.com/aws/aws-sdk-go-v2/service/s3"
9-
"github.com/aws/smithy-go"
10-
"github.com/golang/mock/gomock"
118
"github.com/stretchr/testify/require"
129

1310
"terraform-provider-iterative/task/aws/resources"
14-
"terraform-provider-iterative/task/aws/resources/mocks"
1511
"terraform-provider-iterative/task/common"
1612
)
1713

@@ -22,46 +18,11 @@ func TestExistingBucketConnectionString(t *testing.T) {
2218
SecretAccessKey: "secret-access-key",
2319
SessionToken: "session-token",
2420
}
25-
b := resources.NewExistingS3Bucket(nil, creds, common.RemoteStorage{
21+
b := resources.NewExistingS3Bucket(creds, common.RemoteStorage{
2622
Container: "pre-created-bucket",
2723
Config: map[string]string{"region": "us-east-1"},
2824
Path: "subdirectory"})
2925
connStr, err := b.ConnectionString(ctx)
3026
require.NoError(t, err)
31-
require.Equal(t, connStr, ":s3,provider=AWS,region=us-east-1,access_key_id=access-key-id,secret_access_key=secret-access-key,session_token=session-token:pre-created-bucket/subdirectory")
32-
}
33-
34-
func TestExistingBucketRead(t *testing.T) {
35-
ctx := context.Background()
36-
ctl := gomock.NewController(t)
37-
defer ctl.Finish()
38-
39-
s3Cl := mocks.NewMockS3Client(ctl)
40-
s3Cl.EXPECT().HeadBucket(gomock.Any(), &s3.HeadBucketInput{Bucket: aws.String("bucket-id")}).Return(nil, nil)
41-
b := resources.NewExistingS3Bucket(s3Cl, aws.Credentials{}, common.RemoteStorage{
42-
Container: "bucket-id",
43-
Config: map[string]string{"region": "us-east-1"},
44-
Path: "subdirectory"})
45-
err := b.Read(ctx)
46-
require.NoError(t, err)
47-
}
48-
49-
// TestExistingBucketReadNotFound tests the case where the s3 client indicates that the bucket could not be
50-
// found.
51-
func TestExistingBucketReadNotFound(t *testing.T) {
52-
ctx := context.Background()
53-
ctl := gomock.NewController(t)
54-
defer ctl.Finish()
55-
56-
s3Cl := mocks.NewMockS3Client(ctl)
57-
58-
s3Cl.EXPECT().
59-
HeadBucket(gomock.Any(), &s3.HeadBucketInput{Bucket: aws.String("bucket-id")}).
60-
Return(nil, &smithy.GenericAPIError{Code: "NotFound"})
61-
b := resources.NewExistingS3Bucket(s3Cl, aws.Credentials{}, common.RemoteStorage{
62-
Container: "bucket-id",
63-
Config: map[string]string{"region": "us-east-1"},
64-
Path: "subdirectory"})
65-
err := b.Read(ctx)
66-
require.ErrorIs(t, err, common.NotFoundError)
27+
require.Equal(t, ":s3,access_key_id='access-key-id',provider='AWS',region='us-east-1',secret_access_key='secret-access-key',session_token='session-token':pre-created-bucket/subdirectory", connStr)
6728
}

task/aws/resources/mocks/gen.go

Lines changed: 0 additions & 5 deletions
This file was deleted.

task/aws/resources/mocks/s3client_generated.go

Lines changed: 0 additions & 56 deletions
This file was deleted.

task/aws/task.go

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -52,18 +52,16 @@ func New(ctx context.Context, cloud common.Cloud, identifier common.Identifier,
5252
)
5353
var bucketCredentials common.StorageCredentials
5454
if task.RemoteStorage != nil {
55-
containerPath := task.RemoteStorage.Path
5655
// If a subdirectory was not specified, the task id will
5756
// be used.
58-
if containerPath == "" {
59-
containerPath = string(t.Identifier)
57+
if task.RemoteStorage.Path == "" {
58+
task.RemoteStorage.Path = string(t.Identifier)
6059
}
6160
// Container config may override the s3 region.
6261
if region, ok := task.RemoteStorage.Config[s3_region]; !ok || region == "" {
6362
task.RemoteStorage.Config[s3_region] = t.Client.Region
6463
}
6564
bucket := resources.NewExistingS3Bucket(
66-
t.Client.Services.S3,
6765
t.Client.Credentials(),
6866
*task.RemoteStorage)
6967
t.DataSources.Bucket = bucket

task/az/task.go

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,11 @@ func New(ctx context.Context, cloud common.Cloud, identifier common.Identifier,
4242
)
4343
var bucketCredentials common.StorageCredentials
4444
if task.RemoteStorage != nil {
45+
// If a subdirectory was not specified, the task id will
46+
// be used.
47+
if task.RemoteStorage.Path == "" {
48+
task.RemoteStorage.Path = string(t.Identifier)
49+
}
4550
bucket := resources.NewExistingBlobContainer(t.Client, *task.RemoteStorage)
4651
t.DataSources.BlobContainer = bucket
4752
bucketCredentials = bucket

task/common/machine/storage.go

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,9 @@ func CheckStorage(ctx context.Context, remoteConn RcloneConnection) error {
219219
type RcloneBackend string
220220

221221
const (
222-
RcloneBackendAzureBlob = "azureblob"
222+
RcloneBackendAzureBlob = "azureblob"
223+
RcloneBackendS3 = "s3"
224+
RcloneBackendGoogleCloudStorage = "googlecloudstorage"
223225
)
224226

225227
// RcloneConnection is used to construct an rclone connection string.

task/gcp/resources/data_source_bucket.go

Lines changed: 20 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -2,66 +2,50 @@ package resources
22

33
import (
44
"context"
5-
"errors"
65
"fmt"
7-
"path"
8-
9-
"google.golang.org/api/googleapi"
10-
"google.golang.org/api/storage/v1"
116

127
"terraform-provider-iterative/task/common"
13-
"terraform-provider-iterative/task/gcp/client"
8+
"terraform-provider-iterative/task/common/machine"
149
)
1510

1611
// NewExistingBucket creates a new data source referring to a pre-allocated GCP storage bucket.
17-
func NewExistingBucket(client *client.Client, id string, path string) *ExistingBucket {
12+
func NewExistingBucket(clientCredentials string, storageParams common.RemoteStorage) *ExistingBucket {
1813
return &ExistingBucket{
19-
client: client,
20-
21-
id: id,
22-
path: path,
14+
clientCredentials: clientCredentials,
15+
params: storageParams,
2316
}
2417
}
2518

2619
// ExistingBucket identifies a pre-allocated storage bucket.
2720
type ExistingBucket struct {
28-
client *client.Client
29-
30-
resource *storage.Bucket
31-
id string
32-
path string
21+
clientCredentials string
22+
params common.RemoteStorage
3323
}
3424

3525
// Read verifies the specified storage bucket exists and is accessible.
3626
func (b *ExistingBucket) Read(ctx context.Context) error {
37-
bucket, err := b.client.Services.Storage.Buckets.Get(b.id).Do()
27+
connection := b.connection()
28+
err := machine.CheckStorage(ctx, connection)
3829
if err != nil {
39-
var e *googleapi.Error
40-
if errors.As(err, &e) && e.Code == 404 {
41-
return common.NotFoundError
42-
}
43-
return err
30+
return fmt.Errorf("failed to verify storage: %w", err)
4431
}
45-
46-
b.resource = bucket
4732
return nil
4833
}
4934

35+
func (b *ExistingBucket) connection() machine.RcloneConnection {
36+
return machine.RcloneConnection{
37+
Backend: machine.RcloneBackendGoogleCloudStorage,
38+
Container: b.params.Container,
39+
Path: b.params.Path,
40+
Config: map[string]string{
41+
"service_account_credentials": b.clientCredentials,
42+
}}
43+
}
44+
5045
// ConnectionString implements common.StorageCredentials.
5146
// The method returns the rclone connection string for the specific bucket.
5247
func (b *ExistingBucket) ConnectionString(ctx context.Context) (string, error) {
53-
if len(b.client.Credentials.JSON) == 0 {
54-
return "", errors.New("unable to find credentials JSON string")
55-
}
56-
credentials := string(b.client.Credentials.JSON)
57-
containerPath := path.Join(b.id, b.path)
58-
connStr := fmt.Sprintf(
59-
":googlecloudstorage,service_account_credentials='%s':%s",
60-
credentials,
61-
containerPath,
62-
)
63-
64-
return connStr, nil
48+
return b.connection().String(), nil
6549
}
6650

6751
var _ common.StorageCredentials = (*ExistingBucket)(nil)
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
package resources_test
2+
3+
import (
4+
"context"
5+
"testing"
6+
7+
"github.com/stretchr/testify/require"
8+
9+
"terraform-provider-iterative/task/common"
10+
"terraform-provider-iterative/task/gcp/resources"
11+
)
12+
13+
func TestExistingBucketConnectionString(t *testing.T) {
14+
ctx := context.Background()
15+
creds := "gcp-credentials-json"
16+
b := resources.NewExistingBucket(creds, common.RemoteStorage{
17+
Container: "pre-created-bucket",
18+
Path: "subdirectory"})
19+
connStr, err := b.ConnectionString(ctx)
20+
require.NoError(t, err)
21+
require.Equal(t, ":googlecloudstorage,service_account_credentials='gcp-credentials-json':pre-created-bucket/subdirectory", connStr)
22+
}

0 commit comments

Comments
 (0)