2 changes: 2 additions & 0 deletions Makefile
@@ -25,3 +25,5 @@ sweep:

testacc:
TF_ACC=1 go test ./... -v ${TESTARGS} -timeout 120m

generate:
2 changes: 2 additions & 0 deletions docs/resources/task.md
@@ -63,6 +63,8 @@ resource "iterative_task" "example" {
- `parallelism` - (Optional) Number of machines to be launched in parallel.
- `storage.workdir` - (Optional) Local working directory to upload and use as the `script` working directory.
- `storage.output` - (Optional) Results directory (**relative to `workdir`**) to download (default: no download).
- `storage.container` - (Optional) Pre-allocated container to use for storage of task data, results, and status (see the example below this list).
- `storage.container_path` - (Optional) Subdirectory in the pre-allocated container to use for storage. If omitted, the task's identifier will be used.
- `environment` - (Optional) Map of environment variable names and values for the task script. Empty string values are replaced with local environment values. Empty values may also be combined with a [glob](<https://en.wikipedia.org/wiki/Glob_(programming)>) name to import all matching variables.
- `timeout` - (Optional) Maximum number of seconds to run before instances are force-terminated. The countdown is reset each time TPI auto-respawns a spot instance.
- `tags` - (Optional) Map of tags for the created cloud resources.
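For instance, a task that keeps its data in a pre-allocated bucket might be configured as follows. This is a minimal sketch rather than text from the provider's documentation: the bucket name, subdirectory, machine size, and script are placeholders, and the remaining attributes follow the existing `iterative_task` example above.

```hcl
resource "iterative_task" "example" {
  cloud   = "aws"
  machine = "m"

  storage {
    workdir        = "shared"              # local directory to upload (placeholder)
    output         = "results"             # results directory, downloaded when the task ends
    container      = "existing-bucket"     # pre-allocated bucket (placeholder name)
    container_path = "experiments/run-1"   # optional subdirectory (placeholder)
  }

  script = <<-END
    #!/bin/bash
    echo "done" > results/status.txt
  END
}
```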
1 change: 1 addition & 0 deletions go.mod
@@ -24,6 +24,7 @@ require (
github.com/cloudflare/gokey v0.1.0
github.com/docker/go-units v0.4.0
github.com/gobwas/glob v0.2.3
github.com/golang/mock v1.6.0 // indirect
github.com/google/go-github/v42 v42.0.0
github.com/google/go-github/v45 v45.2.0
github.com/google/uuid v1.3.0
1 change: 1 addition & 0 deletions go.sum
@@ -436,6 +436,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
44 changes: 44 additions & 0 deletions iterative/resource_task.go
@@ -138,6 +138,26 @@ func resourceTask() *schema.Resource {
Optional: true,
Default: "",
},
"container": {
Type: schema.TypeString,
ForceNew: true,
Optional: true,
Default: "",
},
"container_path": {
Type: schema.TypeString,
ForceNew: true,
Optional: true,
Default: "",
},
"container_opts": {
Type: schema.TypeMap,
ForceNew: true,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
},
},
},
@@ -338,10 +358,33 @@ func resourceTaskBuild(ctx context.Context, d *schema.ResourceData, m interface{

directory := ""
directory_out := ""

var remoteStorage *common.RemoteStorage
if d.Get("storage").(*schema.Set).Len() > 0 {
storage := d.Get("storage").(*schema.Set).List()[0].(map[string]interface{})
directory = storage["workdir"].(string)
directory_out = storage["output"].(string)

// Propagate configuration for pre-allocated storage container.
container := storage["container"].(string)
containerPath := storage["container_path"].(string)
if container != "" {
remoteStorage = &common.RemoteStorage{
Container: container,
Path: containerPath,
Config: map[string]string{},
}
}
if storage["container_opts"] != nil {
remoteConfig := storage["container_opts"].(map[string]interface{})
var ok bool
for key, value := range remoteConfig {
if remoteStorage.Config[key], ok = value.(string); !ok {
return nil, fmt.Errorf("invalid value for remote config key %q: %v", key, value)
}
}
}

}

t := common.Task{
@@ -363,6 +406,7 @@ func resourceTaskBuild(ctx context.Context, d *schema.ResourceData, m interface{
},
// Egress is open on every port
},
RemoteStorage: remoteStorage,
Spot: common.Spot(d.Get("spot").(float64)),
Parallelism: uint16(d.Get("parallelism").(int)),
PermissionSet: d.Get("permission_set").(string),
28 changes: 19 additions & 9 deletions task/aws/client/client.go
@@ -33,13 +33,17 @@ func New(ctx context.Context, cloud common.Cloud, tags map[string]string) (*Clie
if err != nil {
return nil, err
}

c := new(Client)
c.Cloud = cloud
c.Region = region
c.Tags = cloud.Tags

c.Config = config
credentials, err := config.Credentials.Retrieve(ctx)
if err != nil {
return nil, err
}
c := &Client{
Cloud: cloud,
Region: region,
Tags: cloud.Tags,
Config: config,
credentials: credentials,
}

c.Services.EC2 = ec2.NewFromConfig(config)
c.Services.S3 = s3.NewFromConfig(config)
@@ -53,8 +57,9 @@ type Client struct {
Region string
Tags map[string]string

Config aws.Config
Services struct {
Config aws.Config
credentials aws.Credentials
Services struct {
EC2 *ec2.Client
S3 *s3.Client
STS *sts.Client
@@ -94,3 +99,8 @@ func (c *Client) DecodeError(ctx context.Context, encoded error) error {

return fmt.Errorf("unable to decode: %s", encoded.Error())
}

// Credentials returns the AWS credentials the client is currently using.
func (c *Client) Credentials() aws.Credentials {
return c.credentials
}
71 changes: 71 additions & 0 deletions task/aws/resources/data_source_bucket.go
@@ -0,0 +1,71 @@
package resources

import (
"context"
"fmt"
"path"

"terraform-provider-iterative/task/common"

"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/s3"
)

// NewExistingS3Bucket returns a new data source referring to a pre-allocated
// S3 bucket.
func NewExistingS3Bucket(client S3Client, credentials aws.Credentials, id string, region string, path string) *ExistingS3Bucket {
return &ExistingS3Bucket{
client: client,
credentials: credentials,
region: region,

id: id,
path: path,
}
}

// ExistingS3Bucket identifies an existing S3 bucket.
type ExistingS3Bucket struct {
client S3Client
credentials aws.Credentials

id string
region string
path string
}

// Read verifies the specified S3 bucket is accessible.
func (b *ExistingS3Bucket) Read(ctx context.Context) error {
input := s3.HeadBucketInput{
Bucket: aws.String(b.id),
}
if _, err := b.client.HeadBucket(ctx, &input); err != nil {
if errorCodeIs(err, errNotFound) {
return common.NotFoundError
}
return err
}
return nil
}

// ConnectionString implements common.StorageCredentials.
// The method returns the rclone connection string for the specific bucket.
func (b *ExistingS3Bucket) ConnectionString(ctx context.Context) (string, error) {
containerPath := path.Join(b.id, b.path)
connectionString := fmt.Sprintf(
":s3,provider=AWS,region=%s,access_key_id=%s,secret_access_key=%s,session_token=%s:%s",
b.region,
b.credentials.AccessKeyID,
b.credentials.SecretAccessKey,
b.credentials.SessionToken,
containerPath)
return connectionString, nil
}

// Build-time check to ensure ExistingS3Bucket implements common.StorageCredentials.
var _ common.StorageCredentials = (*ExistingS3Bucket)(nil)

// S3Client defines the functions of the AWS S3 API used.
type S3Client interface {
HeadBucket(context.Context, *s3.HeadBucketInput, ...func(*s3.Options)) (*s3.HeadBucketOutput, error)
}
58 changes: 58 additions & 0 deletions task/aws/resources/data_source_bucket_test.go
@@ -0,0 +1,58 @@
package resources_test

Member:

Tests are nice, although mocking cloud infrastructure is rather risky. Maybe we can rely on rclone for functionality that is already tested on their side, or use functionality that can be tested locally with the same API as the cloud resources?

Contributor Author:

I don't see how mocking cloud APIs (not infrastructure) is riskier than having no tests at all.

My current proposal is twofold:

  1. Each resource defines limited interfaces (implemented by the corresponding cloud APIs) that include only the methods actually used.
  2. We generate mocks for those interfaces and use them in tests to ensure conditions are handled correctly and API calls are made in the right order.

Member:

If your proposal implies accounting for a considerable amount of cloud-specific details (e.g. “limited interfaces” for S3 and EC2, others for Cloud Storage and Compute Engine, ...), I wonder whether maintaining them will be any easier than testing the code against actual cloud APIs, even if it's noticeably slower.

Contributor Author:

The thing with unit tests is that we can test various edge cases in a repeatable and deterministic way. While they are in no way a replacement for integration tests, accounting for edge cases in integration tests would require either developing extensive test suites or exhaustive checklists, both of which take considerable time to complete.


import (
"context"
"testing"

"terraform-provider-iterative/task/aws/resources"
"terraform-provider-iterative/task/aws/resources/mocks"
"terraform-provider-iterative/task/common"

"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/s3"
"github.com/aws/smithy-go"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/require"
)

func TestExistingBucketConnectionString(t *testing.T) {
ctx := context.Background()
creds := aws.Credentials{
AccessKeyID: "access-key-id",
SecretAccessKey: "secret-access-key",
SessionToken: "session-token",
}
b := resources.NewExistingS3Bucket(nil, creds, "pre-created-bucket", "us-east-1", "subdirectory")
connStr, err := b.ConnectionString(ctx)
require.NoError(t, err)
require.Equal(t, ":s3,provider=AWS,region=us-east-1,access_key_id=access-key-id,secret_access_key=secret-access-key,session_token=session-token:pre-created-bucket/subdirectory", connStr)
}

func TestExistingBucketRead(t *testing.T) {
ctx := context.Background()
ctl := gomock.NewController(t)
defer ctl.Finish()

s3Cl := mocks.NewMockS3Client(ctl)
s3Cl.EXPECT().HeadBucket(gomock.Any(), &s3.HeadBucketInput{Bucket: aws.String("bucket-id")}).Return(nil, nil)
b := resources.NewExistingS3Bucket(s3Cl, aws.Credentials{}, "bucket-id", "us-east-1", "subdirectory")
err := b.Read(ctx)
require.NoError(t, err)
}

// TestExistingBucketReadNotFound tests the case where the S3 client indicates that the bucket could not be
// found.
func TestExistingBucketReadNotFound(t *testing.T) {
ctx := context.Background()
ctl := gomock.NewController(t)
defer ctl.Finish()

s3Cl := mocks.NewMockS3Client(ctl)

s3Cl.EXPECT().
HeadBucket(gomock.Any(), &s3.HeadBucketInput{Bucket: aws.String("bucket-id")}).
Return(nil, &smithy.GenericAPIError{Code: "NotFound"})
b := resources.NewExistingS3Bucket(s3Cl, aws.Credentials{}, "bucket-id", "us-east-1", "subdirectory")
err := b.Read(ctx)
require.ErrorIs(t, err, common.NotFoundError)
}
19 changes: 7 additions & 12 deletions task/aws/resources/data_source_credentials.go
@@ -2,13 +2,12 @@ package resources

import (
"context"
"fmt"

"terraform-provider-iterative/task/aws/client"
"terraform-provider-iterative/task/common"
)

func NewCredentials(client *client.Client, identifier common.Identifier, bucket *Bucket) *Credentials {
func NewCredentials(client *client.Client, identifier common.Identifier, bucket common.StorageCredentials) *Credentials {
c := new(Credentials)
c.Client = client
c.Identifier = identifier.Long()
@@ -20,7 +19,7 @@ type Credentials struct {
Client *client.Client
Identifier string
Dependencies struct {
*Bucket
Bucket common.StorageCredentials
}
Resource map[string]string
}
@@ -31,20 +30,16 @@ func (c *Credentials) Read(ctx context.Context) error {
return err
}

connectionString := fmt.Sprintf(
":s3,provider=AWS,region=%s,access_key_id=%s,secret_access_key=%s,session_token=%s:%s",
c.Client.Region,
credentials.AccessKeyID,
credentials.SecretAccessKey,
credentials.SessionToken,
c.Dependencies.Bucket.Identifier,
)
bucketConnStr, err := c.Dependencies.Bucket.ConnectionString(ctx)
if err != nil {
return err
}

c.Resource = map[string]string{
"AWS_ACCESS_KEY_ID": credentials.AccessKeyID,
"AWS_SECRET_ACCESS_KEY": credentials.SecretAccessKey,
"AWS_SESSION_TOKEN": credentials.SessionToken,
"RCLONE_REMOTE": connectionString,
"RCLONE_REMOTE": bucketConnStr,
"TPI_TASK_CLOUD_PROVIDER": string(c.Client.Cloud.Provider),
"TPI_TASK_CLOUD_REGION": c.Client.Region,
"TPI_TASK_IDENTIFIER": c.Identifier,
5 changes: 5 additions & 0 deletions task/aws/resources/mocks/gen.go
@@ -0,0 +1,5 @@
package mocks

// This file includes go:generate statements for regenerating mocks.

//go:generate mockgen -destination s3client_generated.go -package mocks .. S3Client
56 changes: 56 additions & 0 deletions task/aws/resources/mocks/s3client_generated.go

Some generated files are not rendered by default.