From 8e5ac20624ef7a2eafda8e68fb506dd9f6436d0a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 24 Jul 2025 22:02:35 +0300 Subject: [PATCH 001/127] enhancement(5235): added disc space error function to respond to the user with a clean disk space error, updated all necessary components to inject the error parsing function --- internal/pkg/agent/application/application.go | 2 +- .../coordinator/coordinator_unit_test.go | 1 + .../artifact/download/fs/downloader.go | 14 ++++--- .../artifact/download/fs/verifier_test.go | 4 +- .../artifact/download/http/downloader.go | 26 +++++++------ .../artifact/download/http/downloader_test.go | 10 ++--- .../download/http/progress_reporter.go | 21 ++++++----- .../artifact/download/http/verifier_test.go | 2 +- .../download/localremote/downloader.go | 8 ++-- .../artifact/download/snapshot/downloader.go | 8 ++-- .../download/snapshot/downloader_test.go | 2 +- .../upgrade/insufficient_disk_space_err.go | 3 ++ .../insufficient_disk_space_err_unix.go | 17 +++++++++ .../insufficient_disk_space_err_unix_test.go | 32 ++++++++++++++++ .../insufficient_disk_space_err_windows.go | 18 +++++++++ ...nsufficient_disk_space_err_windows_test.go | 32 ++++++++++++++++ .../application/upgrade/step_download.go | 37 ++++++++++++------- .../application/upgrade/step_download_test.go | 24 ++++++------ .../pkg/agent/application/upgrade/upgrade.go | 26 +++++++------ 19 files changed, 205 insertions(+), 82 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go create mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go create mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go create mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go create mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go diff --git a/internal/pkg/agent/application/application.go b/internal/pkg/agent/application/application.go index dcaa2ddc570..6fb4056850c 100644 --- a/internal/pkg/agent/application/application.go +++ b/internal/pkg/agent/application/application.go @@ -120,7 +120,7 @@ func New( // monitoring is not supported in bootstrap mode https://github.com/elastic/elastic-agent/issues/1761 isMonitoringSupported := !disableMonitoring && cfg.Settings.V1MonitoringEnabled - upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo) + upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo, upgrade.ToDiskSpaceError) if err != nil { return nil, nil, nil, fmt.Errorf("failed to create upgrader: %w", err) } diff --git a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go index 48c254d07cc..e7cee01e99d 100644 --- a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go +++ b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go @@ -460,6 +460,7 @@ func TestCoordinatorReportsInvalidPolicy(t *testing.T) { log, &artifact.Config{}, &info.AgentInfo{}, + upgrade.ToDiskSpaceError, ) require.NoError(t, err, "errored when creating a new upgrader") diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 771a94714fd..7a0a63ce0db 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ 
b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -25,15 +25,17 @@ const ( // Downloader is a downloader able to fetch artifacts from elastic.co web page. type Downloader struct { - dropPath string - config *artifact.Config + dropPath string + config *artifact.Config + diskSpaceErrorFunc func(error) error } // NewDownloader creates and configures Elastic Downloader -func NewDownloader(config *artifact.Config) *Downloader { +func NewDownloader(config *artifact.Config, diskSpaceErrorFunc func(error) error) *Downloader { return &Downloader{ - config: config, - dropPath: getDropPath(config), + config: config, + dropPath: getDropPath(config), + diskSpaceErrorFunc: diskSpaceErrorFunc, } } @@ -121,7 +123,7 @@ func (e *Downloader) downloadFile(filename, fullPath string) (string, error) { _, err = io.Copy(destinationFile, sourceFile) if err != nil { - return "", err + return "", e.diskSpaceErrorFunc(err) } return fullPath, nil diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go index aa15d9a05b1..121237c4b1e 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go @@ -95,7 +95,7 @@ func TestFetchVerify(t *testing.T) { // second one should pass // download not skipped: package missing // verify passes because hash is not correct - _, err = NewDownloader(config).Download(ctx, a, version) + _, err = NewDownloader(config, nil).Download(ctx, a, version) require.NoError(t, err) asc, err := os.ReadFile(filepath.Join(dropPath, filename+".asc")) require.NoErrorf(t, err, "could not open .asc for copy") @@ -224,7 +224,7 @@ func TestVerify(t *testing.T) { pgpKey := prepareTestCase(t, agentSpec, testVersion, config) - testClient := NewDownloader(config) + testClient := NewDownloader(config, nil) artifactPath, err := testClient.Download(ctx, agentSpec, testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts") _, err = testClient.DownloadAsc(context.Background(), agentSpec, *testVersion) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index d10e4b5881f..da11cf619dc 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -45,14 +45,15 @@ const ( // Downloader is a downloader able to fetch artifacts from elastic.co web page. 
type Downloader struct { - log *logger.Logger - config *artifact.Config - client http.Client - upgradeDetails *details.Details + log *logger.Logger + config *artifact.Config + client http.Client + upgradeDetails *details.Details + diskSpaceErrorFunc func(error) error } // NewDownloader creates and configures Elastic Downloader -func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (*Downloader, error) { +func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (*Downloader, error) { client, err := config.HTTPTransportSettings.Client( httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, @@ -62,16 +63,17 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails * } client.Transport = download.WithHeaders(client.Transport, download.Headers) - return NewDownloaderWithClient(log, config, *client, upgradeDetails), nil + return NewDownloaderWithClient(log, config, *client, upgradeDetails, diskSpaceErrorFunc), nil } // NewDownloaderWithClient creates Elastic Downloader with specific client used -func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client http.Client, upgradeDetails *details.Details) *Downloader { +func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client http.Client, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) *Downloader { return &Downloader{ - log: log, - config: config, - client: client, - upgradeDetails: upgradeDetails, + log: log, + config: config, + client: client, + upgradeDetails: upgradeDetails, + diskSpaceErrorFunc: diskSpaceErrorFunc, } } @@ -211,7 +213,7 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) detailsObserver := newDetailsProgressObserver(e.upgradeDetails) - dp := newDownloadProgressReporter(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) + dp := newDownloadProgressReporter(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, e.diskSpaceErrorFunc, loggingObserver, detailsObserver) dp.Report(ctx) _, err = io.Copy(destinationFile, io.TeeReader(resp.Body, dp)) if err != nil { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index bf0ebca963f..b1bea0122e9 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -64,7 +64,7 @@ func TestDownload(t *testing.T) { config.Architecture = testCase.arch upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, elasticClient, upgradeDetails) + testClient := NewDownloaderWithClient(log, config, elasticClient, upgradeDetails, nil) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) if err != nil { t.Fatal(err) @@ -114,7 +114,7 @@ func TestDownloadBodyError(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) + testClient := NewDownloaderWithClient(log, config, 
*client, upgradeDetails, nil) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) os.Remove(artifactPath) if err == nil { @@ -171,7 +171,7 @@ func TestDownloadLogProgressWithLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) + testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails, nil) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) os.Remove(artifactPath) require.NoError(t, err, "Download should not have errored") @@ -254,7 +254,7 @@ func TestDownloadLogProgressWithoutLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) + testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails, nil) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) os.Remove(artifactPath) require.NoError(t, err, "Download should not have errored") @@ -516,7 +516,7 @@ func TestDownloadVersion(t *testing.T) { config := tt.fields.config config.SourceURI = server.URL config.TargetDirectory = targetDirPath - downloader := NewDownloaderWithClient(log, config, *elasticClient, upgradeDetails) + downloader := NewDownloaderWithClient(log, config, *elasticClient, upgradeDetails, nil) got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go index d2139d7e602..4af259a13bf 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go @@ -19,23 +19,25 @@ type downloadProgressReporter struct { downloaded atomic.Int64 started time.Time - progressObservers []progressObserver - done chan struct{} + progressObservers []progressObserver + done chan struct{} + diskSpaceErrorFunc func(error) error } -func newDownloadProgressReporter(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) *downloadProgressReporter { +func newDownloadProgressReporter(sourceURI string, timeout time.Duration, length int, diskSpaceErrorFunc func(error) error, progressObservers ...progressObserver) *downloadProgressReporter { interval := time.Duration(float64(timeout) * downloadProgressIntervalPercentage) if interval == 0 { interval = downloadProgressMinInterval } return &downloadProgressReporter{ - sourceURI: sourceURI, - interval: interval, - warnTimeout: time.Duration(float64(timeout) * warningProgressIntervalPercentage), - length: float64(length), - progressObservers: progressObservers, - done: make(chan struct{}), + sourceURI: sourceURI, + interval: interval, + warnTimeout: time.Duration(float64(timeout) * warningProgressIntervalPercentage), + length: float64(length), + diskSpaceErrorFunc: diskSpaceErrorFunc, + progressObservers: progressObservers, + done: make(chan struct{}), } } @@ -112,6 +114,7 @@ func (dp *downloadProgressReporter) ReportComplete() { // either ReportFailed or ReportComplete when they no longer need the downloadProgressReporter // to avoid resource leaks. 
func (dp *downloadProgressReporter) ReportFailed(err error) { + err = dp.diskSpaceErrorFunc(err) defer close(dp.done) // If there are no observers to report progress to, there is nothing to do! diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go index 248fc49ac19..4290d0ea24f 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go @@ -95,7 +95,7 @@ func runTests(t *testing.T, testCases []testCase, td *testDials, config *artifac upgradeDetails := details.NewDetails( "8.12.0", details.StateRequested, "") - downloader, err := NewDownloader(log, config, upgradeDetails) + downloader, err := NewDownloader(log, config, upgradeDetails, nil) require.NoError(t, err, "could not create new downloader") pkgPath, err := downloader.Download(cancelCtx, beatSpec, version) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go index f3c59d080b6..0ae7b1f19a7 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go @@ -18,16 +18,16 @@ import ( // NewDownloader creates a downloader which first checks local directory // and then fallbacks to remote if configured. -func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { +func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { downloaders := make([]download.Downloader, 0, 3) - downloaders = append(downloaders, fs.NewDownloader(config)) + downloaders = append(downloaders, fs.NewDownloader(config, diskSpaceErrorFunc)) // If the current build is a snapshot we use this downloader to update // to the latest snapshot of the same version. Useful for testing with // a snapshot version of fleet, for example. 
// try snapshot repo before official if release.Snapshot() { - snapDownloader, err := snapshot.NewDownloader(log, config, nil, upgradeDetails) + snapDownloader, err := snapshot.NewDownloader(log, config, nil, upgradeDetails, diskSpaceErrorFunc) if err != nil { log.Error(err) } else { @@ -35,7 +35,7 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails * } } - httpDownloader, err := http.NewDownloader(log, config, upgradeDetails) + httpDownloader, err := http.NewDownloader(log, config, upgradeDetails, diskSpaceErrorFunc) if err != nil { return nil, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go index 57fbac689aa..f66b912ebcc 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go @@ -36,7 +36,7 @@ type Downloader struct { // We need to pass the versionOverride separately from the config as // artifact.Config struct is part of agent configuration and a version // override makes no sense there -func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, upgradeDetails *details.Details) (download.Downloader, error) { +func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { client, err := config.HTTPTransportSettings.Client( httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, @@ -45,17 +45,17 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride return nil, err } - return NewDownloaderWithClient(log, config, versionOverride, client, upgradeDetails) + return NewDownloaderWithClient(log, config, versionOverride, client, upgradeDetails, diskSpaceErrorFunc) } -func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, client *gohttp.Client, upgradeDetails *details.Details) (download.Downloader, error) { +func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, client *gohttp.Client, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { // TODO: decide an appropriate timeout for this cfg, err := snapshotConfig(context.TODO(), client, config, versionOverride) if err != nil { return nil, fmt.Errorf("error creating snapshot config: %w", err) } - httpDownloader := http.NewDownloaderWithClient(log, cfg, *client, upgradeDetails) + httpDownloader := http.NewDownloaderWithClient(log, cfg, *client, upgradeDetails, diskSpaceErrorFunc) return &Downloader{ downloader: httpDownloader, diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go index c9bcd20a071..6fd3ecbc323 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go @@ -138,7 +138,7 @@ func TestDownloadVersion(t *testing.T) { _ = s return net.Dial(network, server.Listener.Addr().String()) } - downloader, err := NewDownloaderWithClient(log, config, 
tt.args.version, client, upgradeDetails) + downloader, err := NewDownloaderWithClient(log, config, tt.args.version, client, upgradeDetails, nil) require.NoError(t, err) got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go new file mode 100644 index 00000000000..90efbbb1898 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go @@ -0,0 +1,3 @@ +package upgrade + +const insufficientDiskSpaceErrorStr = "insufficient disk space" diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go new file mode 100644 index 00000000000..d02d7a0001d --- /dev/null +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go @@ -0,0 +1,17 @@ +//go:build !windows + +package upgrade + +import ( + "errors" + "syscall" +) + +// ToDiskSpaceError returns a generic disk space error if the error is a disk space error +func ToDiskSpaceError(err error) error { + if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { + return errors.New(insufficientDiskSpaceErrorStr) + } + + return err +} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go new file mode 100644 index 00000000000..aaa704ddda3 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go @@ -0,0 +1,32 @@ +//go:build !windows + +package upgrade + +import ( + "errors" + "fmt" + "syscall" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestToDiskSpaceError(t *testing.T) { + tests := map[string]struct { + err error + want error + }{ + "ENOSPC": {err: syscall.ENOSPC, want: errors.New(insufficientDiskSpaceErrorStr)}, + "EDQUOT": {err: syscall.EDQUOT, want: errors.New(insufficientDiskSpaceErrorStr)}, + "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: errors.New(insufficientDiskSpaceErrorStr)}, + "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: errors.New(insufficientDiskSpaceErrorStr)}, + "other error": {err: errors.New("some other error"), want: errors.New("some other error")}, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + got := ToDiskSpaceError(test.err) + require.Equal(t, test.want, got) + }) + } +} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go new file mode 100644 index 00000000000..a9e97fd0921 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go @@ -0,0 +1,18 @@ +//go:build windows + +package upgrade + +import ( + "errors" + + winSys "golang.org/x/sys/windows" +) + +// ToDiskSpaceError returns a generic disk space error if the error is a disk space error +func ToDiskSpaceError(err error) error { + if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { + return errors.New(insufficientDiskSpaceErrorStr) + } + + return err +} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go new file mode 100644 index 
00000000000..2c7f4d77921 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go @@ -0,0 +1,32 @@ +//go:build windows + +package upgrade + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + winSys "golang.org/x/sys/windows" +) + +func TestToDiskSpaceError(t *testing.T) { + tests := map[string]struct { + err error + want error + }{ + "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: errors.New(insufficientDiskSpaceErrorStr)}, + "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: errors.New(insufficientDiskSpaceErrorStr)}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: errors.New(insufficientDiskSpaceErrorStr)}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: errors.New(insufficientDiskSpaceErrorStr)}, + "other error": {err: errors.New("some other error"), want: errors.New("some other error")}, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + got := ToDiskSpaceError(test.err) + require.Equal(t, test.want, got) + }) + } +} diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 58d56c81f52..6e80bf35e23 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -36,9 +36,9 @@ const ( fleetUpgradeFallbackPGPFormat = "/api/agents/upgrades/%d.%d.%d/pgp-public-key" ) -type downloaderFactory func(*agtversion.ParsedSemVer, *logger.Logger, *artifact.Config, *details.Details) (download.Downloader, error) +type downloaderFactory func(*agtversion.ParsedSemVer, *logger.Logger, *artifact.Config, *details.Details, func(error) error) (download.Downloader, error) -type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details) (string, error) +type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details, func(error) error) (string, error) func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ string, err error) { span, ctx := apm.StartSpan(ctx, "downloadArtifact", "app.internal") @@ -66,8 +66,8 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set specific downloader, local file just uses the fs.NewDownloader // no fallback is allowed because it was requested that this specific source be used - factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fs.NewDownloader(config), nil + factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + return fs.NewDownloader(config, diskSpaceErrorFunc), nil } // set specific verifier, local file verifies locally only @@ -100,7 +100,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi return "", errors.New(err, fmt.Sprintf("failed to create download directory at %s", paths.Downloads())) } - path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails) + path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, 
upgradeDetails, u.diskSpaceErrorFunc) if err != nil { return "", errors.New(err, "failed download of agent binary") } @@ -148,25 +148,25 @@ func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgp return pgpBytes } -func newDownloader(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { +func newDownloader(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { if !version.IsSnapshot() { - return localremote.NewDownloader(log, settings, upgradeDetails) + return localremote.NewDownloader(log, settings, upgradeDetails, diskSpaceErrorFunc) } // TODO since we know if it's a snapshot or not, shouldn't we add EITHER the snapshot downloader OR the release one ? // try snapshot repo before official - snapDownloader, err := snapshot.NewDownloader(log, settings, version, upgradeDetails) + snapDownloader, err := snapshot.NewDownloader(log, settings, version, upgradeDetails, diskSpaceErrorFunc) if err != nil { return nil, err } - httpDownloader, err := http.NewDownloader(log, settings, upgradeDetails) + httpDownloader, err := http.NewDownloader(log, settings, upgradeDetails, diskSpaceErrorFunc) if err != nil { return nil, err } - return composed.NewDownloader(fs.NewDownloader(settings), snapDownloader, httpDownloader), nil + return composed.NewDownloader(fs.NewDownloader(settings, diskSpaceErrorFunc), snapDownloader, httpDownloader), nil } func newVerifier(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config) (download.Verifier, error) { @@ -200,8 +200,9 @@ func (u *Upgrader) downloadOnce( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, + diskSpaceErrorFunc func(error) error, ) (string, error) { - downloader, err := factory(version, u.log, settings, upgradeDetails) + downloader, err := factory(version, u.log, settings, upgradeDetails, diskSpaceErrorFunc) if err != nil { return "", fmt.Errorf("unable to create fetcher: %w", err) } @@ -223,8 +224,10 @@ func (u *Upgrader) downloadWithRetries( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, + diskSpaceErrorFunc func(error) error, ) (string, error) { - cancelDeadline := time.Now().Add(settings.Timeout) + // cancelDeadline := time.Now().Add(settings.Timeout) + cancelDeadline := time.Now().Add(5 * time.Minute) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) defer cancel() @@ -237,12 +240,17 @@ func (u *Upgrader) downloadWithRetries( var path string var attempt uint + var downloadErr error + opFn := func() error { attempt++ u.log.Infof("download attempt %d", attempt) var err error - path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails) + path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails, diskSpaceErrorFunc) if err != nil { + if !errors.Is(err, context.DeadlineExceeded) { + downloadErr = err + } return err } return nil @@ -255,6 +263,9 @@ func (u *Upgrader) downloadWithRetries( } if err := backoff.RetryNotify(opFn, boCtx, opFailureNotificationFn); err != nil { + if downloadErr != nil { + return "", downloadErr + } return "", err } diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index f1e20427c25..e4b1ff5a05e 100644 --- 
a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -87,11 +87,11 @@ func TestDownloadWithRetries(t *testing.T) { // Successful immediately (no retries) t.Run("successful_immediately", func(t *testing.T) { - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { return &mockDownloader{expectedDownloadPath, nil}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -100,7 +100,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -122,7 +122,7 @@ func TestDownloadWithRetries(t *testing.T) { // Downloader constructor failing on first attempt, but succeeding on second attempt (= first retry) t.Run("constructor_failure_once", func(t *testing.T) { attemptIdx := 0 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { defer func() { attemptIdx++ }() @@ -141,7 +141,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -150,7 +150,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -177,7 +177,7 @@ func TestDownloadWithRetries(t *testing.T) { // Download failing on first attempt, but succeeding on second attempt (= first retry) t.Run("download_failure_once", func(t *testing.T) { attemptIdx := 0 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, 
upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { defer func() { attemptIdx++ }() @@ -196,7 +196,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -205,7 +205,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -237,11 +237,11 @@ func TestDownloadWithRetries(t *testing.T) { // exponential backoff with 10ms init and 500ms timeout should fit at least 3 attempts. minNmExpectedAttempts := 3 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { return &mockDownloader{"", errors.New("download failed")}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -250,7 +250,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(testCaseSettings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails, nil) require.Equal(t, "context deadline exceeded", err.Error()) require.Equal(t, "", path) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 19d3b67cb2b..f7db23d7d80 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -70,12 +70,13 @@ func init() { // Upgrader performs an upgrade type Upgrader struct { - log *logger.Logger - settings *artifact.Config - agentInfo info.Agent - upgradeable bool - fleetServerURI string - markerWatcher MarkerWatcher + log *logger.Logger + settings *artifact.Config + agentInfo info.Agent + upgradeable bool + fleetServerURI string + markerWatcher MarkerWatcher + diskSpaceErrorFunc func(error) error } // IsUpgradeable when agent is installed and running as a service or flag was provided. 
@@ -86,13 +87,14 @@ func IsUpgradeable() bool { } // NewUpgrader creates an upgrader which is capable of performing upgrade operation -func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent) (*Upgrader, error) { +func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent, diskSpaceErrorFunc func(error) error) (*Upgrader, error) { return &Upgrader{ - log: log, - settings: settings, - agentInfo: agentInfo, - upgradeable: IsUpgradeable(), - markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), + log: log, + settings: settings, + agentInfo: agentInfo, + upgradeable: IsUpgradeable(), + markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), + diskSpaceErrorFunc: diskSpaceErrorFunc, }, nil } From f0a1bce62bcb581713f952c3cd89acf0627a0b5e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 01:12:59 +0300 Subject: [PATCH 002/127] enhancement(5235): updated error wrapping --- .../application/upgrade/artifact/download/http/downloader.go | 2 +- internal/pkg/agent/application/upgrade/step_download.go | 2 +- internal/pkg/agent/application/upgrade/upgrade.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index da11cf619dc..9d3b5e562e1 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -219,7 +219,7 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f if err != nil { dp.ReportFailed(err) // return path, file already exists and needs to be cleaned up - return fullPath, errors.New(err, "copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) + return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } dp.ReportComplete() diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 6e80bf35e23..cd1a086fd55 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -102,7 +102,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails, u.diskSpaceErrorFunc) if err != nil { - return "", errors.New(err, "failed download of agent binary") + return "", fmt.Errorf("failed download of agent binary: %w", err) } if skipVerifyOverride { diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index f7db23d7d80..0f8e8e3b8ab 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -247,7 +247,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) } - return nil, err + return nil, u.diskSpaceErrorFunc(err) } det.SetState(details.StateExtracting) From c12f10ed3aa5454ec8f175b4adea924eb8f5f353 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 01:15:07 +0300 Subject: [PATCH 003/127] enhancement(5235): revert test timeout --- 
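Note below the cut, not part of any commit in this series: a minimal, illustrative Go sketch of how the injected disk-space mapping from these patches is expected to interact with the download retry loop in step_download.go. Mapping ENOSPC/EDQUOT to a backoff.Permanent error (as the later patches in this series do) makes backoff.RetryNotify stop after the first failed attempt instead of retrying until the configured timeout. The names main, fakeDownload and errInsufficientDiskSpace below are assumptions for the example only; the errno mapping and the backoff calls are the parts that reflect the patched code.

package main

import (
	"errors"
	"fmt"
	"syscall"
	"time"

	"github.com/cenkalti/backoff/v4"
)

// errInsufficientDiskSpace stands in for the "insufficient disk space" sentinel
// introduced later in this series (illustrative only).
var errInsufficientDiskSpace = errors.New("insufficient disk space")

// toDiskSpaceError mirrors the Unix mapping: well-known "disk full" errnos are
// converted to a single user-readable error wrapped as permanent, so the retry
// loop gives up immediately instead of retrying a hopeless download.
func toDiskSpaceError(err error) error {
	if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) {
		return backoff.Permanent(errInsufficientDiskSpace)
	}
	return err
}

// fakeDownload is a hypothetical stand-in for Upgrader.downloadOnce that always
// fails as if the destination disk were full.
func fakeDownload() error {
	return fmt.Errorf("copying fetched package failed: %w", syscall.ENOSPC)
}

func main() {
	op := func() error { return toDiskSpaceError(fakeDownload()) }

	bo := backoff.NewExponentialBackOff()
	bo.MaxElapsedTime = 5 * time.Second

	notify := func(err error, next time.Duration) {
		fmt.Printf("retrying in %s after: %v\n", next, err)
	}

	// The permanent error short-circuits the retries; the caller sees the clean
	// "insufficient disk space" message instead of a raw ENOSPC.
	err := backoff.RetryNotify(op, bo, notify)
	fmt.Println("final error:", err)
}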
internal/pkg/agent/application/upgrade/step_download.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index cd1a086fd55..11a23beddda 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -226,8 +226,7 @@ func (u *Upgrader) downloadWithRetries( upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error, ) (string, error) { - // cancelDeadline := time.Now().Add(settings.Timeout) - cancelDeadline := time.Now().Add(5 * time.Minute) + cancelDeadline := time.Now().Add(settings.Timeout) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) defer cancel() From daeaf9228df6a4fc3e34d0dbdffadeabd58b24a6 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 01:27:32 +0300 Subject: [PATCH 004/127] enhancement(5235): added progress reporter test --- .../download/http/progress_reporter_test.go | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go new file mode 100644 index 00000000000..3ba5ebe84a7 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -0,0 +1,23 @@ +package http + +import ( + "fmt" + "testing" + "time" +) + +func TestReportFailed(t *testing.T) { + t.Run("should call diskSpaceErrorFunc with the provided error", func(t *testing.T) { + count := 0 + diskSpaceErrorFunc := func(err error) error { + count++ + return err + } + dp := newDownloadProgressReporter("test", 10*time.Second, 100, diskSpaceErrorFunc) + dp.ReportFailed(fmt.Errorf("test")) + + if count != 1 { + t.Errorf("expected diskSpaceErrorFunc to be called once, got %d", count) + } + }) +} From 7a2e118f82dca4f36478f2d5ef0d89158f22f963 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 16:05:01 +0300 Subject: [PATCH 005/127] enhancement(5235): added test for ReportFailed --- .../download/http/progress_reporter_test.go | 92 +++++++++++++++++-- 1 file changed, 84 insertions(+), 8 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go index 3ba5ebe84a7..af0aa19af7d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -1,23 +1,99 @@ package http import ( - "fmt" + "context" + "errors" "testing" "time" + + "github.com/stretchr/testify/require" ) +type mockProgressObserver struct { + reportFailedCalls []reportFailedCall +} + +type reportFailedCall struct { + sourceURI string + timePast time.Duration + downloadedBytes float64 + totalBytes float64 + percentComplete float64 + downloadRate float64 + err error +} + +func (m *mockProgressObserver) Report(sourceURI string, timePast time.Duration, downloadedBytes, totalBytes, percentComplete, downloadRate float64) { + // noop +} + +func (m *mockProgressObserver) ReportCompleted(sourceURI string, timePast time.Duration, downloadRate float64) { + // noop +} + +func (m *mockProgressObserver) 
ReportFailed(sourceURI string, timePast time.Duration, downloadedBytes, totalBytes, percentComplete, downloadRate float64, err error) { + m.reportFailedCalls = append(m.reportFailedCalls, reportFailedCall{ + sourceURI: sourceURI, + timePast: timePast, + downloadedBytes: downloadedBytes, + totalBytes: totalBytes, + percentComplete: percentComplete, + downloadRate: downloadRate, + err: err, + }) +} + func TestReportFailed(t *testing.T) { - t.Run("should call diskSpaceErrorFunc with the provided error", func(t *testing.T) { - count := 0 + t.Run("should call ReportFailed on all observers with correct parameters", func(t *testing.T) { + testErr := errors.New("test error") + convertedErr := errors.New("converted error") diskSpaceErrorFunc := func(err error) error { - count++ + if err == testErr { + return convertedErr + } return err } - dp := newDownloadProgressReporter("test", 10*time.Second, 100, diskSpaceErrorFunc) - dp.ReportFailed(fmt.Errorf("test")) - if count != 1 { - t.Errorf("expected diskSpaceErrorFunc to be called once, got %d", count) + observer1 := &mockProgressObserver{} + observer2 := &mockProgressObserver{} + observers := []progressObserver{observer1, observer2} + + dp := newDownloadProgressReporter("mockurl", 10*time.Second, 1000, diskSpaceErrorFunc, observers...) + + dp.downloaded.Store(500) + dp.started = time.Now().Add(-2 * time.Second) + + testCtx, cnFn := context.WithTimeout(t.Context(), 10*time.Second) + defer cnFn() + + dp.ReportFailed(testErr) + + select { + case <-testCtx.Done(): + t.Error("expected done channel to be closed") + case <-dp.done: + // noop + } + + for _, obs := range observers { + mockObs, ok := obs.(*mockProgressObserver) + require.True(t, ok, "expected mockProgressObserver, got %T", obs) + + require.Equal(t, 1, len(mockObs.reportFailedCalls)) + + call := mockObs.reportFailedCalls[0] + + expected := reportFailedCall{ + sourceURI: "mockurl", + timePast: time.Now().Add(-2 * time.Second).Sub(dp.started), + downloadedBytes: 500, + totalBytes: 1000, + percentComplete: 50.0, + downloadRate: 250.0, + err: testErr, + } + + require.NotEqual(t, expected, call) } }) } From ffc4ec05d62d3f8a55fe938e1dc694d40075c097 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 18:10:14 +0300 Subject: [PATCH 006/127] enhancement(5235): created disk space error type, wrapping backoff permanent error type --- .../upgrade/insufficient_disk_space_err.go | 25 +++++++++++++ .../insufficient_disk_space_err_unix.go | 2 +- .../insufficient_disk_space_err_unix_test.go | 35 ++++++++++++++----- .../insufficient_disk_space_err_windows.go | 2 +- ...nsufficient_disk_space_err_windows_test.go | 35 ++++++++++++++----- 5 files changed, 79 insertions(+), 20 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go index 90efbbb1898..5ccfd15e3c2 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go @@ -1,3 +1,28 @@ package upgrade +import ( + "errors" + + "github.com/cenkalti/backoff/v4" +) + const insufficientDiskSpaceErrorStr = "insufficient disk space" + +var insufficientDiskSpaceErr = &InsufficientDiskSpaceError{Err: backoff.Permanent(errors.New(insufficientDiskSpaceErrorStr))} + +type InsufficientDiskSpaceError struct { + Err error +} + +func (e *InsufficientDiskSpaceError) Error() string { + return e.Err.Error() +} + +func (e *InsufficientDiskSpaceError) Unwrap() 
error { + return e.Err +} + +func (e *InsufficientDiskSpaceError) Is(target error) bool { + _, ok := target.(*InsufficientDiskSpaceError) + return ok +} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go index d02d7a0001d..751b0296d7f 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go @@ -10,7 +10,7 @@ import ( // ToDiskSpaceError returns a generic disk space error if the error is a disk space error func ToDiskSpaceError(err error) error { if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { - return errors.New(insufficientDiskSpaceErrorStr) + return insufficientDiskSpaceErr } return err diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go index aaa704ddda3..aedab33ca91 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go @@ -3,30 +3,47 @@ package upgrade import ( - "errors" "fmt" "syscall" "testing" + "github.com/cenkalti/backoff/v4" "github.com/stretchr/testify/require" ) +type mockError struct { + msg string +} + +func (e *mockError) Error() string { + return e.msg +} + +func (e *mockError) Is(target error) bool { + _, ok := target.(*mockError) + return ok +} + func TestToDiskSpaceError(t *testing.T) { tests := map[string]struct { - err error - want error + err error + want error + permanentError bool }{ - "ENOSPC": {err: syscall.ENOSPC, want: errors.New(insufficientDiskSpaceErrorStr)}, - "EDQUOT": {err: syscall.EDQUOT, want: errors.New(insufficientDiskSpaceErrorStr)}, - "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: errors.New(insufficientDiskSpaceErrorStr)}, - "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: errors.New(insufficientDiskSpaceErrorStr)}, - "other error": {err: errors.New("some other error"), want: errors.New("some other error")}, + "ENOSPC": {err: syscall.ENOSPC, want: insufficientDiskSpaceErr, permanentError: true}, + "EDQUOT": {err: syscall.EDQUOT, want: insufficientDiskSpaceErr, permanentError: true}, + "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: insufficientDiskSpaceErr, permanentError: true}, + "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: insufficientDiskSpaceErr, permanentError: true}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, } for name, test := range tests { t.Run(name, func(t *testing.T) { got := ToDiskSpaceError(test.err) - require.Equal(t, test.want, got) + if test.permanentError { + require.ErrorIs(t, got, &backoff.PermanentError{}) + } + require.ErrorIs(t, got, test.want) }) } } diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go index a9e97fd0921..2f25df5da75 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go @@ -11,7 +11,7 @@ import ( // ToDiskSpaceError returns a generic disk space error if the error is a disk space error func 
ToDiskSpaceError(err error) error { if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { - return errors.New(insufficientDiskSpaceErrorStr) + return insufficientDiskSpaceErr } return err diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go index 2c7f4d77921..ee8353f8ee5 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go @@ -3,30 +3,47 @@ package upgrade import ( - "errors" "fmt" "testing" + "github.com/cenkalti/backoff/v4" "github.com/stretchr/testify/require" winSys "golang.org/x/sys/windows" ) +type mockError struct { + msg string +} + +func (e *mockError) Error() string { + return e.msg +} + +func (e *mockError) Is(target error) bool { + _, ok := target.(*mockError) + return ok +} func TestToDiskSpaceError(t *testing.T) { tests := map[string]struct { - err error - want error + err error + want error + permanentError bool }{ - "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: errors.New(insufficientDiskSpaceErrorStr)}, - "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: errors.New(insufficientDiskSpaceErrorStr)}, - "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: errors.New(insufficientDiskSpaceErrorStr)}, - "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: errors.New(insufficientDiskSpaceErrorStr)}, - "other error": {err: errors.New("some other error"), want: errors.New("some other error")}, + "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: insufficientDiskSpaceErr, permanentError: true}, + "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: insufficientDiskSpaceErr, permanentError: true}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: insufficientDiskSpaceErr, permanentError: true}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: insufficientDiskSpaceErr, permanentError: true}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, } for name, test := range tests { t.Run(name, func(t *testing.T) { got := ToDiskSpaceError(test.err) - require.Equal(t, test.want, got) + + if test.permanentError { + require.ErrorIs(t, got, &backoff.PermanentError{}) + } + require.ErrorIs(t, got, test.want) }) } } From 9d072f98b69aa83a6f4f5738a507665575a4a4a7 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 25 Jul 2025 18:10:59 +0300 Subject: [PATCH 007/127] enhancement(5235): reverted downloader with retry changes --- internal/pkg/agent/application/upgrade/step_download.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 11a23beddda..3c052f615c5 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -239,17 +239,12 @@ func (u *Upgrader) downloadWithRetries( var path string var attempt uint - var downloadErr error - opFn := func() error { attempt++ u.log.Infof("download attempt %d", attempt) var err error path, err = u.downloadOnce(cancelCtx, factory, version, settings, 
upgradeDetails, diskSpaceErrorFunc) if err != nil { - if !errors.Is(err, context.DeadlineExceeded) { - downloadErr = err - } return err } return nil @@ -262,9 +257,6 @@ func (u *Upgrader) downloadWithRetries( } if err := backoff.RetryNotify(opFn, boCtx, opFailureNotificationFn); err != nil { - if downloadErr != nil { - return "", downloadErr - } return "", err } From 9f5c84acfc9a35aee837668a625de63adf832bea Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 26 Jul 2025 02:36:31 +0300 Subject: [PATCH 008/127] enhancement(5235): added insufficient disk space error type, moved error conversion into the downloader for http downloader --- internal/pkg/agent/application/application.go | 2 +- .../agent/application/coordinator/coordinator.go | 10 ++++++++++ .../coordinator/coordinator_unit_test.go | 2 +- .../upgrade/artifact/download/http/downloader.go | 1 + .../upgrade/insufficient_disk_space_err.go | 2 +- .../upgrade/insufficient_disk_space_err_unix.go | 15 ++++++++++----- .../insufficient_disk_space_err_unix_test.go | 14 +++++++++----- .../insufficient_disk_space_err_windows.go | 15 ++++++++++----- .../insufficient_disk_space_err_windows_test.go | 14 +++++++++----- .../agent/application/upgrade/step_download.go | 3 ++- internal/pkg/agent/application/upgrade/upgrade.go | 2 +- 11 files changed, 55 insertions(+), 25 deletions(-) diff --git a/internal/pkg/agent/application/application.go b/internal/pkg/agent/application/application.go index 6fb4056850c..52c41434ea3 100644 --- a/internal/pkg/agent/application/application.go +++ b/internal/pkg/agent/application/application.go @@ -120,7 +120,7 @@ func New( // monitoring is not supported in bootstrap mode https://github.com/elastic/elastic-agent/issues/1761 isMonitoringSupported := !disableMonitoring && cfg.Settings.V1MonitoringEnabled - upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo, upgrade.ToDiskSpaceError) + upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo, upgrade.ToDiskSpaceErrorFunc(log)) if err != nil { return nil, nil, nil, fmt.Errorf("failed to create upgrader: %w", err) } diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go index 4e83aba52ee..ef1f80c14a6 100644 --- a/internal/pkg/agent/application/coordinator/coordinator.go +++ b/internal/pkg/agent/application/coordinator/coordinator.go @@ -745,6 +745,16 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str det.SetState(details.StateCompleted) return c.upgradeMgr.AckAction(ctx, c.fleetAcker, action) } + + // c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgrade.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgrade.ErrInsufficientDiskSpace)) + // if errors.Is(err, upgrade.ErrInsufficientDiskSpace) { + // c.logger.Infof("insufficient disk space: %v", err) + // c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgrade.ErrInsufficientDiskSpace.Err) + // err = upgrade.ErrInsufficientDiskSpace.Err + // c.logger.Infof("After unwrapping, error is now: %v (type: %T)", err, err) + // } + + // c.logger.Errorf("Setting upgrade details to failed with final error: %v", err) det.Fail(err) return err } diff --git a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go index e7cee01e99d..0f46f922f85 100644 --- a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go +++ 
b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go @@ -460,7 +460,7 @@ func TestCoordinatorReportsInvalidPolicy(t *testing.T) { log, &artifact.Config{}, &info.AgentInfo{}, - upgrade.ToDiskSpaceError, + upgrade.ToDiskSpaceErrorFunc(log), ) require.NoError(t, err, "errored when creating a new upgrader") diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 9d3b5e562e1..53bcd083c39 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -217,6 +217,7 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f dp.Report(ctx) _, err = io.Copy(destinationFile, io.TeeReader(resp.Body, dp)) if err != nil { + err = e.diskSpaceErrorFunc(err) dp.ReportFailed(err) // return path, file already exists and needs to be cleaned up return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go index 5ccfd15e3c2..3108f510a63 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go @@ -8,7 +8,7 @@ import ( const insufficientDiskSpaceErrorStr = "insufficient disk space" -var insufficientDiskSpaceErr = &InsufficientDiskSpaceError{Err: backoff.Permanent(errors.New(insufficientDiskSpaceErrorStr))} +var ErrInsufficientDiskSpace = &InsufficientDiskSpaceError{Err: backoff.Permanent(errors.New(insufficientDiskSpaceErrorStr))} type InsufficientDiskSpaceError struct { Err error diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go index 751b0296d7f..dca8e08d009 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go @@ -5,13 +5,18 @@ package upgrade import ( "errors" "syscall" + + "github.com/elastic/elastic-agent/pkg/core/logger" ) // ToDiskSpaceError returns a generic disk space error if the error is a disk space error -func ToDiskSpaceError(err error) error { - if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { - return insufficientDiskSpaceErr - } +func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { + return func(err error) error { + if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { + log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) + return ErrInsufficientDiskSpace + } - return err + return err + } } diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go index aedab33ca91..0cef992bdbc 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/cenkalti/backoff/v4" + "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" ) @@ -30,16 +31,19 
@@ func TestToDiskSpaceError(t *testing.T) { want error permanentError bool }{ - "ENOSPC": {err: syscall.ENOSPC, want: insufficientDiskSpaceErr, permanentError: true}, - "EDQUOT": {err: syscall.EDQUOT, want: insufficientDiskSpaceErr, permanentError: true}, - "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: insufficientDiskSpaceErr, permanentError: true}, - "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: insufficientDiskSpaceErr, permanentError: true}, + "ENOSPC": {err: syscall.ENOSPC, want: ErrInsufficientDiskSpace, permanentError: true}, + "EDQUOT": {err: syscall.EDQUOT, want: ErrInsufficientDiskSpace, permanentError: true}, + "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: ErrInsufficientDiskSpace, permanentError: true}, + "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: ErrInsufficientDiskSpace, permanentError: true}, "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, } for name, test := range tests { t.Run(name, func(t *testing.T) { - got := ToDiskSpaceError(test.err) + log, err := logger.New("test", true) + require.NoError(t, err) + + got := ToDiskSpaceErrorFunc(log)(test.err) if test.permanentError { require.ErrorIs(t, got, &backoff.PermanentError{}) } diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go index 2f25df5da75..8bcba82ac92 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go @@ -5,14 +5,19 @@ package upgrade import ( "errors" + "github.com/elastic/elastic-agent/pkg/core/logger" + winSys "golang.org/x/sys/windows" ) // ToDiskSpaceError returns a generic disk space error if the error is a disk space error -func ToDiskSpaceError(err error) error { - if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { - return insufficientDiskSpaceErr - } +func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { + return func(err error) error { + if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { + log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) + return ErrInsufficientDiskSpace + } - return err + return err + } } diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go index ee8353f8ee5..6b98628c18a 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/cenkalti/backoff/v4" + "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" winSys "golang.org/x/sys/windows" ) @@ -29,16 +30,19 @@ func TestToDiskSpaceError(t *testing.T) { want error permanentError bool }{ - "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: insufficientDiskSpaceErr, permanentError: true}, - "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: insufficientDiskSpaceErr, permanentError: true}, - "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: insufficientDiskSpaceErr, 
permanentError: true}, - "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: insufficientDiskSpaceErr, permanentError: true}, + "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace, permanentError: true}, + "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace, permanentError: true}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace, permanentError: true}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace, permanentError: true}, "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, } for name, test := range tests { t.Run(name, func(t *testing.T) { - got := ToDiskSpaceError(test.err) + log, err := logger.New("test", true) + require.NoError(t, err) + + got := ToDiskSpaceErrorFunc(log)(test.err) if test.permanentError { require.ErrorIs(t, got, &backoff.PermanentError{}) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 3c052f615c5..3a821355228 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -226,7 +226,8 @@ func (u *Upgrader) downloadWithRetries( upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error, ) (string, error) { - cancelDeadline := time.Now().Add(settings.Timeout) + // cancelDeadline := time.Now().Add(settings.Timeout) + cancelDeadline := time.Now().Add(10 * time.Minute) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) defer cancel() diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 0f8e8e3b8ab..f7db23d7d80 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -247,7 +247,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) } - return nil, u.diskSpaceErrorFunc(err) + return nil, err } det.SetState(details.StateExtracting) From eb915fbf1d0f031067830a239f3ef88bf549d665 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 28 Jul 2025 20:24:19 +0300 Subject: [PATCH 009/127] enhancement(5235): updated insufficient disk error, removed backoff related wrapping. updated download with retries to wrap the error in permanent error if the error is insufficient disk error. 
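
A minimal sketch (illustrative only, not part of this change) of the retry-stopping behavior this patch relies on: wrapping an error in backoff.Permanent makes the backoff retry loop (as used by downloadWithRetries) stop immediately instead of exhausting its schedule. The names below (errDiskFull, op) are placeholders, not identifiers from this package:

    package main

    import (
        "errors"
        "fmt"

        "github.com/cenkalti/backoff/v4"
    )

    // errDiskFull stands in for the upgrade package's insufficient disk space error.
    var errDiskFull = errors.New("insufficient disk space")

    func main() {
        attempts := 0
        op := func() error {
            attempts++
            err := errDiskFull // pretend the download failed with a disk space error
            if errors.Is(err, errDiskFull) {
                // backoff.Permanent tells backoff.Retry / RetryNotify to stop retrying at once.
                return backoff.Permanent(err)
            }
            return err
        }

        retryErr := backoff.Retry(op, backoff.WithMaxRetries(backoff.NewExponentialBackOff(), 5))
        fmt.Println(attempts, retryErr) // 1 attempt; the remaining retries are skipped
    }
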
--- .../application/coordinator/coordinator.go | 18 +++--- .../artifact/download/fs/downloader.go | 4 +- .../artifact/download/fs/downloader_test.go | 47 ++++++++++++++ .../artifact/download/http/downloader.go | 6 +- .../artifact/download/http/downloader_test.go | 61 +++++++++++++++++++ .../download/http/progress_reporter.go | 1 - .../upgrade/insufficient_disk_space_err.go | 4 +- .../insufficient_disk_space_err_unix_test.go | 19 +++--- ...nsufficient_disk_space_err_windows_test.go | 19 +++--- .../application/upgrade/step_download.go | 9 ++- .../application/upgrade/step_download_test.go | 31 ++++++++++ 11 files changed, 177 insertions(+), 42 deletions(-) diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go index ef1f80c14a6..1919b104e58 100644 --- a/internal/pkg/agent/application/coordinator/coordinator.go +++ b/internal/pkg/agent/application/coordinator/coordinator.go @@ -746,15 +746,15 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str return c.upgradeMgr.AckAction(ctx, c.fleetAcker, action) } - // c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgrade.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgrade.ErrInsufficientDiskSpace)) - // if errors.Is(err, upgrade.ErrInsufficientDiskSpace) { - // c.logger.Infof("insufficient disk space: %v", err) - // c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgrade.ErrInsufficientDiskSpace.Err) - // err = upgrade.ErrInsufficientDiskSpace.Err - // c.logger.Infof("After unwrapping, error is now: %v (type: %T)", err, err) - // } - - // c.logger.Errorf("Setting upgrade details to failed with final error: %v", err) + c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgrade.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgrade.ErrInsufficientDiskSpace)) + if errors.Is(err, upgrade.ErrInsufficientDiskSpace) { + c.logger.Infof("insufficient disk space detected: %v", err) + c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgrade.ErrInsufficientDiskSpace.Err) + err = upgrade.ErrInsufficientDiskSpace.Err + c.logger.Infof("After unwrapping, error is now: %v (type: %T)", err, err) + } + + c.logger.Errorf("Setting upgrade details to failed with final error: %v", err) det.Fail(err) return err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 7a0a63ce0db..b9c3238fcd6 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -28,6 +28,7 @@ type Downloader struct { dropPath string config *artifact.Config diskSpaceErrorFunc func(error) error + copyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } // NewDownloader creates and configures Elastic Downloader @@ -36,6 +37,7 @@ func NewDownloader(config *artifact.Config, diskSpaceErrorFunc func(error) error config: config, dropPath: getDropPath(config), diskSpaceErrorFunc: diskSpaceErrorFunc, + copyFunc: io.Copy, } } @@ -121,7 +123,7 @@ func (e *Downloader) downloadFile(filename, fullPath string) (string, error) { } defer destinationFile.Close() - _, err = io.Copy(destinationFile, sourceFile) + _, err = e.copyFunc(destinationFile, sourceFile) if err != nil { return "", e.diskSpaceErrorFunc(err) } diff --git 
a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index 67d2b3079d7..a23c60608b8 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -7,6 +7,7 @@ package fs import ( "context" "fmt" + "io" "os" "path/filepath" "testing" @@ -291,3 +292,49 @@ func TestDownloader_DownloadAsc(t *testing.T) { }) } } + +type testCopyError struct { + msg string +} + +func (e *testCopyError) Error() string { + return e.msg +} + +func (e *testCopyError) Is(target error) bool { + _, ok := target.(*testCopyError) + return ok +} + +func TestDownloader_downloadFile(t *testing.T) { + dropPath := t.TempDir() + targetDirPath := t.TempDir() + + createFiles(t, dropPath, []file{ + { + "elastic-agent-1.2.3-linux-x86_64.tar.gz", + []byte("mock content"), + }, + }) + + config := &artifact.Config{ + DropPath: dropPath, + TargetDirectory: targetDirPath, + } + + diskSpaceErrorFunc := func(err error) error { + return err + } + + copyFuncError := &testCopyError{msg: "mock error"} + + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + return 0, copyFuncError + } + e := NewDownloader(config, diskSpaceErrorFunc) + e.copyFunc = copyFunc + + _, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) + require.Error(t, err) + assert.ErrorIs(t, err, copyFuncError) +} diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 53bcd083c39..55ab8207a9a 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -50,6 +50,7 @@ type Downloader struct { client http.Client upgradeDetails *details.Details diskSpaceErrorFunc func(error) error + copyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } // NewDownloader creates and configures Elastic Downloader @@ -74,6 +75,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client client: client, upgradeDetails: upgradeDetails, diskSpaceErrorFunc: diskSpaceErrorFunc, + copyFunc: io.Copy, } } @@ -215,12 +217,12 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f detailsObserver := newDetailsProgressObserver(e.upgradeDetails) dp := newDownloadProgressReporter(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, e.diskSpaceErrorFunc, loggingObserver, detailsObserver) dp.Report(ctx) - _, err = io.Copy(destinationFile, io.TeeReader(resp.Body, dp)) + _, err = e.copyFunc(destinationFile, io.TeeReader(resp.Body, dp)) if err != nil { err = e.diskSpaceErrorFunc(err) dp.ReportFailed(err) // return path, file already exists and needs to be cleaned up - return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) + return fullPath, errors.New("copying fetched package failed", err, errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } dp.ReportComplete() diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index b1bea0122e9..f2a2f22bf67 100644 --- 
a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -528,3 +528,64 @@ func TestDownloadVersion(t *testing.T) { }) } } + +type testCopyError struct { + msg string +} + +func (e *testCopyError) Error() string { + return e.msg +} + +func (e *testCopyError) Is(target error) bool { + _, ok := target.(*testCopyError) + return ok +} + +func TestDownloadFile(t *testing.T) { + t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { + ctx := t.Context() + artifactName := "beat/elastic-agent" + filename := "elastic-agent-1.2.3-linux-x86_64.tar.gz" + fullPath := filepath.Join(t.TempDir(), filename) + + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write([]byte("mock content")) + })) + defer server.Close() + + config := &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + SourceURI: server.URL, + TargetDirectory: filepath.Dir(fullPath), + } + + log, _ := loggertest.New("downloader") + upgradeDetails := details.NewDetails("1.2.3", details.StateRequested, "") + + var receivedError error + diskSpaceErrorFunc := func(err error) error { + receivedError = err + return err + } + + copyFuncError := &testCopyError{msg: "mock error"} + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + return 0, copyFuncError + } + + downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails, diskSpaceErrorFunc) + downloader.copyFunc = copyFunc + + _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) + + assert.Error(t, err) + + assert.ErrorIs(t, err, copyFuncError) + + assert.Equal(t, copyFuncError, receivedError) + }) + +} diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go index 4af259a13bf..ee37eb7736a 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go @@ -114,7 +114,6 @@ func (dp *downloadProgressReporter) ReportComplete() { // either ReportFailed or ReportComplete when they no longer need the downloadProgressReporter // to avoid resource leaks. func (dp *downloadProgressReporter) ReportFailed(err error) { - err = dp.diskSpaceErrorFunc(err) defer close(dp.done) // If there are no observers to report progress to, there is nothing to do! 
diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go index 3108f510a63..b0833eb4d58 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go @@ -2,13 +2,11 @@ package upgrade import ( "errors" - - "github.com/cenkalti/backoff/v4" ) const insufficientDiskSpaceErrorStr = "insufficient disk space" -var ErrInsufficientDiskSpace = &InsufficientDiskSpaceError{Err: backoff.Permanent(errors.New(insufficientDiskSpaceErrorStr))} +var ErrInsufficientDiskSpace = &InsufficientDiskSpaceError{Err: errors.New(insufficientDiskSpaceErrorStr)} type InsufficientDiskSpaceError struct { Err error diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go index 0cef992bdbc..d5637b169e5 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go @@ -7,7 +7,6 @@ import ( "syscall" "testing" - "github.com/cenkalti/backoff/v4" "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" ) @@ -27,15 +26,14 @@ func (e *mockError) Is(target error) bool { func TestToDiskSpaceError(t *testing.T) { tests := map[string]struct { - err error - want error - permanentError bool + err error + want error }{ - "ENOSPC": {err: syscall.ENOSPC, want: ErrInsufficientDiskSpace, permanentError: true}, - "EDQUOT": {err: syscall.EDQUOT, want: ErrInsufficientDiskSpace, permanentError: true}, - "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: ErrInsufficientDiskSpace, permanentError: true}, - "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: ErrInsufficientDiskSpace, permanentError: true}, - "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, + "ENOSPC": {err: syscall.ENOSPC, want: ErrInsufficientDiskSpace}, + "EDQUOT": {err: syscall.EDQUOT, want: ErrInsufficientDiskSpace}, + "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: ErrInsufficientDiskSpace}, + "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: ErrInsufficientDiskSpace}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, } for name, test := range tests { @@ -44,9 +42,6 @@ func TestToDiskSpaceError(t *testing.T) { require.NoError(t, err) got := ToDiskSpaceErrorFunc(log)(test.err) - if test.permanentError { - require.ErrorIs(t, got, &backoff.PermanentError{}) - } require.ErrorIs(t, got, test.want) }) } diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go index 6b98628c18a..ca296654481 100644 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go +++ b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go @@ -6,7 +6,6 @@ import ( "fmt" "testing" - "github.com/cenkalti/backoff/v4" "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" winSys "golang.org/x/sys/windows" @@ -26,15 +25,14 @@ func (e *mockError) Is(target error) bool { } func 
TestToDiskSpaceError(t *testing.T) { tests := map[string]struct { - err error - want error - permanentError bool + err error + want error }{ - "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace, permanentError: true}, - "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace, permanentError: true}, - "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace, permanentError: true}, - "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace, permanentError: true}, - "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}, permanentError: false}, + "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace}, + "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, } for name, test := range tests { @@ -44,9 +42,6 @@ func TestToDiskSpaceError(t *testing.T) { got := ToDiskSpaceErrorFunc(log)(test.err) - if test.permanentError { - require.ErrorIs(t, got, &backoff.PermanentError{}) - } require.ErrorIs(t, got, test.want) }) } diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 3a821355228..7ae166683ce 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -226,8 +226,7 @@ func (u *Upgrader) downloadWithRetries( upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error, ) (string, error) { - // cancelDeadline := time.Now().Add(settings.Timeout) - cancelDeadline := time.Now().Add(10 * time.Minute) + cancelDeadline := time.Now().Add(settings.Timeout) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) defer cancel() @@ -246,6 +245,12 @@ func (u *Upgrader) downloadWithRetries( var err error path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails, diskSpaceErrorFunc) if err != nil { + + if errors.Is(err, ErrInsufficientDiskSpace) { + u.log.Infof("Insufficient disk space error detected, stopping retries") + return backoff.Permanent(err) + } + return err } return nil diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index e4b1ff5a05e..88b94a2053a 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -276,6 +276,37 @@ func TestDownloadWithRetries(t *testing.T) { require.NotEmpty(t, *upgradeDetailsRetryErrorMsg) require.Equal(t, *upgradeDetailsRetryErrorMsg, upgradeDetails.Metadata.RetryErrorMsg) }) + + t.Run("insufficient_disk_space_stops_retries", func(t *testing.T) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + return &mockDownloader{"", ErrInsufficientDiskSpace}, nil + } + + u, err := NewUpgrader(testLogger, 
&settings, &info.AgentInfo{}, nil) + require.NoError(t, err) + + parsedVersion, err := agtversion.ParseVersion("8.9.0") + require.NoError(t, err) + + upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) + + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) + + require.Error(t, err) + require.Equal(t, "", path) + + require.ErrorIs(t, err, ErrInsufficientDiskSpace) + + logs := obs.TakeAll() + require.Len(t, logs, 2) + require.Equal(t, "download attempt 1", logs[0].Message) + require.Contains(t, logs[1].Message, "Insufficient disk space error detected, stopping retries") + + require.NotZero(t, *upgradeDetailsRetryUntil) + require.False(t, *upgradeDetailsRetryUntilWasUnset) + + require.Empty(t, *upgradeDetailsRetryErrorMsg) + }) } // mockUpgradeDetails returns a *details.Details value that has an observer registered on it for inspecting From c4aa9857ea68a7fbe2073ada6c40fa799883a872 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 28 Jul 2025 20:52:12 +0300 Subject: [PATCH 010/127] enhancement(5235): moved disk space error into its own package under upgrades. reverted constructor signature changes. updated tests --- internal/pkg/agent/application/application.go | 2 +- .../application/coordinator/coordinator.go | 9 +-- .../coordinator/coordinator_unit_test.go | 1 - .../artifact/download/fs/downloader.go | 5 +- .../artifact/download/fs/downloader_test.go | 3 +- .../artifact/download/fs/verifier_test.go | 4 +- .../artifact/download/http/downloader.go | 11 ++-- .../artifact/download/http/downloader_test.go | 13 ++-- .../artifact/download/http/verifier_test.go | 2 +- .../download/localremote/downloader.go | 8 +-- .../artifact/download/snapshot/downloader.go | 8 +-- .../download/snapshot/downloader_test.go | 2 +- .../application/upgrade/errors/disk_space.go | 30 +++++++++ .../upgrade/errors/disk_space_unix.go | 28 ++++++++ .../upgrade/errors/disk_space_unix_test.go | 64 ++++++++++++++++++ .../upgrade/errors/disk_space_windows.go | 29 +++++++++ .../upgrade/errors/disk_space_windows_test.go | 65 +++++++++++++++++++ .../application/upgrade/step_download.go | 13 ++-- .../application/upgrade/step_download_test.go | 15 +++-- .../pkg/agent/application/upgrade/upgrade.go | 5 +- 20 files changed, 270 insertions(+), 47 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/errors/disk_space.go create mode 100644 internal/pkg/agent/application/upgrade/errors/disk_space_unix.go create mode 100644 internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go create mode 100644 internal/pkg/agent/application/upgrade/errors/disk_space_windows.go create mode 100644 internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go diff --git a/internal/pkg/agent/application/application.go b/internal/pkg/agent/application/application.go index 52c41434ea3..dcaa2ddc570 100644 --- a/internal/pkg/agent/application/application.go +++ b/internal/pkg/agent/application/application.go @@ -120,7 +120,7 @@ func New( // monitoring is not supported in bootstrap mode https://github.com/elastic/elastic-agent/issues/1761 isMonitoringSupported := !disableMonitoring && cfg.Settings.V1MonitoringEnabled - upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo, upgrade.ToDiskSpaceErrorFunc(log)) + upgrader, err := upgrade.NewUpgrader(log, cfg.Settings.DownloadConfig, agentInfo) if err != nil { return nil, nil, nil, 
fmt.Errorf("failed to create upgrader: %w", err) } diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go index 1919b104e58..2407a9f7eab 100644 --- a/internal/pkg/agent/application/coordinator/coordinator.go +++ b/internal/pkg/agent/application/coordinator/coordinator.go @@ -32,6 +32,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/reexec" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/storage" "github.com/elastic/elastic-agent/internal/pkg/agent/transpiler" @@ -746,11 +747,11 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str return c.upgradeMgr.AckAction(ctx, c.fleetAcker, action) } - c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgrade.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgrade.ErrInsufficientDiskSpace)) - if errors.Is(err, upgrade.ErrInsufficientDiskSpace) { + c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace)) + if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { c.logger.Infof("insufficient disk space detected: %v", err) - c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgrade.ErrInsufficientDiskSpace.Err) - err = upgrade.ErrInsufficientDiskSpace.Err + c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgradeErrors.ErrInsufficientDiskSpace.Err) + err = upgradeErrors.ErrInsufficientDiskSpace.Err c.logger.Infof("After unwrapping, error is now: %v (type: %T)", err, err) } diff --git a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go index 0f46f922f85..48c254d07cc 100644 --- a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go +++ b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go @@ -460,7 +460,6 @@ func TestCoordinatorReportsInvalidPolicy(t *testing.T) { log, &artifact.Config{}, &info.AgentInfo{}, - upgrade.ToDiskSpaceErrorFunc(log), ) require.NoError(t, err, "errored when creating a new upgrader") diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index b9c3238fcd6..3647ae04dc4 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" agtversion "github.com/elastic/elastic-agent/pkg/version" ) @@ -32,11 +33,11 @@ type Downloader struct { } // NewDownloader creates and configures Elastic Downloader -func NewDownloader(config *artifact.Config, diskSpaceErrorFunc func(error) error) 
*Downloader { +func NewDownloader(config *artifact.Config) *Downloader { return &Downloader{ config: config, dropPath: getDropPath(config), - diskSpaceErrorFunc: diskSpaceErrorFunc, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(nil), copyFunc: io.Copy, } } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index a23c60608b8..dc5b9f27763 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -331,8 +331,9 @@ func TestDownloader_downloadFile(t *testing.T) { copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { return 0, copyFuncError } - e := NewDownloader(config, diskSpaceErrorFunc) + e := NewDownloader(config) e.copyFunc = copyFunc + e.diskSpaceErrorFunc = diskSpaceErrorFunc _, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) require.Error(t, err) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go index 121237c4b1e..aa15d9a05b1 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go @@ -95,7 +95,7 @@ func TestFetchVerify(t *testing.T) { // second one should pass // download not skipped: package missing // verify passes because hash is not correct - _, err = NewDownloader(config, nil).Download(ctx, a, version) + _, err = NewDownloader(config).Download(ctx, a, version) require.NoError(t, err) asc, err := os.ReadFile(filepath.Join(dropPath, filename+".asc")) require.NoErrorf(t, err, "could not open .asc for copy") @@ -224,7 +224,7 @@ func TestVerify(t *testing.T) { pgpKey := prepareTestCase(t, agentSpec, testVersion, config) - testClient := NewDownloader(config, nil) + testClient := NewDownloader(config) artifactPath, err := testClient.Download(ctx, agentSpec, testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts") _, err = testClient.DownloadAsc(context.Background(), agentSpec, *testVersion) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 55ab8207a9a..5f2ff9a465c 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -21,6 +21,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/pkg/core/logger" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -54,7 +55,7 @@ type Downloader struct { } // NewDownloader creates and configures Elastic Downloader -func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (*Downloader, error) 
{ +func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (*Downloader, error) { client, err := config.HTTPTransportSettings.Client( httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, @@ -64,17 +65,17 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails * } client.Transport = download.WithHeaders(client.Transport, download.Headers) - return NewDownloaderWithClient(log, config, *client, upgradeDetails, diskSpaceErrorFunc), nil + return NewDownloaderWithClient(log, config, *client, upgradeDetails), nil } // NewDownloaderWithClient creates Elastic Downloader with specific client used -func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client http.Client, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) *Downloader { +func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client http.Client, upgradeDetails *details.Details) *Downloader { return &Downloader{ log: log, config: config, client: client, upgradeDetails: upgradeDetails, - diskSpaceErrorFunc: diskSpaceErrorFunc, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), copyFunc: io.Copy, } } @@ -222,7 +223,7 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f err = e.diskSpaceErrorFunc(err) dp.ReportFailed(err) // return path, file already exists and needs to be cleaned up - return fullPath, errors.New("copying fetched package failed", err, errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) + return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } dp.ReportComplete() diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index f2a2f22bf67..ea96c2ed269 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -64,7 +64,7 @@ func TestDownload(t *testing.T) { config.Architecture = testCase.arch upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, elasticClient, upgradeDetails, nil) + testClient := NewDownloaderWithClient(log, config, elasticClient, upgradeDetails) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) if err != nil { t.Fatal(err) @@ -114,7 +114,7 @@ func TestDownloadBodyError(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails, nil) + testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) os.Remove(artifactPath) if err == nil { @@ -171,7 +171,7 @@ func TestDownloadLogProgressWithLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails, nil) + testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) 
os.Remove(artifactPath) require.NoError(t, err, "Download should not have errored") @@ -254,7 +254,7 @@ func TestDownloadLogProgressWithoutLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") - testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails, nil) + testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) artifactPath, err := testClient.Download(context.Background(), beatSpec, version) os.Remove(artifactPath) require.NoError(t, err, "Download should not have errored") @@ -516,7 +516,7 @@ func TestDownloadVersion(t *testing.T) { config := tt.fields.config config.SourceURI = server.URL config.TargetDirectory = targetDirPath - downloader := NewDownloaderWithClient(log, config, *elasticClient, upgradeDetails, nil) + downloader := NewDownloaderWithClient(log, config, *elasticClient, upgradeDetails) got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) @@ -576,8 +576,9 @@ func TestDownloadFile(t *testing.T) { return 0, copyFuncError } - downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails, diskSpaceErrorFunc) + downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails) downloader.copyFunc = copyFunc + downloader.diskSpaceErrorFunc = diskSpaceErrorFunc _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go index 4290d0ea24f..248fc49ac19 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go @@ -95,7 +95,7 @@ func runTests(t *testing.T, testCases []testCase, td *testDials, config *artifac upgradeDetails := details.NewDetails( "8.12.0", details.StateRequested, "") - downloader, err := NewDownloader(log, config, upgradeDetails, nil) + downloader, err := NewDownloader(log, config, upgradeDetails) require.NoError(t, err, "could not create new downloader") pkgPath, err := downloader.Download(cancelCtx, beatSpec, version) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go index 0ae7b1f19a7..f3c59d080b6 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/localremote/downloader.go @@ -18,16 +18,16 @@ import ( // NewDownloader creates a downloader which first checks local directory // and then fallbacks to remote if configured. -func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { +func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { downloaders := make([]download.Downloader, 0, 3) - downloaders = append(downloaders, fs.NewDownloader(config, diskSpaceErrorFunc)) + downloaders = append(downloaders, fs.NewDownloader(config)) // If the current build is a snapshot we use this downloader to update // to the latest snapshot of the same version. Useful for testing with // a snapshot version of fleet, for example. 
// try snapshot repo before official if release.Snapshot() { - snapDownloader, err := snapshot.NewDownloader(log, config, nil, upgradeDetails, diskSpaceErrorFunc) + snapDownloader, err := snapshot.NewDownloader(log, config, nil, upgradeDetails) if err != nil { log.Error(err) } else { @@ -35,7 +35,7 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails * } } - httpDownloader, err := http.NewDownloader(log, config, upgradeDetails, diskSpaceErrorFunc) + httpDownloader, err := http.NewDownloader(log, config, upgradeDetails) if err != nil { return nil, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go index f66b912ebcc..57fbac689aa 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go @@ -36,7 +36,7 @@ type Downloader struct { // We need to pass the versionOverride separately from the config as // artifact.Config struct is part of agent configuration and a version // override makes no sense there -func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { +func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, upgradeDetails *details.Details) (download.Downloader, error) { client, err := config.HTTPTransportSettings.Client( httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, @@ -45,17 +45,17 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, versionOverride return nil, err } - return NewDownloaderWithClient(log, config, versionOverride, client, upgradeDetails, diskSpaceErrorFunc) + return NewDownloaderWithClient(log, config, versionOverride, client, upgradeDetails) } -func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, client *gohttp.Client, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { +func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, versionOverride *agtversion.ParsedSemVer, client *gohttp.Client, upgradeDetails *details.Details) (download.Downloader, error) { // TODO: decide an appropriate timeout for this cfg, err := snapshotConfig(context.TODO(), client, config, versionOverride) if err != nil { return nil, fmt.Errorf("error creating snapshot config: %w", err) } - httpDownloader := http.NewDownloaderWithClient(log, cfg, *client, upgradeDetails, diskSpaceErrorFunc) + httpDownloader := http.NewDownloaderWithClient(log, cfg, *client, upgradeDetails) return &Downloader{ downloader: httpDownloader, diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go index 6fd3ecbc323..c9bcd20a071 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go @@ -138,7 +138,7 @@ func TestDownloadVersion(t *testing.T) { _ = s return net.Dial(network, server.Listener.Addr().String()) } - downloader, err := NewDownloaderWithClient(log, config, 
tt.args.version, client, upgradeDetails, nil) + downloader, err := NewDownloaderWithClient(log, config, tt.args.version, client, upgradeDetails) require.NoError(t, err) got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space.go b/internal/pkg/agent/application/upgrade/errors/disk_space.go new file mode 100644 index 00000000000..a551837cb9d --- /dev/null +++ b/internal/pkg/agent/application/upgrade/errors/disk_space.go @@ -0,0 +1,30 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package errors + +import ( + "errors" +) + +const insufficientDiskSpaceErrorStr = "insufficient disk space" + +var ErrInsufficientDiskSpace = &InsufficientDiskSpaceError{Err: errors.New(insufficientDiskSpaceErrorStr)} + +type InsufficientDiskSpaceError struct { + Err error +} + +func (e *InsufficientDiskSpaceError) Error() string { + return e.Err.Error() +} + +func (e *InsufficientDiskSpaceError) Unwrap() error { + return e.Err +} + +func (e *InsufficientDiskSpaceError) Is(target error) bool { + _, ok := target.(*InsufficientDiskSpaceError) + return ok +} diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go b/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go new file mode 100644 index 00000000000..75542bca3d7 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go @@ -0,0 +1,28 @@ +//go:build !windows + +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package errors + +import ( + "errors" + "syscall" + + "github.com/elastic/elastic-agent/pkg/core/logger" +) + +// ToDiskSpaceError returns a generic disk space error if the error is a disk space error +func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { + return func(err error) error { + if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { + if log != nil { + log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) + } + return ErrInsufficientDiskSpace + } + + return err + } +} diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go b/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go new file mode 100644 index 00000000000..2ab891f1233 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go @@ -0,0 +1,64 @@ +//go:build !windows + +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. 
+ +package errors + +import ( + "fmt" + "syscall" + "testing" + + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/stretchr/testify/require" +) + +type mockError struct { + msg string +} + +func (e *mockError) Error() string { + return e.msg +} + +func (e *mockError) Is(target error) bool { + _, ok := target.(*mockError) + return ok +} + +func TestToDiskSpaceError(t *testing.T) { + t.Run("return ErrInsufficientDiskSpace for disk space errors and pass through others", func(t *testing.T) { + tests := map[string]struct { + err error + want error + }{ + "ENOSPC": {err: syscall.ENOSPC, want: ErrInsufficientDiskSpace}, + "EDQUOT": {err: syscall.EDQUOT, want: ErrInsufficientDiskSpace}, + "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: ErrInsufficientDiskSpace}, + "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: ErrInsufficientDiskSpace}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + log, err := logger.New("test", true) + require.NoError(t, err) + + got := ToDiskSpaceErrorFunc(log)(test.err) + require.ErrorIs(t, got, test.want) + }) + } + }) + t.Run("does not panic when logger is nil", func(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Fatalf("expected no panic, but got: %v", r) + } + }() + _ = ToDiskSpaceErrorFunc(nil)(syscall.ENOSPC) + _ = ToDiskSpaceErrorFunc(nil)(fmt.Errorf("wrapped: %w", syscall.EDQUOT)) + _ = ToDiskSpaceErrorFunc(nil)(&mockError{msg: "not disk space"}) + }) +} diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go new file mode 100644 index 00000000000..92bee102ed1 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go @@ -0,0 +1,29 @@ +//go:build windows + +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package errors + +import ( + "errors" + + "github.com/elastic/elastic-agent/pkg/core/logger" + + winSys "golang.org/x/sys/windows" +) + +// ToDiskSpaceError returns a generic disk space error if the error is a disk space error +func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { + return func(err error) error { + if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { + if log != nil { + log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) + } + return ErrInsufficientDiskSpace + } + + return err + } +} diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go new file mode 100644 index 00000000000..2ac1621eab3 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go @@ -0,0 +1,65 @@ +//go:build windows + +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. 
+ +package errors + +import ( + "fmt" + "testing" + + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/stretchr/testify/require" + winSys "golang.org/x/sys/windows" +) + +type mockError struct { + msg string +} + +func (e *mockError) Error() string { + return e.msg +} + +func (e *mockError) Is(target error) bool { + _, ok := target.(*mockError) + return ok +} + +func TestToDiskSpaceError(t *testing.T) { + t.Run("return ErrInsufficientDiskSpace for disk space errors and pass through others", func(t *testing.T) { + tests := map[string]struct { + err error + want error + }{ + "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace}, + "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace}, + "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + log, err := logger.New("test", true) + require.NoError(t, err) + + got := ToDiskSpaceErrorFunc(log)(test.err) + + require.ErrorIs(t, got, test.want) + }) + } + }) + t.Run("does not panic when logger is nil", func(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Fatalf("expected no panic, but got: %v", r) + } + }() + _ = ToDiskSpaceErrorFunc(nil)(winSys.ERROR_DISK_FULL) + _ = ToDiskSpaceErrorFunc(nil)(fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL)) + _ = ToDiskSpaceErrorFunc(nil)(&mockError{msg: "not disk space"}) + }) +} diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 7ae166683ce..1a20cac1de3 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -25,6 +25,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/localremote" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/snapshot" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/internal/pkg/release" "github.com/elastic/elastic-agent/pkg/core/logger" @@ -67,7 +68,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set specific downloader, local file just uses the fs.NewDownloader // no fallback is allowed because it was requested that this specific source be used factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { - return fs.NewDownloader(config, diskSpaceErrorFunc), nil + return fs.NewDownloader(config), nil } // set specific verifier, local file verifies locally only @@ -150,23 +151,23 @@ func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgp func newDownloader(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) 
(download.Downloader, error) { if !version.IsSnapshot() { - return localremote.NewDownloader(log, settings, upgradeDetails, diskSpaceErrorFunc) + return localremote.NewDownloader(log, settings, upgradeDetails) } // TODO since we know if it's a snapshot or not, shouldn't we add EITHER the snapshot downloader OR the release one ? // try snapshot repo before official - snapDownloader, err := snapshot.NewDownloader(log, settings, version, upgradeDetails, diskSpaceErrorFunc) + snapDownloader, err := snapshot.NewDownloader(log, settings, version, upgradeDetails) if err != nil { return nil, err } - httpDownloader, err := http.NewDownloader(log, settings, upgradeDetails, diskSpaceErrorFunc) + httpDownloader, err := http.NewDownloader(log, settings, upgradeDetails) if err != nil { return nil, err } - return composed.NewDownloader(fs.NewDownloader(settings, diskSpaceErrorFunc), snapDownloader, httpDownloader), nil + return composed.NewDownloader(fs.NewDownloader(settings), snapDownloader, httpDownloader), nil } func newVerifier(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config) (download.Verifier, error) { @@ -246,7 +247,7 @@ func (u *Upgrader) downloadWithRetries( path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails, diskSpaceErrorFunc) if err != nil { - if errors.Is(err, ErrInsufficientDiskSpace) { + if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { u.log.Infof("Insufficient disk space error detected, stopping retries") return backoff.Permanent(err) } diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 88b94a2053a..5f7b01a6d4a 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -19,6 +19,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" @@ -91,7 +92,7 @@ func TestDownloadWithRetries(t *testing.T) { return &mockDownloader{expectedDownloadPath, nil}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -141,7 +142,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -196,7 +197,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -241,7 +242,7 @@ func TestDownloadWithRetries(t *testing.T) { return &mockDownloader{"", errors.New("download failed")}, nil } - u, err := NewUpgrader(testLogger, &settings, 
&info.AgentInfo{}, nil) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -279,10 +280,10 @@ func TestDownloadWithRetries(t *testing.T) { t.Run("insufficient_disk_space_stops_retries", func(t *testing.T) { mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { - return &mockDownloader{"", ErrInsufficientDiskSpace}, nil + return &mockDownloader{"", upgradeErrors.ErrInsufficientDiskSpace}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}, nil) + u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) require.NoError(t, err) parsedVersion, err := agtversion.ParseVersion("8.9.0") @@ -295,7 +296,7 @@ func TestDownloadWithRetries(t *testing.T) { require.Error(t, err) require.Equal(t, "", path) - require.ErrorIs(t, err, ErrInsufficientDiskSpace) + require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) logs := obs.TakeAll() require.Len(t, logs, 2) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index f7db23d7d80..662ccbf6b25 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -24,6 +24,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/reexec" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/install" @@ -87,14 +88,14 @@ func IsUpgradeable() bool { } // NewUpgrader creates an upgrader which is capable of performing upgrade operation -func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent, diskSpaceErrorFunc func(error) error) (*Upgrader, error) { +func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent) (*Upgrader, error) { return &Upgrader{ log: log, settings: settings, agentInfo: agentInfo, upgradeable: IsUpgradeable(), markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), - diskSpaceErrorFunc: diskSpaceErrorFunc, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), }, nil } From fd35453689530060ac87402b08c1dcff72842e41 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 28 Jul 2025 20:53:01 +0300 Subject: [PATCH 011/127] enhancement(5235): removed unnecessary error files --- .../upgrade/insufficient_disk_space_err.go | 26 ---------- .../insufficient_disk_space_err_unix.go | 22 --------- .../insufficient_disk_space_err_unix_test.go | 48 ------------------- .../insufficient_disk_space_err_windows.go | 23 --------- ...nsufficient_disk_space_err_windows_test.go | 48 ------------------- 5 files changed, 167 deletions(-) delete mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go delete mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go delete mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go delete mode 100644 
internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go delete mode 100644 internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go deleted file mode 100644 index b0833eb4d58..00000000000 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err.go +++ /dev/null @@ -1,26 +0,0 @@ -package upgrade - -import ( - "errors" -) - -const insufficientDiskSpaceErrorStr = "insufficient disk space" - -var ErrInsufficientDiskSpace = &InsufficientDiskSpaceError{Err: errors.New(insufficientDiskSpaceErrorStr)} - -type InsufficientDiskSpaceError struct { - Err error -} - -func (e *InsufficientDiskSpaceError) Error() string { - return e.Err.Error() -} - -func (e *InsufficientDiskSpaceError) Unwrap() error { - return e.Err -} - -func (e *InsufficientDiskSpaceError) Is(target error) bool { - _, ok := target.(*InsufficientDiskSpaceError) - return ok -} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go deleted file mode 100644 index dca8e08d009..00000000000 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix.go +++ /dev/null @@ -1,22 +0,0 @@ -//go:build !windows - -package upgrade - -import ( - "errors" - "syscall" - - "github.com/elastic/elastic-agent/pkg/core/logger" -) - -// ToDiskSpaceError returns a generic disk space error if the error is a disk space error -func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { - return func(err error) error { - if errors.Is(err, syscall.ENOSPC) || errors.Is(err, syscall.EDQUOT) { - log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) - return ErrInsufficientDiskSpace - } - - return err - } -} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go deleted file mode 100644 index d5637b169e5..00000000000 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_unix_test.go +++ /dev/null @@ -1,48 +0,0 @@ -//go:build !windows - -package upgrade - -import ( - "fmt" - "syscall" - "testing" - - "github.com/elastic/elastic-agent/pkg/core/logger" - "github.com/stretchr/testify/require" -) - -type mockError struct { - msg string -} - -func (e *mockError) Error() string { - return e.msg -} - -func (e *mockError) Is(target error) bool { - _, ok := target.(*mockError) - return ok -} - -func TestToDiskSpaceError(t *testing.T) { - tests := map[string]struct { - err error - want error - }{ - "ENOSPC": {err: syscall.ENOSPC, want: ErrInsufficientDiskSpace}, - "EDQUOT": {err: syscall.EDQUOT, want: ErrInsufficientDiskSpace}, - "wrapped ENOSPC": {err: fmt.Errorf("wrapped: %w", syscall.ENOSPC), want: ErrInsufficientDiskSpace}, - "wrapped EDQUOT": {err: fmt.Errorf("wrapped: %w", syscall.EDQUOT), want: ErrInsufficientDiskSpace}, - "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, - } - - for name, test := range tests { - t.Run(name, func(t *testing.T) { - log, err := logger.New("test", true) - require.NoError(t, err) - - got := ToDiskSpaceErrorFunc(log)(test.err) - require.ErrorIs(t, got, test.want) - }) - } -} diff --git 
a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go deleted file mode 100644 index 8bcba82ac92..00000000000 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows.go +++ /dev/null @@ -1,23 +0,0 @@ -//go:build windows - -package upgrade - -import ( - "errors" - - "github.com/elastic/elastic-agent/pkg/core/logger" - - winSys "golang.org/x/sys/windows" -) - -// ToDiskSpaceError returns a generic disk space error if the error is a disk space error -func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { - return func(err error) error { - if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { - log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) - return ErrInsufficientDiskSpace - } - - return err - } -} diff --git a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go b/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go deleted file mode 100644 index ca296654481..00000000000 --- a/internal/pkg/agent/application/upgrade/insufficient_disk_space_err_windows_test.go +++ /dev/null @@ -1,48 +0,0 @@ -//go:build windows - -package upgrade - -import ( - "fmt" - "testing" - - "github.com/elastic/elastic-agent/pkg/core/logger" - "github.com/stretchr/testify/require" - winSys "golang.org/x/sys/windows" -) - -type mockError struct { - msg string -} - -func (e *mockError) Error() string { - return e.msg -} - -func (e *mockError) Is(target error) bool { - _, ok := target.(*mockError) - return ok -} -func TestToDiskSpaceError(t *testing.T) { - tests := map[string]struct { - err error - want error - }{ - "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace}, - "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace}, - "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace}, - "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace}, - "other error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, - } - - for name, test := range tests { - t.Run(name, func(t *testing.T) { - log, err := logger.New("test", true) - require.NoError(t, err) - - got := ToDiskSpaceErrorFunc(log)(test.err) - - require.ErrorIs(t, got, test.want) - }) - } -} From cebb42e84ffa3737e9a934faa9e6fa6cefdc6f83 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 28 Jul 2025 23:13:20 +0300 Subject: [PATCH 012/127] enhancement(5235): added progress reporter interface in http downloader, updated tests --- .../artifact/download/http/downloader.go | 21 ++++++-- .../artifact/download/http/downloader_test.go | 54 +++++++++++++++++-- .../download/http/progress_reporter.go | 26 ++++----- .../download/http/progress_reporter_test.go | 14 ++--- 4 files changed, 79 insertions(+), 36 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 5f2ff9a465c..ce97cd716ed 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -44,12 +44,22 @@ const ( warningProgressIntervalPercentage = 
0.75 ) +// ProgressReporter defines the interface for reporting download progress. +type ProgressReporter interface { + io.Writer + Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) + Report(ctx context.Context) + ReportComplete() + ReportFailed(err error) +} + // Downloader is a downloader able to fetch artifacts from elastic.co web page. type Downloader struct { log *logger.Logger config *artifact.Config client http.Client upgradeDetails *details.Details + progressReporter ProgressReporter diskSpaceErrorFunc func(error) error copyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } @@ -77,6 +87,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client upgradeDetails: upgradeDetails, diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), copyFunc: io.Copy, + progressReporter: &downloadProgressReporter{}, } } @@ -216,16 +227,16 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) detailsObserver := newDetailsProgressObserver(e.upgradeDetails) - dp := newDownloadProgressReporter(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, e.diskSpaceErrorFunc, loggingObserver, detailsObserver) - dp.Report(ctx) - _, err = e.copyFunc(destinationFile, io.TeeReader(resp.Body, dp)) + e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) + e.progressReporter.Report(ctx) + _, err = e.copyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) if err != nil { err = e.diskSpaceErrorFunc(err) - dp.ReportFailed(err) + e.progressReporter.ReportFailed(err) // return path, file already exists and needs to be cleaned up return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } - dp.ReportComplete() + e.progressReporter.ReportComplete() return fullPath, nil } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index ea96c2ed269..7a6b3d22a58 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -14,6 +14,7 @@ import ( "net/http/httptest" "os" "path/filepath" + "reflect" "regexp" "strconv" "testing" @@ -542,6 +543,30 @@ func (e *testCopyError) Is(target error) bool { return ok } +type mockProgressReporter struct { + reportFailedCalls []reportFailedCall +} + +func (m *mockProgressReporter) Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) { + // noop +} + +func (m *mockProgressReporter) Report(ctx context.Context) { + // noop +} + +func (m *mockProgressReporter) ReportComplete() { + // noop +} + +func (m *mockProgressReporter) ReportFailed(err error) { + m.reportFailedCalls = append(m.reportFailedCalls, reportFailedCall{err: err}) +} + +func (m *mockProgressReporter) Write(b []byte) (int, error) { + return len(b), nil +} + func TestDownloadFile(t *testing.T) { t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { ctx := t.Context() @@ -565,10 +590,13 @@ func TestDownloadFile(t *testing.T) { log, _ := loggertest.New("downloader") upgradeDetails := details.NewDetails("1.2.3", 
details.StateRequested, "") + progressReporter := &mockProgressReporter{} + var receivedError error + diskSpaceErr := &testCopyError{msg: "disk space error"} diskSpaceErrorFunc := func(err error) error { receivedError = err - return err + return diskSpaceErr } copyFuncError := &testCopyError{msg: "mock error"} @@ -579,14 +607,30 @@ func TestDownloadFile(t *testing.T) { downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails) downloader.copyFunc = copyFunc downloader.diskSpaceErrorFunc = diskSpaceErrorFunc + downloader.progressReporter = progressReporter _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) - assert.Error(t, err) + assert.Equal(t, receivedError, copyFuncError) - assert.ErrorIs(t, err, copyFuncError) - - assert.Equal(t, copyFuncError, receivedError) + assert.ErrorIs(t, err, diskSpaceErr) + assert.Equal(t, len(progressReporter.reportFailedCalls), 1) + assert.Equal(t, progressReporter.reportFailedCalls[0].err, diskSpaceErr) }) + t.Run("constructor assigns copyFunc, diskSpaceErrorFunc, and progressReporter", func(t *testing.T) { + config := &artifact.Config{ + OperatingSystem: "linux", + Architecture: "amd64", + } + upgradeDetails := details.NewDetails("1.0.0", details.StateRequested, "") + + downloader := NewDownloaderWithClient(nil, config, http.Client{}, upgradeDetails) + + expectedCopyFunc := reflect.ValueOf(io.Copy) + actualCopyFunc := reflect.ValueOf(downloader.copyFunc) + assert.Equal(t, expectedCopyFunc.Pointer(), actualCopyFunc.Pointer()) + assert.NotNil(t, downloader.diskSpaceErrorFunc) + assert.NotNil(t, downloader.progressReporter) + }) } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go index ee37eb7736a..93ef4ddf4cb 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go @@ -19,26 +19,18 @@ type downloadProgressReporter struct { downloaded atomic.Int64 started time.Time - progressObservers []progressObserver - done chan struct{} - diskSpaceErrorFunc func(error) error + progressObservers []progressObserver + done chan struct{} } -func newDownloadProgressReporter(sourceURI string, timeout time.Duration, length int, diskSpaceErrorFunc func(error) error, progressObservers ...progressObserver) *downloadProgressReporter { - interval := time.Duration(float64(timeout) * downloadProgressIntervalPercentage) - if interval == 0 { - interval = downloadProgressMinInterval - } - - return &downloadProgressReporter{ - sourceURI: sourceURI, - interval: interval, - warnTimeout: time.Duration(float64(timeout) * warningProgressIntervalPercentage), - length: float64(length), - diskSpaceErrorFunc: diskSpaceErrorFunc, - progressObservers: progressObservers, - done: make(chan struct{}), +func (dp *downloadProgressReporter) Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) { + dp.sourceURI = sourceURI + dp.interval = time.Duration(float64(timeout) * downloadProgressIntervalPercentage) + if dp.interval == 0 { + dp.interval = downloadProgressMinInterval } + dp.warnTimeout = time.Duration(float64(timeout) * warningProgressIntervalPercentage) + dp.length = float64(length) } func (dp *downloadProgressReporter) Write(b []byte) (int, error) { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go 
b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go index af0aa19af7d..b920dc6f652 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -46,19 +46,15 @@ func (m *mockProgressObserver) ReportFailed(sourceURI string, timePast time.Dura func TestReportFailed(t *testing.T) { t.Run("should call ReportFailed on all observers with correct parameters", func(t *testing.T) { testErr := errors.New("test error") - convertedErr := errors.New("converted error") - diskSpaceErrorFunc := func(err error) error { - if err == testErr { - return convertedErr - } - return err - } observer1 := &mockProgressObserver{} observer2 := &mockProgressObserver{} observers := []progressObserver{observer1, observer2} - dp := newDownloadProgressReporter("mockurl", 10*time.Second, 1000, diskSpaceErrorFunc, observers...) + dp := &downloadProgressReporter{} + dp.Prepare("mockurl", 10*time.Second, 1000, observers...) + + dp.Report(t.Context()) dp.downloaded.Store(500) dp.started = time.Now().Add(-2 * time.Second) @@ -72,7 +68,7 @@ func TestReportFailed(t *testing.T) { case <-testCtx.Done(): t.Error("expected done channel to be closed") case <-dp.done: - // noop + t.Log("done channel closed") } for _, obs := range observers { From 197ad8dfa4c21570f927ded51648c86ad6a8994c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 28 Jul 2025 23:33:02 +0300 Subject: [PATCH 013/127] enhancement(5235): updated tests --- .../artifact/download/fs/downloader_test.go | 10 +++++--- .../artifact/download/http/downloader_test.go | 24 ++++++++++++++----- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index dc5b9f27763..adaa5569708 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/require" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/internal/pkg/agent/errors" agtversion "github.com/elastic/elastic-agent/pkg/version" ) @@ -322,8 +323,11 @@ func TestDownloader_downloadFile(t *testing.T) { TargetDirectory: targetDirPath, } + var receivedError error + diskSpaceErr := errors.New("disk space error") diskSpaceErrorFunc := func(err error) error { - return err + receivedError = err + return diskSpaceErr } copyFuncError := &testCopyError{msg: "mock error"} @@ -336,6 +340,6 @@ func TestDownloader_downloadFile(t *testing.T) { e.diskSpaceErrorFunc = diskSpaceErrorFunc _, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) - require.Error(t, err) - assert.ErrorIs(t, err, copyFuncError) + assert.Equal(t, err, diskSpaceErr) + assert.Equal(t, receivedError, copyFuncError) } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 7a6b3d22a58..50cb1b46612 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go 
@@ -545,10 +545,11 @@ func (e *testCopyError) Is(target error) bool { type mockProgressReporter struct { reportFailedCalls []reportFailedCall + observers []progressObserver } func (m *mockProgressReporter) Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) { - // noop + m.observers = progressObservers } func (m *mockProgressReporter) Report(ctx context.Context) { @@ -568,7 +569,7 @@ func (m *mockProgressReporter) Write(b []byte) (int, error) { } func TestDownloadFile(t *testing.T) { - t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { + t.Run("disk space error", func(t *testing.T) { ctx := t.Context() artifactName := "beat/elastic-agent" filename := "elastic-agent-1.2.3-linux-x86_64.tar.gz" @@ -611,11 +612,22 @@ func TestDownloadFile(t *testing.T) { _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) - assert.Equal(t, receivedError, copyFuncError) + t.Run("prepares reporter with details and logging observers", func(t *testing.T) { + assert.Equal(t, len(progressReporter.observers), 2) + assert.IsType(t, &loggingProgressObserver{}, progressReporter.observers[0]) + assert.IsType(t, &detailsProgressObserver{}, progressReporter.observers[1]) + }) + + t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { + assert.Equal(t, receivedError, copyFuncError) + }) + + t.Run("calls ReportFailed with the result of diskSpaceErrorFunc", func(t *testing.T) { + assert.ErrorIs(t, err, diskSpaceErr) + assert.Equal(t, len(progressReporter.reportFailedCalls), 1) + assert.Equal(t, progressReporter.reportFailedCalls[0].err, diskSpaceErr) + }) - assert.ErrorIs(t, err, diskSpaceErr) - assert.Equal(t, len(progressReporter.reportFailedCalls), 1) - assert.Equal(t, progressReporter.reportFailedCalls[0].err, diskSpaceErr) }) t.Run("constructor assigns copyFunc, diskSpaceErrorFunc, and progressReporter", func(t *testing.T) { From 5b7378f02e25277d27445d8276b0bdc7c3338bf5 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 29 Jul 2025 01:35:43 +0300 Subject: [PATCH 014/127] enhancement(5235): updated tests --- .../artifact/download/fs/downloader_test.go | 17 ++++++++++ .../artifact/download/http/downloader_test.go | 33 +++++++++++-------- .../application/upgrade/step_download.go | 16 ++++----- .../application/upgrade/step_download_test.go | 20 +++++------ 4 files changed, 52 insertions(+), 34 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index adaa5569708..4141f9dc7bd 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -10,6 +10,7 @@ import ( "io" "os" "path/filepath" + "reflect" "testing" "github.com/stretchr/testify/assert" @@ -343,3 +344,19 @@ func TestDownloader_downloadFile(t *testing.T) { assert.Equal(t, err, diskSpaceErr) assert.Equal(t, receivedError, copyFuncError) } + +func TestDownloader_NewDownloader(t *testing.T) { + dropPath := t.TempDir() + config := &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + DropPath: dropPath, + } + + downloader := NewDownloader(config) + + expectedCopyFunc := reflect.ValueOf(io.Copy).Pointer() + actualCopyFunc := reflect.ValueOf(downloader.copyFunc).Pointer() + assert.Equal(t, expectedCopyFunc, actualCopyFunc) + assert.Equal(t, config, downloader.config) +} diff --git 
a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 50cb1b46612..40f3d0d0ac6 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -627,22 +627,27 @@ func TestDownloadFile(t *testing.T) { assert.Equal(t, len(progressReporter.reportFailedCalls), 1) assert.Equal(t, progressReporter.reportFailedCalls[0].err, diskSpaceErr) }) - }) +} - t.Run("constructor assigns copyFunc, diskSpaceErrorFunc, and progressReporter", func(t *testing.T) { - config := &artifact.Config{ - OperatingSystem: "linux", - Architecture: "amd64", - } - upgradeDetails := details.NewDetails("1.0.0", details.StateRequested, "") +func TestDownloader_NewDownloaderWithClient(t *testing.T) { + config := &artifact.Config{ + OperatingSystem: "linux", + Architecture: "amd64", + } + upgradeDetails := details.NewDetails("1.0.0", details.StateRequested, "") + log, _ := loggertest.New("downloader") - downloader := NewDownloaderWithClient(nil, config, http.Client{}, upgradeDetails) + downloader := NewDownloaderWithClient(log, config, http.Client{}, upgradeDetails) - expectedCopyFunc := reflect.ValueOf(io.Copy) - actualCopyFunc := reflect.ValueOf(downloader.copyFunc) - assert.Equal(t, expectedCopyFunc.Pointer(), actualCopyFunc.Pointer()) - assert.NotNil(t, downloader.diskSpaceErrorFunc) - assert.NotNil(t, downloader.progressReporter) - }) + expectedCopyFunc := reflect.ValueOf(io.Copy) + actualCopyFunc := reflect.ValueOf(downloader.copyFunc) + assert.Equal(t, expectedCopyFunc.Pointer(), actualCopyFunc.Pointer()) + + assert.NotNil(t, downloader.diskSpaceErrorFunc) + assert.NotNil(t, downloader.progressReporter) + assert.Equal(t, config, downloader.config) + assert.Equal(t, upgradeDetails, downloader.upgradeDetails) + assert.Equal(t, http.Client{}, downloader.client) + assert.Equal(t, log, downloader.log) } diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 1a20cac1de3..548e7affc43 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -37,9 +37,7 @@ const ( fleetUpgradeFallbackPGPFormat = "/api/agents/upgrades/%d.%d.%d/pgp-public-key" ) -type downloaderFactory func(*agtversion.ParsedSemVer, *logger.Logger, *artifact.Config, *details.Details, func(error) error) (download.Downloader, error) - -type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details, func(error) error) (string, error) +type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details) (string, error) func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ string, err error) { span, ctx := apm.StartSpan(ctx, "downloadArtifact", "app.internal") @@ -67,7 +65,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set specific downloader, local file just uses the fs.NewDownloader // no fallback is allowed because it was requested that this specific source be used - factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details, 
diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { return fs.NewDownloader(config), nil } @@ -101,7 +99,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi return "", errors.New(err, fmt.Sprintf("failed to create download directory at %s", paths.Downloads())) } - path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails, u.diskSpaceErrorFunc) + path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails) if err != nil { return "", fmt.Errorf("failed download of agent binary: %w", err) } @@ -149,7 +147,7 @@ func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgp return pgpBytes } -func newDownloader(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { +func newDownloader(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { if !version.IsSnapshot() { return localremote.NewDownloader(log, settings, upgradeDetails) } @@ -201,9 +199,8 @@ func (u *Upgrader) downloadOnce( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, - diskSpaceErrorFunc func(error) error, ) (string, error) { - downloader, err := factory(version, u.log, settings, upgradeDetails, diskSpaceErrorFunc) + downloader, err := factory(version, u.log, settings, upgradeDetails) if err != nil { return "", fmt.Errorf("unable to create fetcher: %w", err) } @@ -225,7 +222,6 @@ func (u *Upgrader) downloadWithRetries( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, - diskSpaceErrorFunc func(error) error, ) (string, error) { cancelDeadline := time.Now().Add(settings.Timeout) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) @@ -244,7 +240,7 @@ func (u *Upgrader) downloadWithRetries( attempt++ u.log.Infof("download attempt %d", attempt) var err error - path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails, diskSpaceErrorFunc) + path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails) if err != nil { if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 5f7b01a6d4a..3e1547ed67d 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -88,7 +88,7 @@ func TestDownloadWithRetries(t *testing.T) { // Successful immediately (no retries) t.Run("successful_immediately", func(t *testing.T) { - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { return &mockDownloader{expectedDownloadPath, nil}, nil } @@ -101,7 +101,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg 
:= mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -123,7 +123,7 @@ func TestDownloadWithRetries(t *testing.T) { // Downloader constructor failing on first attempt, but succeeding on second attempt (= first retry) t.Run("constructor_failure_once", func(t *testing.T) { attemptIdx := 0 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { defer func() { attemptIdx++ }() @@ -151,7 +151,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -178,7 +178,7 @@ func TestDownloadWithRetries(t *testing.T) { // Download failing on first attempt, but succeeding on second attempt (= first retry) t.Run("download_failure_once", func(t *testing.T) { attemptIdx := 0 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { defer func() { attemptIdx++ }() @@ -206,7 +206,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) require.Equal(t, expectedDownloadPath, path) @@ -238,7 +238,7 @@ func TestDownloadWithRetries(t *testing.T) { // exponential backoff with 10ms init and 500ms timeout should fit at least 3 attempts. 
minNmExpectedAttempts := 3 - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { return &mockDownloader{"", errors.New("download failed")}, nil } @@ -251,7 +251,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(testCaseSettings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails, nil) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails) require.Equal(t, "context deadline exceeded", err.Error()) require.Equal(t, "", path) @@ -279,7 +279,7 @@ func TestDownloadWithRetries(t *testing.T) { }) t.Run("insufficient_disk_space_stops_retries", func(t *testing.T) { - mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details, diskSpaceErrorFunc func(error) error) (download.Downloader, error) { + mockDownloaderCtor := func(version *agtversion.ParsedSemVer, log *logger.Logger, settings *artifact.Config, upgradeDetails *details.Details) (download.Downloader, error) { return &mockDownloader{"", upgradeErrors.ErrInsufficientDiskSpace}, nil } @@ -291,7 +291,7 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails, nil) + path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.Error(t, err) require.Equal(t, "", path) From 5450532196b91cdc07bc5ae83aa827d70215f1fd Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 29 Jul 2025 02:37:41 +0300 Subject: [PATCH 015/127] enhancement(5235): added downloader factory provider and updated tests --- .../application/upgrade/step_download.go | 14 +++- .../application/upgrade/step_download_test.go | 45 +++++++++++ .../pkg/agent/application/upgrade/upgrade.go | 74 +++++++++++++------ 3 files changed, 109 insertions(+), 24 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 548e7affc43..65fa2d5f497 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -65,8 +65,12 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set specific downloader, local file just uses the fs.NewDownloader // no fallback is allowed because it was requested that this specific source be used - factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fs.NewDownloader(config), nil + // factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + // return 
fs.NewDownloader(config), nil + // } + factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) + if err != nil { + return "", err } // set specific verifier, local file verifies locally only @@ -86,7 +90,11 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi if factory == nil { // set the factory to the newDownloader factory - factory = newDownloader + // factory = newDownloader + factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) + if err != nil { + return "", err + } u.log.Infow("Downloading upgrade artifact", "version", parsedVersion, "source_uri", settings.SourceURI, "drop_path", settings.DropPath, "target_path", settings.TargetDirectory, "install_path", settings.InstallPath) diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 3e1547ed67d..8081bd8bbde 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -339,3 +339,48 @@ func mockUpgradeDetails(parsedVersion *agtversion.ParsedSemVer) (*details.Detail &upgradeDetailsRetryUntil, &upgradeDetailsRetryUntilWasUnset, &upgradeDetailsRetryErrorMsg } + +type mockDownloaderFactoryProvider struct { + calledWithName string +} + +var mockDownloaderFactoryError = errors.New("downloader factory not found") + +func (m *mockDownloaderFactoryProvider) GetDownloaderFactory(name string) (downloaderFactory, error) { + m.calledWithName = name + return nil, mockDownloaderFactoryError +} + +func TestDownloadArtifact(t *testing.T) { + t.Run("should return error if file downloader factory is not found", func(t *testing.T) { + logger, err := logger.New("test", false) + require.NoError(t, err) + + config := artifact.Config{} + u, err := NewUpgrader(logger, &config, nil) + require.NoError(t, err) + + u.downloaderFactoryProvider = &mockDownloaderFactoryProvider{} + + _, err = u.downloadArtifact(context.Background(), nil, "file://mockfilepath", nil, false, false) + require.Error(t, err) + require.ErrorIs(t, err, mockDownloaderFactoryError) + require.Equal(t, fileDownloaderFactory, u.downloaderFactoryProvider.(*mockDownloaderFactoryProvider).calledWithName) + }) + + t.Run("should return error if composed downloader factory is not found", func(t *testing.T) { + logger, err := logger.New("test", false) + require.NoError(t, err) + + config := artifact.Config{} + u, err := NewUpgrader(logger, &config, nil) + require.NoError(t, err) + + u.downloaderFactoryProvider = &mockDownloaderFactoryProvider{} + + _, err = u.downloadArtifact(context.Background(), nil, "https://mockuri", nil, false, false) + require.Error(t, err) + require.ErrorIs(t, err, mockDownloaderFactoryError) + require.Equal(t, composedDownloaderFactory, u.downloaderFactoryProvider.(*mockDownloaderFactoryProvider).calledWithName) + }) +} diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 662ccbf6b25..983393bec18 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -23,8 +23,9 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/reexec" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + 
"github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + fsDownloader "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/fs" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" - upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/install" @@ -41,13 +42,15 @@ import ( ) const ( - agentName = "elastic-agent" - hashLen = 6 - agentCommitFile = ".elastic-agent.active.commit" - runDirMod = 0770 - snapshotSuffix = "-SNAPSHOT" - watcherMaxWaitTime = 30 * time.Second - fipsPrefix = "-fips" + agentName = "elastic-agent" + hashLen = 6 + agentCommitFile = ".elastic-agent.active.commit" + runDirMod = 0770 + snapshotSuffix = "-SNAPSHOT" + watcherMaxWaitTime = 30 * time.Second + fipsPrefix = "-fips" + fileDownloaderFactory = "fileDownloaderFactory" + composedDownloaderFactory = "composedDownloaderFactory" ) var agentArtifact = artifact.Artifact{ @@ -69,15 +72,33 @@ func init() { } } +type downloaderFactory func(*agtversion.ParsedSemVer, *logger.Logger, *artifact.Config, *details.Details) (download.Downloader, error) + +type DownloaderFactoryProvider interface { + GetDownloaderFactory(name string) (downloaderFactory, error) +} + +type downloaderFactoryProvider struct { + downloaderFactories map[string]downloaderFactory +} + +func (d *downloaderFactoryProvider) GetDownloaderFactory(name string) (downloaderFactory, error) { + factory, ok := d.downloaderFactories[name] + if !ok { + return nil, fmt.Errorf("downloader factory %q not found", name) + } + return factory, nil +} + // Upgrader performs an upgrade type Upgrader struct { - log *logger.Logger - settings *artifact.Config - agentInfo info.Agent - upgradeable bool - fleetServerURI string - markerWatcher MarkerWatcher - diskSpaceErrorFunc func(error) error + log *logger.Logger + settings *artifact.Config + agentInfo info.Agent + upgradeable bool + fleetServerURI string + markerWatcher MarkerWatcher + downloaderFactoryProvider DownloaderFactoryProvider } // IsUpgradeable when agent is installed and running as a service or flag was provided. 
@@ -89,13 +110,24 @@ func IsUpgradeable() bool { // NewUpgrader creates an upgrader which is capable of performing upgrade operation func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent) (*Upgrader, error) { + downloaderFactories := map[string]downloaderFactory{ + fileDownloaderFactory: func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fsDownloader.NewDownloader(config), nil + }, + composedDownloaderFactory: newDownloader, + } + + downloaderFactoryProvider := &downloaderFactoryProvider{ + downloaderFactories: downloaderFactories, + } + return &Upgrader{ - log: log, - settings: settings, - agentInfo: agentInfo, - upgradeable: IsUpgradeable(), - markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), - diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), + log: log, + settings: settings, + agentInfo: agentInfo, + upgradeable: IsUpgradeable(), + markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), + downloaderFactoryProvider: downloaderFactoryProvider, }, nil } From 3d86720f369205b5c1564e3477d3628bb8b5566c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 29 Jul 2025 20:42:44 +0300 Subject: [PATCH 016/127] enhancement(5235): updated progress reporter prepare function, fixed cleanup for file downloader, added end-to-end tests for the upgrade manager upgrade function --- .../artifact/download/fs/downloader.go | 41 +- .../artifact/download/fs/downloader_test.go | 4 +- .../artifact/download/http/downloader.go | 29 +- .../artifact/download/http/downloader_test.go | 4 +- .../download/http/progress_reporter.go | 2 + .../upgrade/errors/disk_space_windows.go | 5 +- .../upgrade/errors/disk_space_windows_test.go | 14 +- .../agent/application/upgrade/upgrade_test.go | 508 ++++++++++++++++++ .../application/upgrade/upgrade_unix_test.go | 10 + .../upgrade/upgrade_windows_test.go | 10 + 10 files changed, 601 insertions(+), 26 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/upgrade_unix_test.go create mode 100644 internal/pkg/agent/application/upgrade/upgrade_windows_test.go diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 3647ae04dc4..e4753c0c0bc 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -29,7 +29,7 @@ type Downloader struct { dropPath string config *artifact.Config diskSpaceErrorFunc func(error) error - copyFunc func(dst io.Writer, src io.Reader) (written int64, err error) + CopyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } // NewDownloader creates and configures Elastic Downloader @@ -38,7 +38,7 @@ func NewDownloader(config *artifact.Config) *Downloader { config: config, dropPath: getDropPath(config), diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(nil), - copyFunc: io.Copy, + CopyFunc: io.Copy, } } @@ -57,16 +57,26 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version } }() + fmt.Printf("[FS_DOWNLOADER] Download called for artifact: %+v, version: %s\n", a, version.String()) + fmt.Printf("[FS_DOWNLOADER] Config OS: %s, TargetDirectory: %s\n", e.config.OS(), e.config.TargetDirectory) + // download from source to dest path, err := e.download(e.config.OS(), a, *version, "") + fmt.Printf("[FS_DOWNLOADER] download() returned 
path: %s, err: %v\n", path, err) downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } + // download from source to dest hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") + fmt.Printf("[FS_DOWNLOADER] hash download() returned path: %s, err: %v\n", hashPath, err) downloadedFiles = append(downloadedFiles, hashPath) - return path, err + if err != nil { + return "", err + } + + return path, nil } // DownloadAsc downloads the package .asc file from configured source. @@ -86,48 +96,69 @@ func (e *Downloader) download( a artifact.Artifact, version agtversion.ParsedSemVer, extension string) (string, error) { + fmt.Printf("[FS DEBUG] Internal download called: OS=%s, artifact=%+v, version=%+v, ext=%s\n", operatingSystem, a, version, extension) filename, err := artifact.GetArtifactName(a, version, operatingSystem, e.config.Arch()) if err != nil { + fmt.Printf("[FS DEBUG] Failed to generate filename: %v\n", err) return "", errors.New(err, "generating package name failed") } + fmt.Printf("[FS DEBUG] Generated filename: %s\n", filename) fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, e.config.Arch(), e.config.TargetDirectory) if err != nil { + fmt.Printf("[FS DEBUG] Failed to generate path: %v\n", err) return "", errors.New(err, "generating package path failed") } + fmt.Printf("[FS DEBUG] Generated fullPath: %s\n", fullPath) if extension != "" { filename += extension fullPath += extension + fmt.Printf("[FS DEBUG] With extension - filename: %s, fullPath: %s\n", filename, fullPath) } + fmt.Printf("[FS DEBUG] Calling downloadFile with filename=%s, fullPath=%s\n", filename, fullPath) return e.downloadFile(filename, fullPath) } func (e *Downloader) downloadFile(filename, fullPath string) (string, error) { sourcePath := filepath.Join(e.dropPath, filename) + fmt.Printf("[FS DEBUG] downloadFile called - filename=%s, fullPath=%s\n", filename, fullPath) + fmt.Printf("[FS DEBUG] dropPath=%s, computed sourcePath=%s\n", e.dropPath, sourcePath) + sourceFile, err := os.Open(sourcePath) if err != nil { + fmt.Printf("[FS DEBUG] Failed to open source file %s: %v\n", sourcePath, err) return "", errors.New(err, fmt.Sprintf("package '%s' not found", sourcePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer sourceFile.Close() + fmt.Printf("[FS DEBUG] Successfully opened source file: %s\n", sourcePath) if destinationDir := filepath.Dir(fullPath); destinationDir != "" && destinationDir != "." 
{ + fmt.Printf("[FS DEBUG] Creating destination directory: %s\n", destinationDir) if err := os.MkdirAll(destinationDir, 0755); err != nil { + fmt.Printf("[FS DEBUG] Failed to create destination directory: %v\n", err) return "", err } } + fmt.Printf("[FS DEBUG] Creating destination file: %s\n", fullPath) destinationFile, err := os.OpenFile(fullPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, packagePermissions) if err != nil { + fmt.Printf("[FS DEBUG] Failed to create destination file: %v\n", err) return "", errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer destinationFile.Close() - _, err = e.copyFunc(destinationFile, sourceFile) + fmt.Printf("[FS DEBUG] About to call CopyFunc...\n") + _, err = e.CopyFunc(destinationFile, sourceFile) if err != nil { - return "", e.diskSpaceErrorFunc(err) + fmt.Printf("[FS DEBUG] CopyFunc failed with error: %v\n", err) + processedErr := e.diskSpaceErrorFunc(err) + fmt.Printf("[FS DEBUG] diskSpaceErrorFunc processed error: %v -> %v\n", err, processedErr) + return fullPath, processedErr // Return fullPath so cleanup can remove partial file } + fmt.Printf("[FS DEBUG] CopyFunc succeeded\n") return fullPath, nil } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index 4141f9dc7bd..a51e344a005 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -337,7 +337,7 @@ func TestDownloader_downloadFile(t *testing.T) { return 0, copyFuncError } e := NewDownloader(config) - e.copyFunc = copyFunc + e.CopyFunc = copyFunc e.diskSpaceErrorFunc = diskSpaceErrorFunc _, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) @@ -356,7 +356,7 @@ func TestDownloader_NewDownloader(t *testing.T) { downloader := NewDownloader(config) expectedCopyFunc := reflect.ValueOf(io.Copy).Pointer() - actualCopyFunc := reflect.ValueOf(downloader.copyFunc).Pointer() + actualCopyFunc := reflect.ValueOf(downloader.CopyFunc).Pointer() assert.Equal(t, expectedCopyFunc, actualCopyFunc) assert.Equal(t, config, downloader.config) } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index ce97cd716ed..4dd7584bb0d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -61,7 +61,7 @@ type Downloader struct { upgradeDetails *details.Details progressReporter ProgressReporter diskSpaceErrorFunc func(error) error - copyFunc func(dst io.Writer, src io.Reader) (written int64, err error) + CopyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } // NewDownloader creates and configures Elastic Downloader @@ -86,7 +86,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client client: client, upgradeDetails: upgradeDetails, diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), - copyFunc: io.Copy, + CopyFunc: io.Copy, progressReporter: &downloadProgressReporter{}, } } @@ -111,28 +111,39 @@ func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. 
// Returns absolute path to downloaded package and an error. func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { + fmt.Printf("[HTTP_DOWNLOADER] Download called for artifact: %+v, version: %s\n", a, version.String()) + fmt.Printf("[HTTP_DOWNLOADER] Config TargetDirectory: %s\n", e.config.TargetDirectory) + remoteArtifact := a.Artifact downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { + fmt.Printf("[HTTP_DOWNLOADER] Download failed with error: %v\n", err) for _, path := range downloadedFiles { - if err := os.Remove(path); err != nil { - e.log.Warnf("failed to cleanup %s: %v", path, err) - } + os.Remove(path) } + } else { + fmt.Printf("[HTTP_DOWNLOADER] Download succeeded\n") } }() // download from source to dest path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) + fmt.Printf("[HTTP_DOWNLOADER] download() returned path: %s, err: %v\n", path, err) downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } + // download hash from source to dest, only if hash does not exist hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) + fmt.Printf("[HTTP_DOWNLOADER] hash download() returned path: %s, err: %v\n", hashPath, err) downloadedFiles = append(downloadedFiles, hashPath) - return path, err + if err != nil { + return "", err + } + + return path, nil } func (e *Downloader) composeURI(artifactName, packageName string) (string, error) { @@ -229,13 +240,17 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f detailsObserver := newDetailsProgressObserver(e.upgradeDetails) e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) e.progressReporter.Report(ctx) - _, err = e.copyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) + fmt.Printf("[HTTP_DOWNLOADER] About to call CopyFunc for sourceURI: %s\n", sourceURI) + _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) if err != nil { + fmt.Printf("[HTTP_DOWNLOADER] CopyFunc failed with error: %v\n", err) err = e.diskSpaceErrorFunc(err) + fmt.Printf("[HTTP_DOWNLOADER] diskSpaceErrorFunc processed error: %v\n", err) e.progressReporter.ReportFailed(err) // return path, file already exists and needs to be cleaned up return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } + fmt.Printf("[HTTP_DOWNLOADER] CopyFunc succeeded\n") e.progressReporter.ReportComplete() return fullPath, nil diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 40f3d0d0ac6..200ddd52110 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -606,7 +606,7 @@ func TestDownloadFile(t *testing.T) { } downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails) - downloader.copyFunc = copyFunc + downloader.CopyFunc = copyFunc downloader.diskSpaceErrorFunc = diskSpaceErrorFunc downloader.progressReporter = progressReporter @@ -641,7 +641,7 @@ func TestDownloader_NewDownloaderWithClient(t *testing.T) { downloader := NewDownloaderWithClient(log, config, http.Client{}, upgradeDetails) expectedCopyFunc := 
reflect.ValueOf(io.Copy) - actualCopyFunc := reflect.ValueOf(downloader.copyFunc) + actualCopyFunc := reflect.ValueOf(downloader.CopyFunc) assert.Equal(t, expectedCopyFunc.Pointer(), actualCopyFunc.Pointer()) assert.NotNil(t, downloader.diskSpaceErrorFunc) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go index 93ef4ddf4cb..02d52bb8899 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go @@ -31,6 +31,8 @@ func (dp *downloadProgressReporter) Prepare(sourceURI string, timeout time.Durat } dp.warnTimeout = time.Duration(float64(timeout) * warningProgressIntervalPercentage) dp.length = float64(length) + dp.progressObservers = progressObservers + dp.done = make(chan struct{}) } func (dp *downloadProgressReporter) Write(b []byte) (int, error) { diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go index 92bee102ed1..ec2f0dae796 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go @@ -10,14 +10,13 @@ import ( "errors" "github.com/elastic/elastic-agent/pkg/core/logger" - - winSys "golang.org/x/sys/windows" + "golang.org/x/sys/windows" ) // ToDiskSpaceError returns a generic disk space error if the error is a disk space error func ToDiskSpaceErrorFunc(log *logger.Logger) func(error) error { return func(err error) error { - if errors.Is(err, winSys.ERROR_DISK_FULL) || errors.Is(err, winSys.ERROR_HANDLE_DISK_FULL) { + if errors.Is(err, windows.ERROR_DISK_FULL) || errors.Is(err, windows.ERROR_HANDLE_DISK_FULL) { if log != nil { log.Infof("ToDiskSpaceError detected disk space error: %v, returning ErrInsufficientDiskSpace", err) } diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go index 2ac1621eab3..03cd4fff212 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go @@ -12,7 +12,7 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" - winSys "golang.org/x/sys/windows" + "golang.org/x/sys/windows" ) type mockError struct { @@ -34,10 +34,10 @@ func TestToDiskSpaceError(t *testing.T) { err error want error }{ - "ERROR_DISK_FULL": {err: winSys.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace}, - "ERROR_HANDLE_DISK_FULL": {err: winSys.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace}, - "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace}, - "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace}, + "ERROR_DISK_FULL": {err: windows.ERROR_DISK_FULL, want: ErrInsufficientDiskSpace}, + "ERROR_HANDLE_DISK_FULL": {err: windows.ERROR_HANDLE_DISK_FULL, want: ErrInsufficientDiskSpace}, + "wrapped ERROR_DISK_FULL": {err: fmt.Errorf("wrapped: %w", windows.ERROR_DISK_FULL), want: ErrInsufficientDiskSpace}, + "wrapped ERROR_HANDLE_DISK_FULL": {err: fmt.Errorf("wrapped: %w", windows.ERROR_HANDLE_DISK_FULL), want: ErrInsufficientDiskSpace}, "other 
error": {err: &mockError{msg: "some other error"}, want: &mockError{msg: "some other error"}}, } @@ -58,8 +58,8 @@ func TestToDiskSpaceError(t *testing.T) { t.Fatalf("expected no panic, but got: %v", r) } }() - _ = ToDiskSpaceErrorFunc(nil)(winSys.ERROR_DISK_FULL) - _ = ToDiskSpaceErrorFunc(nil)(fmt.Errorf("wrapped: %w", winSys.ERROR_HANDLE_DISK_FULL)) + _ = ToDiskSpaceErrorFunc(nil)(windows.ERROR_DISK_FULL) + _ = ToDiskSpaceErrorFunc(nil)(fmt.Errorf("wrapped: %w", windows.ERROR_HANDLE_DISK_FULL)) _ = ToDiskSpaceErrorFunc(nil)(&mockError{msg: "not disk space"}) }) } diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 17d19252f6e..522c9682cf3 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -8,8 +8,12 @@ import ( "context" "crypto/tls" "fmt" + "io" + "net/http" + "net/http/httptest" "os" "path/filepath" + "reflect" "runtime" "sync" "testing" @@ -25,7 +29,12 @@ import ( "github.com/elastic/elastic-agent-libs/transport/tlscommon" "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/composed" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/fs" + httpDownloader "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/http" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/internal/pkg/config" "github.com/elastic/elastic-agent/internal/pkg/fleetapi" @@ -35,6 +44,8 @@ import ( "github.com/elastic/elastic-agent/pkg/control/v2/client" "github.com/elastic/elastic-agent/pkg/control/v2/cproto" "github.com/elastic/elastic-agent/pkg/core/logger" + mockinfo "github.com/elastic/elastic-agent/testing/mocks/internal_/pkg/agent/application/info" + "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" mocks "github.com/elastic/elastic-agent/testing/mocks/pkg/control/v2/client" @@ -1292,3 +1303,500 @@ func (f *fakeAcker) Commit(ctx context.Context) error { args := f.Called(ctx) return args.Error(0) } + +type MockDownloader struct { + downloadPath string + downloadErr error +} + +func (md *MockDownloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { + return "", nil +} + +func TestDownloaderFactoryProvider(t *testing.T) { + factory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return &MockDownloader{}, nil + } + provider := &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + "mockDownloaderFactory": factory, + }, + } + + actual, err := provider.GetDownloaderFactory("mockDownloaderFactory") + require.NoError(t, err) + require.Equal(t, reflect.ValueOf(factory).Pointer(), reflect.ValueOf(actual).Pointer()) + + _, err = 
provider.GetDownloaderFactory("nonExistentFactory") + require.Error(t, err) + require.Equal(t, "downloader factory \"nonExistentFactory\" not found", err.Error()) +} + +func TestNewUpgrader(t *testing.T) { + logger, err := logger.New("test", false) + require.NoError(t, err) + + upgrader, err := NewUpgrader(logger, nil, nil) + require.NoError(t, err) + + fileDownloaderFactory, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) + require.NoError(t, err) + + fileDownloader, err := fileDownloaderFactory(nil, nil, nil, nil) + require.NoError(t, err) + require.IsType(t, &fs.Downloader{}, fileDownloader) + + composedDownloader, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) + require.NoError(t, err) + + require.Equal(t, reflect.ValueOf(composedDownloader).Pointer(), reflect.ValueOf(newDownloader).Pointer()) +} + +func TestUpgradeDownloadArtifactWithInsufficientDiskSpace(t *testing.T) { + // tests := map[string]struct { + // dropPath string + // targetPath string + // sourceURI string + // copyError error + // expectedError error + // }{} + t.Run("file downloader with error", func(t *testing.T) { + for _, testError := range TestErrors { + t.Run(fmt.Sprintf("file downloader with error %v", testError), func(t *testing.T) { + baseDir := t.TempDir() + testDownloadPath := filepath.Join(baseDir, "downloads") + testTargetPath := filepath.Join(baseDir, "target") + + originalDownloadsPath := paths.Downloads() + t.Cleanup(func() { + paths.SetDownloads(originalDownloadsPath) + err := os.RemoveAll(testDownloadPath) + require.NoError(t, err) + }) + + paths.SetDownloads(testTargetPath) + + testArtifact := artifact.Artifact{ + Name: "Elastic Agent", + Cmd: "elastic-agent", + Artifact: "beats/elastic-agent", + } + version := agtversion.NewParsedSemVer(8, 15, 0, "", "") + + expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) + require.NoError(t, err) + + partialData := []byte("partial content written before error") + err = os.MkdirAll(testDownloadPath, 0755) + require.NoError(t, err) + tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) + err = os.WriteFile(tempArtifactPath, partialData, 0644) + require.NoError(t, err) + + config := artifact.Config{ + OperatingSystem: runtime.GOOS, + Architecture: runtime.GOARCH, + DropPath: testDownloadPath, + TargetDirectory: testTargetPath, + SourceURI: "file://" + tempArtifactPath, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + err = os.MkdirAll(config.TargetDirectory, 0755) + require.NoError(t, err) + + var expectedDestPath string + expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) + require.NoError(t, err) + + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + _, err := io.Copy(dst, src) + require.NoError(t, err) + + require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") + content, err := os.ReadFile(expectedDestPath) + require.NoError(t, err) + require.Equal(t, partialData, content) + + return 0, testError + } + + fileDownloader := fs.NewDownloader(&config) + fileDownloader.CopyFunc = copyFunc + + log, err := logger.New("test", false) + require.NoError(t, err) + + upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") + + fileFactory := func(ver *agtversion.ParsedSemVer, l 
*logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } + + downloaderFactoryProvider := &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + }, + } + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(version.String()) + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + upgrader.downloaderFactoryProvider = downloaderFactoryProvider + + _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) + require.Error(t, err) + require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) + require.NoFileExists(t, expectedDestPath, "FS downloader should clean up partial files on error") + }) + } + }) + + t.Run("composed downloader with error file downloader", func(t *testing.T) { + for _, testError := range TestErrors { + t.Run(fmt.Sprintf("file downloader with error %v", testError), func(t *testing.T) { + baseDir := t.TempDir() + testDownloadPath := filepath.Join(baseDir, "downloads") + testTargetPath := filepath.Join(baseDir, "target") + + originalDownloadsPath := paths.Downloads() + t.Cleanup(func() { + paths.SetDownloads(originalDownloadsPath) + err := os.RemoveAll(testDownloadPath) + require.NoError(t, err) + }) + + paths.SetDownloads(testTargetPath) + + testArtifact := artifact.Artifact{ + Name: "Elastic Agent", + Cmd: "elastic-agent", + Artifact: "beats/elastic-agent", + } + version := agtversion.NewParsedSemVer(8, 15, 0, "", "") + + expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) + require.NoError(t, err) + + partialData := []byte("partial content written before error") + err = os.MkdirAll(testDownloadPath, 0755) + require.NoError(t, err) + tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) + err = os.WriteFile(tempArtifactPath, partialData, 0644) + require.NoError(t, err) + + config := artifact.Config{ + OperatingSystem: runtime.GOOS, + Architecture: runtime.GOARCH, + DropPath: testDownloadPath, + TargetDirectory: testTargetPath, + SourceURI: tempArtifactPath, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + err = os.MkdirAll(config.TargetDirectory, 0755) + require.NoError(t, err) + + var expectedDestPath string + expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) + require.NoError(t, err) + + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + _, err := io.Copy(dst, src) + require.NoError(t, err) + + require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") + content, err := os.ReadFile(expectedDestPath) + require.NoError(t, err) + require.Equal(t, partialData, content) + + return 0, testError + } + + fileDownloader := fs.NewDownloader(&config) + fileDownloader.CopyFunc = copyFunc + + log, err := logger.New("test", false) + require.NoError(t, err) + + upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") + + httpDownloader := httpDownloader.NewDownloaderWithClient(log, &config, http.Client{}, upgradeDetails) + + composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) + + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) 
(download.Downloader, error) { + return fileDownloader, nil + } + + composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return composedDownloader, nil + } + + downloaderFactoryProvider := &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + composedDownloaderFactory: composedFactory, + }, + } + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(version.String()) + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + upgrader.downloaderFactoryProvider = downloaderFactoryProvider + + _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) + require.Error(t, err) + require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) + require.NoFileExists(t, expectedDestPath, "FS downloader should clean up partial files on error") + }) + } + }) + + t.Run("composed downloader http downloader", func(t *testing.T) { + for _, testError := range TestErrors { + t.Run(fmt.Sprintf("composed downloader with error %v", testError), func(t *testing.T) { + baseDir := t.TempDir() + testTargetPath := filepath.Join(baseDir, "target") + + originalDownloadsPath := paths.Downloads() + t.Cleanup(func() { + paths.SetDownloads(originalDownloadsPath) + err := os.RemoveAll(testTargetPath) + require.NoError(t, err) + }) + + paths.SetDownloads(testTargetPath) + + testArtifact := artifact.Artifact{ + Name: "Elastic Agent", + Cmd: "elastic-agent", + Artifact: "beats/elastic-agent", + } + version := agtversion.NewParsedSemVer(8, 15, 0, "", "") + + partialData := []byte("partial content written before error") + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write(partialData) + })) + defer server.Close() + + config := artifact.Config{ + OperatingSystem: runtime.GOOS, + Architecture: runtime.GOARCH, + TargetDirectory: testTargetPath, + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + var expectedDestPath string + expectedDestPath, err := artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) + require.NoError(t, err) + + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + _, err := io.Copy(dst, src) + require.NoError(t, err) + + require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") + content, err := os.ReadFile(expectedDestPath) + require.NoError(t, err) + require.Equal(t, partialData, content) + + return 0, testError + } + + log, err := logger.New("test", false) + require.NoError(t, err) + + upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") + + fileDownloader := fs.NewDownloader(&config) + + httpDownloader := httpDownloader.NewDownloaderWithClient(log, &config, http.Client{}, upgradeDetails) + httpDownloader.CopyFunc = copyFunc + + composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) + + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } + + composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) 
(download.Downloader, error) { + return composedDownloader, nil + } + + downloaderFactoryProvider := &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + composedDownloaderFactory: composedFactory, + }, + } + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(version.String()) + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + upgrader.downloaderFactoryProvider = downloaderFactoryProvider + + _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) + require.Error(t, err) + require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) + require.NoFileExists(t, expectedDestPath, "HTTP downloader should clean up partial files on error") + }) + } + }) + + // t.Run("composed downloader file downloader", func(t *testing.T) { + // partialData := []byte("partial content written before error") + // copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + // // Write some data first to simulate partial copy + // n, writeErr := dst.Write(partialData) + // if writeErr != nil { + // return 0, writeErr + // } + // // Then return the disk space error + // return int64(n), TestErrors[0] + // } + + // // Use separate paths for source files and destination files + // testDropPath := "/tmp/elastic-agent-test-downloads" // Where FS downloader looks for source files + // testTargetPath := "/tmp/elastic-agent-test-targets" // Where downloads should be placed + + // // Store the original downloads path to restore it later + // originalDownloadsPath := paths.Downloads() + // defer paths.SetDownloads(originalDownloadsPath) + + // // Set the downloads path for cleanup to look in the target directory + // paths.SetDownloads(testTargetPath) + + // // Create test artifact for path generation + // testArtifact := artifact.Artifact{ + // Name: "Elastic Agent", + // Cmd: "elastic-agent", + // Artifact: "beats/elastic-agent", + // } + // version := agtversion.NewParsedSemVer(8, 15, 0, "", "") + + // // Generate the correct filename for current OS/architecture + // expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) + // require.NoError(t, err) + + // config := artifact.Config{ + // OperatingSystem: runtime.GOOS, + // Architecture: runtime.GOARCH, + // DropPath: testDropPath, // Where FS downloader looks for source files + // TargetDirectory: testTargetPath, // Where downloads should go (same as cleanup path) + // RetrySleepInitDuration: 1 * time.Second, + // HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + // Timeout: 1 * time.Second, + // }, + // } + + // // Create the source directory and file (where FS downloader reads from) + // err = os.MkdirAll(testDropPath, 0755) + // require.NoError(t, err) + // tempFilePath := filepath.Join(testDropPath, expectedFileName) + // err = os.WriteFile(tempFilePath, []byte("test content"), 0644) + // require.NoError(t, err) + + // // Create the target directory (where downloads go) + // err = os.MkdirAll(testTargetPath, 0755) + // require.NoError(t, err) + + // // Get the expected destination path (in target directory) + // var expectedDestPath string + // expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) + // require.NoError(t, err) + + // // Verify destination file doesn't exist before upgrade + // require.NoFileExists(t, expectedDestPath, 
"destination file should not exist before upgrade") + + // // Create file downloader with disk space error copy function + // fileDownloader := fs.NewDownloader(&config) + // fileDownloader.CopyFunc = copyFunc + + // // Create HTTP downloader with disk space error copy function + // log, err := logger.New("test", false) + // require.NoError(t, err) + // upgradeDetails := details.NewDetails("8.15.0", details.StateDownloading, "test") + + // httpDownloader, err := downloadhttp.NewDownloader(log, &config, upgradeDetails) + // require.NoError(t, err) + // httpDownloader.CopyFunc = copyFunc + + // // Create composed downloader with both file and HTTP downloaders + // composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) + + // composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + // // Return the composed downloader that will try both file and HTTP + // return composedDownloader, nil + // } + + // downloaderFactoryProvider := &downloaderFactoryProvider{ + // downloaderFactories: map[string]downloaderFactory{ + // composedDownloaderFactory: composedFactory, + // }, + // } + + // mockAgentInfo := mockinfo.NewAgent(t) + // mockAgentInfo.On("Version").Return("8.15.0") + + // upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + // require.NoError(t, err) + // upgrader.downloaderFactoryProvider = downloaderFactoryProvider + + // // Get the expected destination path that would be created by the fs downloader + // expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) + // require.NoError(t, err) + + // // Verify destination file doesn't exist before upgrade + // require.NoFileExists(t, expectedDestPath, "destination file should not exist before upgrade") + + // _, err = upgrader.Upgrade(context.Background(), "8.15.0", filepath.Join(testDropPath, expectedFileName), nil, upgradeDetails, false, false) + // require.Error(t, err) + + // // The composed downloader should return a joined error containing both: + // // 1. The insufficient disk space error from the file downloader + // // 2. 
The network error from the HTTP downloader + // require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace, "joined error should contain insufficient disk space error") + + // // Verify that the error message contains both errors + // errMsg := err.Error() + // require.Contains(t, errMsg, "insufficient disk space", "error message should mention disk space") + // require.Contains(t, errMsg, "lookup beats", "error message should mention the network failure") + + // // FS downloader should leave partial files (demonstrating the bug) + // // This is currently a bug - FS downloader doesn't clean up partial files like HTTP downloader does + // if _, statErr := os.Stat(expectedDestPath); statErr == nil { + // // File exists - check if it contains the partial data + // content, readErr := os.ReadFile(expectedDestPath) + // if readErr == nil && len(content) > 0 { + // t.Logf("BUG: FS downloader left partial file with %d bytes: %q", len(content), string(content)) + // require.Contains(t, string(content), string(partialData), "partial file should contain the data written before error") + + // // Clean up the partial file for the test + // os.Remove(expectedDestPath) + // } + // } + + // // Cleanup the test directory + // os.RemoveAll(testDropPath) + // }) + +} diff --git a/internal/pkg/agent/application/upgrade/upgrade_unix_test.go b/internal/pkg/agent/application/upgrade/upgrade_unix_test.go new file mode 100644 index 00000000000..3b5d92abab5 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_unix_test.go @@ -0,0 +1,10 @@ +//go:build !windows + +package upgrade + +import "syscall" + +var TestErrors = []error{ + syscall.ENOSPC, + syscall.EDQUOT, +} diff --git a/internal/pkg/agent/application/upgrade/upgrade_windows_test.go b/internal/pkg/agent/application/upgrade/upgrade_windows_test.go new file mode 100644 index 00000000000..a0984cbe6dc --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_windows_test.go @@ -0,0 +1,10 @@ +//go:build windows + +package upgrade + +import winSys "golang.org/x/sys/windows" + +var TestErrors = []error{ + winSys.ERROR_DISK_FULL, + winSys.ERROR_HANDLE_DISK_FULL, +} From 55c05e4a4385be9a9a93da9664036c95c76693bc Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 30 Jul 2025 15:48:21 +0300 Subject: [PATCH 017/127] enhancement(5235): refactored end to end upgrade tests to assert copy errors for file downloader and composed downloader --- .../agent/application/upgrade/upgrade_test.go | 597 ++++++------------ 1 file changed, 183 insertions(+), 414 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 522c9682cf3..9ae3cce04b7 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -15,6 +15,7 @@ import ( "path/filepath" "reflect" "runtime" + "strings" "sync" "testing" "time" @@ -1352,451 +1353,219 @@ func TestNewUpgrader(t *testing.T) { require.Equal(t, reflect.ValueOf(composedDownloader).Pointer(), reflect.ValueOf(newDownloader).Pointer()) } -func TestUpgradeDownloadArtifactWithInsufficientDiskSpace(t *testing.T) { - // tests := map[string]struct { - // dropPath string - // targetPath string - // sourceURI string - // copyError error - // expectedError error - // }{} - t.Run("file downloader with error", func(t *testing.T) { - for _, testError := range TestErrors { - t.Run(fmt.Sprintf("file downloader with error %v", testError), func(t *testing.T) { - baseDir := t.TempDir() - 
testDownloadPath := filepath.Join(baseDir, "downloads") - testTargetPath := filepath.Join(baseDir, "target") - - originalDownloadsPath := paths.Downloads() - t.Cleanup(func() { - paths.SetDownloads(originalDownloadsPath) - err := os.RemoveAll(testDownloadPath) - require.NoError(t, err) - }) - - paths.SetDownloads(testTargetPath) - - testArtifact := artifact.Artifact{ - Name: "Elastic Agent", - Cmd: "elastic-agent", - Artifact: "beats/elastic-agent", - } - version := agtversion.NewParsedSemVer(8, 15, 0, "", "") - - expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) - require.NoError(t, err) - - partialData := []byte("partial content written before error") - err = os.MkdirAll(testDownloadPath, 0755) - require.NoError(t, err) - tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) - err = os.WriteFile(tempArtifactPath, partialData, 0644) - require.NoError(t, err) - - config := artifact.Config{ - OperatingSystem: runtime.GOOS, - Architecture: runtime.GOARCH, - DropPath: testDownloadPath, - TargetDirectory: testTargetPath, - SourceURI: "file://" + tempArtifactPath, - RetrySleepInitDuration: 1 * time.Second, - HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - }, - } - - err = os.MkdirAll(config.TargetDirectory, 0755) - require.NoError(t, err) - - var expectedDestPath string - expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) - require.NoError(t, err) - - copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { - _, err := io.Copy(dst, src) - require.NoError(t, err) - - require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") - content, err := os.ReadFile(expectedDestPath) - require.NoError(t, err) - require.Equal(t, partialData, content) - - return 0, testError - } - - fileDownloader := fs.NewDownloader(&config) - fileDownloader.CopyFunc = copyFunc - - log, err := logger.New("test", false) - require.NoError(t, err) - - upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") - - fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fileDownloader, nil - } - - downloaderFactoryProvider := &downloaderFactoryProvider{ - downloaderFactories: map[string]downloaderFactory{ - fileDownloaderFactory: fileFactory, - }, - } - - mockAgentInfo := mockinfo.NewAgent(t) - mockAgentInfo.On("Version").Return(version.String()) - - upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - require.NoError(t, err) - upgrader.downloaderFactoryProvider = downloaderFactoryProvider +func setupForFileDownloader(sourcePrefix string, expectedFileName string, partialData []byte) setupFunc { + return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { + testDownloadPath := filepath.Join(basePath, "downloads") + originalDownloadsPath := paths.Downloads() + t.Cleanup(func() { + paths.SetDownloads(originalDownloadsPath) + }) + paths.SetDownloads(targetPath) + err := os.MkdirAll(testDownloadPath, 0755) + require.NoError(t, err) + tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) + err = os.WriteFile(tempArtifactPath, partialData, 0644) + require.NoError(t, err) + + config.SourceURI = sourcePrefix + tempArtifactPath + config.DropPath = testDownloadPath + } +} - _, err = upgrader.Upgrade(context.Background(), version.String(), 
config.SourceURI, nil, upgradeDetails, false, false) - require.Error(t, err) - require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) - require.NoFileExists(t, expectedDestPath, "FS downloader should clean up partial files on error") - }) +func setupForHttpDownloader(partialData []byte) (setupFunc, *httptest.Server) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write(partialData) + })) + + return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { + config.SourceURI = server.URL + config.RetrySleepInitDuration = 1 * time.Second + config.HTTPTransportSettings = httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, } - }) - - t.Run("composed downloader with error file downloader", func(t *testing.T) { - for _, testError := range TestErrors { - t.Run(fmt.Sprintf("file downloader with error %v", testError), func(t *testing.T) { - baseDir := t.TempDir() - testDownloadPath := filepath.Join(baseDir, "downloads") - testTargetPath := filepath.Join(baseDir, "target") - - originalDownloadsPath := paths.Downloads() - t.Cleanup(func() { - paths.SetDownloads(originalDownloadsPath) - err := os.RemoveAll(testDownloadPath) - require.NoError(t, err) - }) - - paths.SetDownloads(testTargetPath) - - testArtifact := artifact.Artifact{ - Name: "Elastic Agent", - Cmd: "elastic-agent", - Artifact: "beats/elastic-agent", - } - version := agtversion.NewParsedSemVer(8, 15, 0, "", "") - - expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) - require.NoError(t, err) - - partialData := []byte("partial content written before error") - err = os.MkdirAll(testDownloadPath, 0755) - require.NoError(t, err) - tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) - err = os.WriteFile(tempArtifactPath, partialData, 0644) - require.NoError(t, err) - - config := artifact.Config{ - OperatingSystem: runtime.GOOS, - Architecture: runtime.GOARCH, - DropPath: testDownloadPath, - TargetDirectory: testTargetPath, - SourceURI: tempArtifactPath, - RetrySleepInitDuration: 1 * time.Second, - HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - }, - } + }, server +} - err = os.MkdirAll(config.TargetDirectory, 0755) - require.NoError(t, err) +func fileDownloaderFactoryProvider(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider { + fileDownloader := fs.NewDownloader(config) + fileDownloader.CopyFunc = copyFunc - var expectedDestPath string - expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) - require.NoError(t, err) + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } - copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { - _, err := io.Copy(dst, src) - require.NoError(t, err) + return &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + }, + } +} - require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") - content, err := os.ReadFile(expectedDestPath) - require.NoError(t, err) - require.Equal(t, partialData, content) +func composedDownloaderFactoryProvider(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error), 
log *logger.Logger, upgradeDetails *details.Details) *downloaderFactoryProvider { + fileDownloader := fs.NewDownloader(config) + httpDownloader := httpDownloader.NewDownloaderWithClient(log, config, http.Client{}, upgradeDetails) - return 0, testError - } + if strings.HasPrefix(config.SourceURI, "http://") || strings.HasPrefix(config.SourceURI, "https://") { + httpDownloader.CopyFunc = copyFunc + } else { + fileDownloader.CopyFunc = copyFunc + } - fileDownloader := fs.NewDownloader(&config) - fileDownloader.CopyFunc = copyFunc + composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) - log, err := logger.New("test", false) - require.NoError(t, err) + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } + composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return composedDownloader, nil + } - upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") + return &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + composedDownloaderFactory: composedFactory, + }, + } +} - httpDownloader := httpDownloader.NewDownloaderWithClient(log, &config, http.Client{}, upgradeDetails) +type setupFunc func(t *testing.T, config *artifact.Config, basePath string, targetPath string) +type factoryProviderFunc func(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider +type mockError struct { + message string +} - composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) +func (e *mockError) Error() string { + return e.message +} - fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fileDownloader, nil - } +func (e *mockError) Is(target error) bool { + return e.message == target.Error() +} - composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return composedDownloader, nil - } +type testError struct { + copyFuncError error + expectedError error +} - downloaderFactoryProvider := &downloaderFactoryProvider{ - downloaderFactories: map[string]downloaderFactory{ - fileDownloaderFactory: fileFactory, - composedDownloaderFactory: composedFactory, - }, - } +func TestRefactoredDownloader(t *testing.T) { + testArtifact := artifact.Artifact{ + Name: "Elastic Agent", + Cmd: "elastic-agent", + Artifact: "beats/elastic-agent", + } + version := agtversion.NewParsedSemVer(8, 15, 0, "", "") + expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) + require.NoError(t, err) + partialData := []byte("partial content written before error") - mockAgentInfo := mockinfo.NewAgent(t) - mockAgentInfo.On("Version").Return(version.String()) + testErrors := []testError{} - upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - require.NoError(t, err) - upgrader.downloaderFactoryProvider = downloaderFactoryProvider + for _, te := range TestErrors { + testErrors = append(testErrors, testError{ + copyFuncError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + }) + } - _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, 
false, false) - require.Error(t, err) - require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) - require.NoFileExists(t, expectedDestPath, "FS downloader should clean up partial files on error") - }) - } + mockTestError := &mockError{message: "test error"} + fileDownloaderTestErrors := []testError{} + fileDownloaderTestErrors = append(fileDownloaderTestErrors, testError{ + copyFuncError: mockTestError, + expectedError: mockTestError, }) - t.Run("composed downloader http downloader", func(t *testing.T) { - for _, testError := range TestErrors { - t.Run(fmt.Sprintf("composed downloader with error %v", testError), func(t *testing.T) { - baseDir := t.TempDir() - testTargetPath := filepath.Join(baseDir, "target") - - originalDownloadsPath := paths.Downloads() - t.Cleanup(func() { - paths.SetDownloads(originalDownloadsPath) - err := os.RemoveAll(testTargetPath) - require.NoError(t, err) - }) + composedDownloaderTestErrors := []testError{} + composedDownloaderTestErrors = append(composedDownloaderTestErrors, testError{ + copyFuncError: mockTestError, + expectedError: context.DeadlineExceeded, + }) - paths.SetDownloads(testTargetPath) + log, err := logger.New("test", false) + require.NoError(t, err) + upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") - testArtifact := artifact.Artifact{ - Name: "Elastic Agent", - Cmd: "elastic-agent", - Artifact: "beats/elastic-agent", - } - version := agtversion.NewParsedSemVer(8, 15, 0, "", "") - - partialData := []byte("partial content written before error") - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write(partialData) - })) - defer server.Close() - - config := artifact.Config{ - OperatingSystem: runtime.GOOS, - Architecture: runtime.GOARCH, - TargetDirectory: testTargetPath, - SourceURI: server.URL, - RetrySleepInitDuration: 1 * time.Second, - HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - }, - } + testCases := map[string]struct { + setupFunc setupFunc + factoryProviderFunc factoryProviderFunc + cleanupMsg string + errors []testError + }{ + "file downloader": { + setupFunc: setupForFileDownloader("file://", expectedFileName, partialData), + factoryProviderFunc: func(config *artifact.Config, copyFunc func(io.Writer, io.Reader) (int64, error)) *downloaderFactoryProvider { + return fileDownloaderFactoryProvider(config, copyFunc) + }, + cleanupMsg: "file downloader should clean up partial files on error", + errors: fileDownloaderTestErrors, + }, + "composed file downloader": { + setupFunc: setupForFileDownloader("", expectedFileName, partialData), + factoryProviderFunc: func(config *artifact.Config, copyFunc func(io.Writer, io.Reader) (int64, error)) *downloaderFactoryProvider { + return composedDownloaderFactoryProvider(config, copyFunc, log, upgradeDetails) + }, + cleanupMsg: "composed file downloader should clean up partial files on error", + errors: composedDownloaderTestErrors, + }, + "composed http downloader": { + setupFunc: func() setupFunc { + setupFunc, server := setupForHttpDownloader(partialData) + t.Cleanup(server.Close) + return setupFunc + }(), + factoryProviderFunc: func(config *artifact.Config, copyFunc func(io.Writer, io.Reader) (int64, error)) *downloaderFactoryProvider { + return composedDownloaderFactoryProvider(config, copyFunc, log, upgradeDetails) + }, + cleanupMsg: "composed http downloader should clean up partial files on error", + errors: 
composedDownloaderTestErrors, + }, + } - var expectedDestPath string - expectedDestPath, err := artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) - require.NoError(t, err) + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + for _, testError := range tc.errors { + t.Run(fmt.Sprintf("with error %v", testError.copyFuncError), func(t *testing.T) { + baseDir := t.TempDir() + testTargetPath := filepath.Join(baseDir, "target") + + config := artifact.Config{ + OperatingSystem: runtime.GOOS, + Architecture: runtime.GOARCH, + TargetDirectory: testTargetPath, + } - copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { - _, err := io.Copy(dst, src) - require.NoError(t, err) + tc.setupFunc(t, &config, baseDir, testTargetPath) - require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") - content, err := os.ReadFile(expectedDestPath) + expectedDestPath, err := artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) require.NoError(t, err) - require.Equal(t, partialData, content) - - return 0, testError - } - - log, err := logger.New("test", false) - require.NoError(t, err) - - upgradeDetails := details.NewDetails(version.String(), details.StateDownloading, "test") - - fileDownloader := fs.NewDownloader(&config) - httpDownloader := httpDownloader.NewDownloaderWithClient(log, &config, http.Client{}, upgradeDetails) - httpDownloader.CopyFunc = copyFunc + copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { + _, err := io.Copy(dst, src) + require.NoError(t, err) - composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) + require.FileExists(t, expectedDestPath, "partially written file should exist before cleanup") + content, err := os.ReadFile(expectedDestPath) + require.NoError(t, err) + require.Equal(t, partialData, content) - fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fileDownloader, nil - } - - composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return composedDownloader, nil - } - - downloaderFactoryProvider := &downloaderFactoryProvider{ - downloaderFactories: map[string]downloaderFactory{ - fileDownloaderFactory: fileFactory, - composedDownloaderFactory: composedFactory, - }, - } - - mockAgentInfo := mockinfo.NewAgent(t) - mockAgentInfo.On("Version").Return(version.String()) + return 0, testError.copyFuncError + } - upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - require.NoError(t, err) - upgrader.downloaderFactoryProvider = downloaderFactoryProvider + downloaderFactoryProvider := tc.factoryProviderFunc(&config, copyFunc) - _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) - require.Error(t, err) - require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) - require.NoFileExists(t, expectedDestPath, "HTTP downloader should clean up partial files on error") - }) - } - }) + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(version.String()) - // t.Run("composed downloader file downloader", func(t *testing.T) { - // partialData := []byte("partial content written before error") - // copyFunc := func(dst io.Writer, src io.Reader) (int64, error) { - // // Write some data first to simulate partial copy - 
// n, writeErr := dst.Write(partialData) - // if writeErr != nil { - // return 0, writeErr - // } - // // Then return the disk space error - // return int64(n), TestErrors[0] - // } - - // // Use separate paths for source files and destination files - // testDropPath := "/tmp/elastic-agent-test-downloads" // Where FS downloader looks for source files - // testTargetPath := "/tmp/elastic-agent-test-targets" // Where downloads should be placed - - // // Store the original downloads path to restore it later - // originalDownloadsPath := paths.Downloads() - // defer paths.SetDownloads(originalDownloadsPath) - - // // Set the downloads path for cleanup to look in the target directory - // paths.SetDownloads(testTargetPath) - - // // Create test artifact for path generation - // testArtifact := artifact.Artifact{ - // Name: "Elastic Agent", - // Cmd: "elastic-agent", - // Artifact: "beats/elastic-agent", - // } - // version := agtversion.NewParsedSemVer(8, 15, 0, "", "") - - // // Generate the correct filename for current OS/architecture - // expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) - // require.NoError(t, err) - - // config := artifact.Config{ - // OperatingSystem: runtime.GOOS, - // Architecture: runtime.GOARCH, - // DropPath: testDropPath, // Where FS downloader looks for source files - // TargetDirectory: testTargetPath, // Where downloads should go (same as cleanup path) - // RetrySleepInitDuration: 1 * time.Second, - // HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - // Timeout: 1 * time.Second, - // }, - // } - - // // Create the source directory and file (where FS downloader reads from) - // err = os.MkdirAll(testDropPath, 0755) - // require.NoError(t, err) - // tempFilePath := filepath.Join(testDropPath, expectedFileName) - // err = os.WriteFile(tempFilePath, []byte("test content"), 0644) - // require.NoError(t, err) - - // // Create the target directory (where downloads go) - // err = os.MkdirAll(testTargetPath, 0755) - // require.NoError(t, err) - - // // Get the expected destination path (in target directory) - // var expectedDestPath string - // expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) - // require.NoError(t, err) - - // // Verify destination file doesn't exist before upgrade - // require.NoFileExists(t, expectedDestPath, "destination file should not exist before upgrade") - - // // Create file downloader with disk space error copy function - // fileDownloader := fs.NewDownloader(&config) - // fileDownloader.CopyFunc = copyFunc - - // // Create HTTP downloader with disk space error copy function - // log, err := logger.New("test", false) - // require.NoError(t, err) - // upgradeDetails := details.NewDetails("8.15.0", details.StateDownloading, "test") - - // httpDownloader, err := downloadhttp.NewDownloader(log, &config, upgradeDetails) - // require.NoError(t, err) - // httpDownloader.CopyFunc = copyFunc - - // // Create composed downloader with both file and HTTP downloaders - // composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) - - // composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - // // Return the composed downloader that will try both file and HTTP - // return composedDownloader, nil - // } - - // downloaderFactoryProvider := &downloaderFactoryProvider{ - // downloaderFactories: 
map[string]downloaderFactory{ - // composedDownloaderFactory: composedFactory, - // }, - // } - - // mockAgentInfo := mockinfo.NewAgent(t) - // mockAgentInfo.On("Version").Return("8.15.0") - - // upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - // require.NoError(t, err) - // upgrader.downloaderFactoryProvider = downloaderFactoryProvider - - // // Get the expected destination path that would be created by the fs downloader - // expectedDestPath, err = artifact.GetArtifactPath(testArtifact, *version, config.OS(), config.Arch(), config.TargetDirectory) - // require.NoError(t, err) - - // // Verify destination file doesn't exist before upgrade - // require.NoFileExists(t, expectedDestPath, "destination file should not exist before upgrade") - - // _, err = upgrader.Upgrade(context.Background(), "8.15.0", filepath.Join(testDropPath, expectedFileName), nil, upgradeDetails, false, false) - // require.Error(t, err) - - // // The composed downloader should return a joined error containing both: - // // 1. The insufficient disk space error from the file downloader - // // 2. The network error from the HTTP downloader - // require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace, "joined error should contain insufficient disk space error") - - // // Verify that the error message contains both errors - // errMsg := err.Error() - // require.Contains(t, errMsg, "insufficient disk space", "error message should mention disk space") - // require.Contains(t, errMsg, "lookup beats", "error message should mention the network failure") - - // // FS downloader should leave partial files (demonstrating the bug) - // // This is currently a bug - FS downloader doesn't clean up partial files like HTTP downloader does - // if _, statErr := os.Stat(expectedDestPath); statErr == nil { - // // File exists - check if it contains the partial data - // content, readErr := os.ReadFile(expectedDestPath) - // if readErr == nil && len(content) > 0 { - // t.Logf("BUG: FS downloader left partial file with %d bytes: %q", len(content), string(content)) - // require.Contains(t, string(content), string(partialData), "partial file should contain the data written before error") - - // // Clean up the partial file for the test - // os.Remove(expectedDestPath) - // } - // } - - // // Cleanup the test directory - // os.RemoveAll(testDropPath) - // }) + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + upgrader.downloaderFactoryProvider = downloaderFactoryProvider + _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) + require.Error(t, err) + require.ErrorIs(t, err, testError.expectedError) + require.NoFileExists(t, expectedDestPath, tc.cleanupMsg) + }) + } + }) + } } From 9ba200d34e24d5b119440f844c7daf1b1e3a936c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 30 Jul 2025 16:35:23 +0300 Subject: [PATCH 018/127] enhancement(5235): updated fs downloader download tests --- .../artifact/download/fs/downloader_test.go | 37 +++++++++++-------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index a51e344a005..a1bb473ece4 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -40,7 +40,7 @@ func TestDownloader_Download(t 
*testing.T) { fields fields args args want string - wantErr assert.ErrorAssertionFunc + wantErr bool }{ { name: "happy path released version", @@ -62,7 +62,7 @@ func TestDownloader_Download(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "")}, want: "elastic-agent-1.2.3-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "no hash released version", @@ -80,7 +80,7 @@ func TestDownloader_Download(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "")}, want: "elastic-agent-1.2.3-linux-x86_64.tar.gz", - wantErr: assert.Error, + wantErr: true, }, { name: "happy path snapshot version", @@ -102,7 +102,7 @@ func TestDownloader_Download(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "happy path released version with build metadata", @@ -124,7 +124,7 @@ func TestDownloader_Download(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "happy path snapshot version with build metadata", @@ -146,7 +146,7 @@ func TestDownloader_Download(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, want: "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, } for _, tt := range tests { @@ -161,14 +161,21 @@ func TestDownloader_Download(t *testing.T) { config.DropPath = dropPath config.TargetDirectory = targetDirPath - e := &Downloader{ - dropPath: dropPath, - config: config, - } + e := NewDownloader(config) got, err := e.Download(context.TODO(), tt.args.a, tt.args.version) - if !tt.wantErr(t, err, fmt.Sprintf("Download(%v, %v)", tt.args.a, tt.args.version)) { + + if tt.wantErr { + assert.Error(t, err) + + expectedTargetFile := filepath.Join(targetDirPath, tt.want) + expectedHashFile := expectedTargetFile + ".sha512" + + assert.NoFileExists(t, expectedTargetFile, "downloader should clean up partial artifact file on error") + assert.NoFileExists(t, expectedHashFile, "downloader should clean up partial hash file on error") return } + + assert.NoError(t, err) assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) }) } @@ -282,10 +289,7 @@ func TestDownloader_DownloadAsc(t *testing.T) { config.DropPath = dropPath config.TargetDirectory = targetDirPath - e := &Downloader{ - dropPath: dropPath, - config: config, - } + e := NewDownloader(config) got, err := e.DownloadAsc(context.TODO(), tt.args.a, tt.args.version) if !tt.wantErr(t, err, fmt.Sprintf("DownloadAsc(%v, %v)", tt.args.a, tt.args.version)) { return @@ -340,9 +344,10 @@ func TestDownloader_downloadFile(t *testing.T) { e.CopyFunc = copyFunc e.diskSpaceErrorFunc = diskSpaceErrorFunc - _, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) + path, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) assert.Equal(t, err, diskSpaceErr) assert.Equal(t, receivedError, copyFuncError) + assert.Equal(t, filepath.Join(targetDirPath, 
"elastic-agent-1.2.3-linux-x86_64.tar.gz"), path) } func TestDownloader_NewDownloader(t *testing.T) { From 8362d11dd3053f0112f8e9f3565e6a6247f25114 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 30 Jul 2025 16:40:54 +0300 Subject: [PATCH 019/127] enhancement(5235): added tests for progress reporter prepare --- .../download/http/progress_reporter_test.go | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go index b920dc6f652..0db5476af3a 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -43,6 +43,24 @@ func (m *mockProgressObserver) ReportFailed(sourceURI string, timePast time.Dura }) } +func TestPrepare(t *testing.T) { + t.Run("should set the sourceURI, interval, warnTimeout, length, progressObservers, and done channel", func(t *testing.T) { + dp := &downloadProgressReporter{} + dp.Prepare("mockurl", 10*time.Second, 1000, &mockProgressObserver{}) + require.Equal(t, "mockurl", dp.sourceURI) + require.Equal(t, time.Duration(float64(10*time.Second)*downloadProgressIntervalPercentage), dp.interval) + require.Equal(t, time.Duration(float64(10*time.Second)*warningProgressIntervalPercentage), dp.warnTimeout) + require.Equal(t, 1000.0, dp.length) + require.Equal(t, 1, len(dp.progressObservers)) + require.NotNil(t, dp.done) + }) + t.Run("should set the interval to downloadProgressMinInterval if the timeout is 0", func(t *testing.T) { + dp := &downloadProgressReporter{} + dp.Prepare("mockurl", 0, 1000, &mockProgressObserver{}) + require.Equal(t, downloadProgressMinInterval, dp.interval) + }) +} + func TestReportFailed(t *testing.T) { t.Run("should call ReportFailed on all observers with correct parameters", func(t *testing.T) { testErr := errors.New("test error") From dccf3410c682b7a61df2022e55740c582cd1deba Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 30 Jul 2025 16:46:03 +0300 Subject: [PATCH 020/127] enhancement(5235): removed unnecessary logging --- .../application/coordinator/coordinator.go | 5 ---- .../artifact/download/fs/downloader.go | 26 +------------------ .../artifact/download/http/downloader.go | 15 ----------- 3 files changed, 1 insertion(+), 45 deletions(-) diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go index 2407a9f7eab..5f89897a98b 100644 --- a/internal/pkg/agent/application/coordinator/coordinator.go +++ b/internal/pkg/agent/application/coordinator/coordinator.go @@ -747,15 +747,10 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str return c.upgradeMgr.AckAction(ctx, c.fleetAcker, action) } - c.logger.Infof("Checking if error is insufficient disk space: errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) = %v", errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace)) if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { - c.logger.Infof("insufficient disk space detected: %v", err) - c.logger.Infof("Unwrapping disk space error from %v to %v", err, upgradeErrors.ErrInsufficientDiskSpace.Err) err = upgradeErrors.ErrInsufficientDiskSpace.Err - c.logger.Infof("After unwrapping, error is now: %v (type: %T)", err, err) } - c.logger.Errorf("Setting upgrade details to failed with final error: %v", err) det.Fail(err) 
return err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index e4753c0c0bc..e095fed5b46 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -57,12 +57,8 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version } }() - fmt.Printf("[FS_DOWNLOADER] Download called for artifact: %+v, version: %s\n", a, version.String()) - fmt.Printf("[FS_DOWNLOADER] Config OS: %s, TargetDirectory: %s\n", e.config.OS(), e.config.TargetDirectory) - // download from source to dest path, err := e.download(e.config.OS(), a, *version, "") - fmt.Printf("[FS_DOWNLOADER] download() returned path: %s, err: %v\n", path, err) downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err @@ -70,7 +66,6 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download from source to dest hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") - fmt.Printf("[FS_DOWNLOADER] hash download() returned path: %s, err: %v\n", hashPath, err) downloadedFiles = append(downloadedFiles, hashPath) if err != nil { return "", err @@ -96,69 +91,50 @@ func (e *Downloader) download( a artifact.Artifact, version agtversion.ParsedSemVer, extension string) (string, error) { - fmt.Printf("[FS DEBUG] Internal download called: OS=%s, artifact=%+v, version=%+v, ext=%s\n", operatingSystem, a, version, extension) filename, err := artifact.GetArtifactName(a, version, operatingSystem, e.config.Arch()) if err != nil { - fmt.Printf("[FS DEBUG] Failed to generate filename: %v\n", err) return "", errors.New(err, "generating package name failed") } - fmt.Printf("[FS DEBUG] Generated filename: %s\n", filename) fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, e.config.Arch(), e.config.TargetDirectory) if err != nil { - fmt.Printf("[FS DEBUG] Failed to generate path: %v\n", err) return "", errors.New(err, "generating package path failed") } - fmt.Printf("[FS DEBUG] Generated fullPath: %s\n", fullPath) if extension != "" { filename += extension fullPath += extension - fmt.Printf("[FS DEBUG] With extension - filename: %s, fullPath: %s\n", filename, fullPath) } - fmt.Printf("[FS DEBUG] Calling downloadFile with filename=%s, fullPath=%s\n", filename, fullPath) return e.downloadFile(filename, fullPath) } func (e *Downloader) downloadFile(filename, fullPath string) (string, error) { sourcePath := filepath.Join(e.dropPath, filename) - fmt.Printf("[FS DEBUG] downloadFile called - filename=%s, fullPath=%s\n", filename, fullPath) - fmt.Printf("[FS DEBUG] dropPath=%s, computed sourcePath=%s\n", e.dropPath, sourcePath) sourceFile, err := os.Open(sourcePath) if err != nil { - fmt.Printf("[FS DEBUG] Failed to open source file %s: %v\n", sourcePath, err) return "", errors.New(err, fmt.Sprintf("package '%s' not found", sourcePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer sourceFile.Close() - fmt.Printf("[FS DEBUG] Successfully opened source file: %s\n", sourcePath) if destinationDir := filepath.Dir(fullPath); destinationDir != "" && destinationDir != "." 
{ - fmt.Printf("[FS DEBUG] Creating destination directory: %s\n", destinationDir) if err := os.MkdirAll(destinationDir, 0755); err != nil { - fmt.Printf("[FS DEBUG] Failed to create destination directory: %v\n", err) return "", err } } - fmt.Printf("[FS DEBUG] Creating destination file: %s\n", fullPath) destinationFile, err := os.OpenFile(fullPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, packagePermissions) if err != nil { - fmt.Printf("[FS DEBUG] Failed to create destination file: %v\n", err) return "", errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer destinationFile.Close() - fmt.Printf("[FS DEBUG] About to call CopyFunc...\n") _, err = e.CopyFunc(destinationFile, sourceFile) if err != nil { - fmt.Printf("[FS DEBUG] CopyFunc failed with error: %v\n", err) processedErr := e.diskSpaceErrorFunc(err) - fmt.Printf("[FS DEBUG] diskSpaceErrorFunc processed error: %v -> %v\n", err, processedErr) - return fullPath, processedErr // Return fullPath so cleanup can remove partial file + return fullPath, processedErr } - fmt.Printf("[FS DEBUG] CopyFunc succeeded\n") return fullPath, nil } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 4dd7584bb0d..3be68f8b31e 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -111,25 +111,18 @@ func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { - fmt.Printf("[HTTP_DOWNLOADER] Download called for artifact: %+v, version: %s\n", a, version.String()) - fmt.Printf("[HTTP_DOWNLOADER] Config TargetDirectory: %s\n", e.config.TargetDirectory) - remoteArtifact := a.Artifact downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { - fmt.Printf("[HTTP_DOWNLOADER] Download failed with error: %v\n", err) for _, path := range downloadedFiles { os.Remove(path) } - } else { - fmt.Printf("[HTTP_DOWNLOADER] Download succeeded\n") } }() // download from source to dest path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) - fmt.Printf("[HTTP_DOWNLOADER] download() returned path: %s, err: %v\n", path, err) downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err @@ -137,7 +130,6 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download hash from source to dest, only if hash does not exist hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) - fmt.Printf("[HTTP_DOWNLOADER] hash download() returned path: %s, err: %v\n", hashPath, err) downloadedFiles = append(downloadedFiles, hashPath) if err != nil { return "", err @@ -219,13 +211,11 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f resp, err := e.client.Do(req.WithContext(ctx)) if err != nil { - // return path, file already exists and needs to be cleaned up return fullPath, errors.New(err, "fetching package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } defer resp.Body.Close() if resp.StatusCode != 200 { - // return path, file already exists and needs to be cleaned up return fullPath, 
errors.New(fmt.Sprintf("call to '%s' returned unsuccessful status code: %d", sourceURI, resp.StatusCode), errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } @@ -240,17 +230,12 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f detailsObserver := newDetailsProgressObserver(e.upgradeDetails) e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) e.progressReporter.Report(ctx) - fmt.Printf("[HTTP_DOWNLOADER] About to call CopyFunc for sourceURI: %s\n", sourceURI) _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) if err != nil { - fmt.Printf("[HTTP_DOWNLOADER] CopyFunc failed with error: %v\n", err) err = e.diskSpaceErrorFunc(err) - fmt.Printf("[HTTP_DOWNLOADER] diskSpaceErrorFunc processed error: %v\n", err) e.progressReporter.ReportFailed(err) - // return path, file already exists and needs to be cleaned up return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } - fmt.Printf("[HTTP_DOWNLOADER] CopyFunc succeeded\n") e.progressReporter.ReportComplete() return fullPath, nil From 901bdf4c54dca913351f5d815511c0b22550725e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 13:17:53 +0300 Subject: [PATCH 021/127] enhancement(5235): injecting details provider. added tests for setting details provider, and end to end test to assert error messages correctly bubble up --- .../application/coordinator/coordinator.go | 8 +- .../coordinator/coordinator_unit_test.go | 153 ++++++++++++++++++ 2 files changed, 158 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go index 5f89897a98b..bf2495d69e1 100644 --- a/internal/pkg/agent/application/coordinator/coordinator.go +++ b/internal/pkg/agent/application/coordinator/coordinator.go @@ -360,6 +360,7 @@ type Coordinator struct { // run a ticker that checks to see if we have a new PID. componentPIDTicker *time.Ticker componentPidRequiresUpdate *atomic.Bool + detailsProvider func(string, details.State, string) *details.Details } // The channels Coordinator reads to receive updates from the various managers. @@ -479,7 +480,8 @@ func New( componentPIDTicker: time.NewTicker(time.Second * 30), componentPidRequiresUpdate: &atomic.Bool{}, - fleetAcker: fleetAcker, + fleetAcker: fleetAcker, + detailsProvider: details.NewDetails, } // Setup communication channels for any non-nil components. This pattern // lets us transparently accept nil managers / simulated events during @@ -735,7 +737,7 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str if action != nil { actionID = action.ActionID } - det := details.NewDetails(version, details.StateRequested, actionID) + det := c.detailsProvider(version, details.StateRequested, actionID) det.RegisterObserver(c.SetUpgradeDetails) cb, err := c.upgradeMgr.Upgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
@@ -748,7 +750,7 @@ func (c *Coordinator) Upgrade(ctx context.Context, version string, sourceURI str } if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { - err = upgradeErrors.ErrInsufficientDiskSpace.Err + err = upgradeErrors.ErrInsufficientDiskSpace } det.Fail(err) diff --git a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go index 48c254d07cc..b600175ada2 100644 --- a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go +++ b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go @@ -15,16 +15,19 @@ import ( "context" "encoding/json" "errors" + "fmt" "net" "net/http" "net/http/httptest" "os" "path/filepath" + "reflect" "strings" "testing" "time" "github.com/elastic/elastic-agent-client/v7/pkg/proto" + "github.com/elastic/elastic-agent/internal/pkg/fleetapi/acker" "github.com/elastic/elastic-agent/internal/pkg/testutils" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/status" @@ -41,10 +44,12 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/monitoring/reload" "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/reexec" "github.com/elastic/elastic-agent/internal/pkg/agent/application/secret" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/storage" "github.com/elastic/elastic-agent/internal/pkg/agent/transpiler" @@ -1959,3 +1964,151 @@ func TestHasEndpoint(t *testing.T) { }) } } + +type mockUpgradeMgrUpgradeErrorHandlingTest struct { + upgradeErr error +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) Upgradeable() bool { + return true +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) Reload(rawConfig *config.Config) error { + return nil +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, details *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (reexec.ShutdownCallbackFn, error) { + return nil, m.upgradeErr +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) Ack(ctx context.Context, acker acker.Acker) error { + return nil +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) AckAction(ctx context.Context, acker acker.Acker, action fleetapi.Action) error { + return errors.New("ack action error") +} + +func (m *mockUpgradeMgrUpgradeErrorHandlingTest) MarkerWatcher() upgrade.MarkerWatcher { + return nil +} + +type testDetail struct { + initialState details.State + expectedState details.State + failedState details.State + errorMsg string +} + +func TestCoordinatorUpgradeErrorHandling(t *testing.T) { + testCases := map[string]struct { + upgradeErr error + expectedError error + detail testDetail + }{ + "insufficient disk space": { + upgradeErr: upgradeErrors.ErrInsufficientDiskSpace, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + detail: 
testDetail{ + initialState: details.StateRequested, + expectedState: details.StateFailed, + failedState: details.StateRequested, + errorMsg: upgradeErrors.ErrInsufficientDiskSpace.Error(), + }, + }, + "wrapped insufficient disk space": { + upgradeErr: fmt.Errorf("wrapped: %w", upgradeErrors.ErrInsufficientDiskSpace), + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + detail: testDetail{ + initialState: details.StateRequested, + expectedState: details.StateFailed, + failedState: details.StateRequested, + errorMsg: upgradeErrors.ErrInsufficientDiskSpace.Error(), + }, + }, + "same version error": { + upgradeErr: upgrade.ErrUpgradeSameVersion, + expectedError: errors.New("ack action error"), + detail: testDetail{ + initialState: details.StateRequested, + expectedState: details.StateCompleted, + failedState: "", + errorMsg: "", + }, + }, + "generic error": { + upgradeErr: errors.New("test error"), + expectedError: errors.New("test error"), + detail: testDetail{ + initialState: details.StateRequested, + expectedState: details.StateFailed, + failedState: details.StateRequested, + errorMsg: "test error", + }, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + upgradeMgr := &mockUpgradeMgrUpgradeErrorHandlingTest{} + upgradeMgr.upgradeErr = tc.upgradeErr + + det := details.NewDetails("1.0.0", tc.detail.initialState, "test-action-id") + detailsProvider := func(version string, state details.State, actionID string) *details.Details { + return det + } + + coord := &Coordinator{ + upgradeMgr: upgradeMgr, + detailsProvider: detailsProvider, + stateBroadcaster: broadcaster.New(State{State: agentclient.Healthy, Message: "Running"}, 64, 32), + overrideStateChan: make(chan *coordinatorOverrideState), + upgradeDetailsChan: make(chan *details.Details, 2), + } + + go func() { + state1 := <-coord.overrideStateChan + assert.Equal(t, agentclient.Upgrading, state1.state) + + state2 := <-coord.overrideStateChan + assert.Nil(t, state2) + }() + + err := coord.Upgrade(t.Context(), "mockversion", "mockuri", nil, false, false) + require.Error(t, err) + require.Equal(t, err, tc.expectedError) + + require.Equal(t, tc.detail.expectedState, det.State, "State mismatch") + require.Equal(t, tc.detail.failedState, det.Metadata.FailedState, "FailedState mismatch") + require.Equal(t, tc.detail.errorMsg, det.Metadata.ErrorMsg, "ErrorMsg mismatch") + }) + } + +} + +func TestCoordinatorNew(t *testing.T) { + t.Run("correctly sets details provider", func(t *testing.T) { + coord := New( + nil, + nil, + logp.InfoLevel, + nil, + component.RuntimeSpecs{}, + nil, + nil, + nil, + nil, + nil, + nil, + nil, + false, + nil, + nil, + ) + + assert.NotNil(t, coord.detailsProvider) + detailsProviderPtr := reflect.ValueOf(coord.detailsProvider).Pointer() + newDetailsPtr := reflect.ValueOf(details.NewDetails).Pointer() + assert.Equal(t, newDetailsPtr, detailsProviderPtr, "detailsProvider should be details.NewDetails") + }) +} From 7e18028c539f30aa2990991aa802b496015380fe Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 13:27:22 +0300 Subject: [PATCH 022/127] enhancement(5235): ran mage check --- .../artifact/download/http/progress_reporter_test.go | 4 ++++ .../agent/application/upgrade/errors/disk_space_unix.go | 4 ++++ .../application/upgrade/errors/disk_space_unix_test.go | 7 ++++++- .../agent/application/upgrade/errors/disk_space_windows.go | 7 ++++++- .../application/upgrade/errors/disk_space_windows_test.go | 7 ++++++- .../pkg/agent/application/upgrade/upgrade_unix_test.go | 4 ++++ 
.../pkg/agent/application/upgrade/upgrade_windows_test.go | 4 ++++ 7 files changed, 34 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go index 0db5476af3a..24675131722 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + package http import ( diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go b/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go index 75542bca3d7..c014bef92e8 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_unix.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + //go:build !windows // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go b/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go index 2ab891f1233..f56bff74585 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_unix_test.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + //go:build !windows // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one @@ -11,8 +15,9 @@ import ( "syscall" "testing" - "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-agent/pkg/core/logger" ) type mockError struct { diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go index ec2f0dae796..5d3bdb67ded 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + //go:build windows // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one @@ -9,8 +13,9 @@ package errors import ( "errors" - "github.com/elastic/elastic-agent/pkg/core/logger" "golang.org/x/sys/windows" + + "github.com/elastic/elastic-agent/pkg/core/logger" ) // ToDiskSpaceError returns a generic disk space error if the error is a disk space error diff --git a/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go index 03cd4fff212..27ed3523bfc 100644 --- a/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go +++ b/internal/pkg/agent/application/upgrade/errors/disk_space_windows_test.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + //go:build windows // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one @@ -10,9 +14,10 @@ import ( "fmt" "testing" - "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" "golang.org/x/sys/windows" + + "github.com/elastic/elastic-agent/pkg/core/logger" ) type mockError struct { diff --git a/internal/pkg/agent/application/upgrade/upgrade_unix_test.go b/internal/pkg/agent/application/upgrade/upgrade_unix_test.go index 3b5d92abab5..195d8f48126 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_unix_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_unix_test.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + //go:build !windows package upgrade diff --git a/internal/pkg/agent/application/upgrade/upgrade_windows_test.go b/internal/pkg/agent/application/upgrade/upgrade_windows_test.go index a0984cbe6dc..60b2e61737c 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_windows_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_windows_test.go @@ -1,3 +1,7 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. 
+ //go:build windows package upgrade From 54152f86aac1e6f5978bd912727c5e9d73b7c176 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 13:43:14 +0300 Subject: [PATCH 023/127] enhancement(5235): added changelog --- ...ce-while-downloading-upgrade-artifact.yaml | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 changelog/fragments/1753958476-return-disk-space-error-message-when-agent-runs-out-of-space-while-downloading-upgrade-artifact.yaml diff --git a/changelog/fragments/1753958476-return-disk-space-error-message-when-agent-runs-out-of-space-while-downloading-upgrade-artifact.yaml b/changelog/fragments/1753958476-return-disk-space-error-message-when-agent-runs-out-of-space-while-downloading-upgrade-artifact.yaml new file mode 100644 index 00000000000..7623bcce876 --- /dev/null +++ b/changelog/fragments/1753958476-return-disk-space-error-message-when-agent-runs-out-of-space-while-downloading-upgrade-artifact.yaml @@ -0,0 +1,32 @@ +# Kind can be one of: +# - breaking-change: a change to previously-documented behavior +# - deprecation: functionality that is being removed in a later release +# - bug-fix: fixes a problem in a previous version +# - enhancement: extends functionality but does not break or fix existing behavior +# - feature: new functionality +# - known-issue: problems that we are aware of in a given version +# - security: impacts on the security of a product or a user’s deployment. +# - upgrade: important information for someone upgrading from a prior version +# - other: does not fit into any of the other categories +kind: enhancement + +# Change summary; a 80ish characters long description of the change. +summary: Return disk space error message when agent runs out of space while downloading upgrade artifact. + +# Long description; in case the summary is not enough to describe the change +# this field accommodate a description without length limits. +# NOTE: This field will be rendered only for breaking-change and known-issue kinds at the moment. +#description: + +# Affected component; usually one of "elastic-agent", "fleet-server", "filebeat", "metricbeat", "auditbeat", "all", etc. +component: elastic-agent + +# PR URL; optional; the PR number that added the changeset. +# If not present is automatically filled by the tooling finding the PR where this changelog fragment has been added. +# NOTE: the tooling supports backports, so it's able to fill the original PR number instead of the backport PR number. +# Please provide it if you are adding a fragment for a different PR. +pr: https://github.com/elastic/elastic-agent/pull/9122 + +# Issue URL; optional; the GitHub issue related to this changeset (either closes or is part of). +# If not present is automatically filled by the tooling with the issue linked to the PR number. 
+issue: https://github.com/elastic/elastic-agent/issues/5235 \ No newline at end of file From 423839d88534cd651227ba4f8ead39d8fade3fe4 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 13:48:49 +0300 Subject: [PATCH 024/127] enhancment(5235): removed commented code --- internal/pkg/agent/application/upgrade/step_download.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 65fa2d5f497..a8594f00ff1 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -65,9 +65,6 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set specific downloader, local file just uses the fs.NewDownloader // no fallback is allowed because it was requested that this specific source be used - // factory = func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - // return fs.NewDownloader(config), nil - // } factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) if err != nil { return "", err @@ -90,7 +87,6 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi if factory == nil { // set the factory to the newDownloader factory - // factory = newDownloader factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) if err != nil { return "", err From 0577d36708a655f4300a7a29c6bcf52573440374 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 14:22:52 +0300 Subject: [PATCH 025/127] enhancement(5235): fix linting errors --- .../upgrade/artifact/download/http/downloader.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 3be68f8b31e..0febfe86d7e 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -66,7 +66,7 @@ type Downloader struct { // NewDownloader creates and configures Elastic Downloader func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (*Downloader, error) { - client, err := config.HTTPTransportSettings.Client( + client, err := config.HTTPTransportSettings.Client( //nolint:staticcheck httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, ) @@ -93,7 +93,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client func (e *Downloader) Reload(c *artifact.Config) error { // reload client - client, err := c.HTTPTransportSettings.Client( + client, err := c.HTTPTransportSettings.Client( //nolint:staticcheck httpcommon.WithAPMHTTPInstrumentation(), ) if err != nil { @@ -226,9 +226,9 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f } } - loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) + loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) //nolint:staticcheck detailsObserver := newDetailsProgressObserver(e.upgradeDetails) - e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, 
detailsObserver) + e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) //nolint:staticcheck e.progressReporter.Report(ctx) _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) if err != nil { From 40e448536525d70986af36d75b6036f7737336fe Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 14:23:23 +0300 Subject: [PATCH 026/127] enhancement(5235): fix test cases --- internal/pkg/agent/application/upgrade/upgrade_test.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 9ae3cce04b7..bc18f15a2c2 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1375,7 +1375,7 @@ func setupForFileDownloader(sourcePrefix string, expectedFileName string, partia func setupForHttpDownloader(partialData []byte) (setupFunc, *httptest.Server) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) - w.Write(partialData) + w.Write(partialData) //nolint:errcheck //test code })) return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { @@ -1474,12 +1474,14 @@ func TestRefactoredDownloader(t *testing.T) { copyFuncError: mockTestError, expectedError: mockTestError, }) + fileDownloaderTestErrors = append(fileDownloaderTestErrors, testErrors...) composedDownloaderTestErrors := []testError{} composedDownloaderTestErrors = append(composedDownloaderTestErrors, testError{ copyFuncError: mockTestError, expectedError: context.DeadlineExceeded, }) + composedDownloaderTestErrors = append(composedDownloaderTestErrors, testErrors...) 
log, err := logger.New("test", false) require.NoError(t, err) @@ -1561,8 +1563,8 @@ func TestRefactoredDownloader(t *testing.T) { upgrader.downloaderFactoryProvider = downloaderFactoryProvider _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) - require.Error(t, err) - require.ErrorIs(t, err, testError.expectedError) + require.Error(t, err, "expected error got none") + require.ErrorIs(t, err, testError.expectedError, "expected error mismatch") require.NoFileExists(t, expectedDestPath, tc.cleanupMsg) }) } From 1d38805b5a16911f893fd118cdba38bc80a9f506 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 14:27:18 +0300 Subject: [PATCH 027/127] enhancement(5235): updated tests, removed unused var --- internal/pkg/agent/application/upgrade/upgrade_test.go | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index bc18f15a2c2..ef13258c12d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1305,18 +1305,16 @@ func (f *fakeAcker) Commit(ctx context.Context) error { return args.Error(0) } -type MockDownloader struct { - downloadPath string - downloadErr error +type mockDownloaderFactoryProviderTest struct { } -func (md *MockDownloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { +func (md *mockDownloaderFactoryProviderTest) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { return "", nil } func TestDownloaderFactoryProvider(t *testing.T) { factory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return &MockDownloader{}, nil + return &mockDownloaderFactoryProviderTest{}, nil } provider := &downloaderFactoryProvider{ downloaderFactories: map[string]downloaderFactory{ From 961a86deb71341b3fd11554949741ea6b8bc9d5e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 16:38:56 +0300 Subject: [PATCH 028/127] enhancement(5235): updated progress reporter to avoid data race --- .../coordinator/coordinator_unit_test.go | 1 + .../artifact/download/http/downloader.go | 46 +++++++++++-------- .../artifact/download/http/downloader_test.go | 22 ++++----- .../download/http/progress_reporter.go | 22 +++++---- .../download/http/progress_reporter_test.go | 37 +++++++-------- 5 files changed, 66 insertions(+), 62 deletions(-) diff --git a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go index b600175ada2..0e42e82dde9 100644 --- a/internal/pkg/agent/application/coordinator/coordinator_unit_test.go +++ b/internal/pkg/agent/application/coordinator/coordinator_unit_test.go @@ -1505,6 +1505,7 @@ func TestCoordinatorInitiatesUpgrade(t *testing.T) { overrideStateChan: overrideStateChan, upgradeDetailsChan: upgradeDetailsChan, upgradeMgr: upgradeMgr, + detailsProvider: details.NewDetails, logger: logp.NewLogger("testing"), } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 0febfe86d7e..57c2080760b 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ 
b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -47,21 +47,23 @@ const ( // ProgressReporter defines the interface for reporting download progress. type ProgressReporter interface { io.Writer - Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) + // Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) Report(ctx context.Context) ReportComplete() ReportFailed(err error) } +type progressReporterProvider func(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) ProgressReporter + // Downloader is a downloader able to fetch artifacts from elastic.co web page. type Downloader struct { - log *logger.Logger - config *artifact.Config - client http.Client - upgradeDetails *details.Details - progressReporter ProgressReporter - diskSpaceErrorFunc func(error) error - CopyFunc func(dst io.Writer, src io.Reader) (written int64, err error) + log *logger.Logger + config *artifact.Config + client http.Client + upgradeDetails *details.Details + progressReporterProvider progressReporterProvider + diskSpaceErrorFunc func(error) error + CopyFunc func(dst io.Writer, src io.Reader) (written int64, err error) } // NewDownloader creates and configures Elastic Downloader @@ -78,16 +80,20 @@ func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails * return NewDownloaderWithClient(log, config, *client, upgradeDetails), nil } +func progressReporterProviderFunc(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) ProgressReporter { + return newDownloadProgressReporter(sourceURI, timeout, length, progressObservers...) +} + // NewDownloaderWithClient creates Elastic Downloader with specific client used func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client http.Client, upgradeDetails *details.Details) *Downloader { return &Downloader{ - log: log, - config: config, - client: client, - upgradeDetails: upgradeDetails, - diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), - CopyFunc: io.Copy, - progressReporter: &downloadProgressReporter{}, + log: log, + config: config, + client: client, + upgradeDetails: upgradeDetails, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), + CopyFunc: io.Copy, + progressReporterProvider: progressReporterProviderFunc, } } @@ -228,15 +234,15 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) //nolint:staticcheck detailsObserver := newDetailsProgressObserver(e.upgradeDetails) - e.progressReporter.Prepare(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) //nolint:staticcheck - e.progressReporter.Report(ctx) - _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, e.progressReporter)) + progressReporter := e.progressReporterProvider(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) //nolint:staticcheck + progressReporter.Report(ctx) + _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, progressReporter)) if err != nil { err = e.diskSpaceErrorFunc(err) - e.progressReporter.ReportFailed(err) + progressReporter.ReportFailed(err) return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } - 
e.progressReporter.ReportComplete() + progressReporter.ReportComplete() return fullPath, nil } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 200ddd52110..16c5fc1ef1c 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -545,11 +545,6 @@ func (e *testCopyError) Is(target error) bool { type mockProgressReporter struct { reportFailedCalls []reportFailedCall - observers []progressObserver -} - -func (m *mockProgressReporter) Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) { - m.observers = progressObservers } func (m *mockProgressReporter) Report(ctx context.Context) { @@ -608,16 +603,12 @@ func TestDownloadFile(t *testing.T) { downloader := NewDownloaderWithClient(log, config, *server.Client(), upgradeDetails) downloader.CopyFunc = copyFunc downloader.diskSpaceErrorFunc = diskSpaceErrorFunc - downloader.progressReporter = progressReporter + downloader.progressReporterProvider = func(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) ProgressReporter { + return progressReporter + } _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) - t.Run("prepares reporter with details and logging observers", func(t *testing.T) { - assert.Equal(t, len(progressReporter.observers), 2) - assert.IsType(t, &loggingProgressObserver{}, progressReporter.observers[0]) - assert.IsType(t, &detailsProgressObserver{}, progressReporter.observers[1]) - }) - t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { assert.Equal(t, receivedError, copyFuncError) }) @@ -645,7 +636,12 @@ func TestDownloader_NewDownloaderWithClient(t *testing.T) { assert.Equal(t, expectedCopyFunc.Pointer(), actualCopyFunc.Pointer()) assert.NotNil(t, downloader.diskSpaceErrorFunc) - assert.NotNil(t, downloader.progressReporter) + + assert.NotNil(t, downloader.progressReporterProvider) + expectedProvider := reflect.ValueOf(progressReporterProviderFunc) + actualProvider := reflect.ValueOf(downloader.progressReporterProvider) + assert.Equal(t, expectedProvider.Pointer(), actualProvider.Pointer()) + assert.Equal(t, config, downloader.config) assert.Equal(t, upgradeDetails, downloader.upgradeDetails) assert.Equal(t, http.Client{}, downloader.client) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go index 02d52bb8899..d2139d7e602 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter.go @@ -23,16 +23,20 @@ type downloadProgressReporter struct { done chan struct{} } -func (dp *downloadProgressReporter) Prepare(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) { - dp.sourceURI = sourceURI - dp.interval = time.Duration(float64(timeout) * downloadProgressIntervalPercentage) - if dp.interval == 0 { - dp.interval = downloadProgressMinInterval +func newDownloadProgressReporter(sourceURI string, timeout time.Duration, length int, progressObservers ...progressObserver) *downloadProgressReporter { + interval := time.Duration(float64(timeout) * 
downloadProgressIntervalPercentage) + if interval == 0 { + interval = downloadProgressMinInterval + } + + return &downloadProgressReporter{ + sourceURI: sourceURI, + interval: interval, + warnTimeout: time.Duration(float64(timeout) * warningProgressIntervalPercentage), + length: float64(length), + progressObservers: progressObservers, + done: make(chan struct{}), } - dp.warnTimeout = time.Duration(float64(timeout) * warningProgressIntervalPercentage) - dp.length = float64(length) - dp.progressObservers = progressObservers - dp.done = make(chan struct{}) } func (dp *downloadProgressReporter) Write(b []byte) (int, error) { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go index 24675131722..9730705fa58 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/progress_reporter_test.go @@ -47,24 +47,6 @@ func (m *mockProgressObserver) ReportFailed(sourceURI string, timePast time.Dura }) } -func TestPrepare(t *testing.T) { - t.Run("should set the sourceURI, interval, warnTimeout, length, progressObservers, and done channel", func(t *testing.T) { - dp := &downloadProgressReporter{} - dp.Prepare("mockurl", 10*time.Second, 1000, &mockProgressObserver{}) - require.Equal(t, "mockurl", dp.sourceURI) - require.Equal(t, time.Duration(float64(10*time.Second)*downloadProgressIntervalPercentage), dp.interval) - require.Equal(t, time.Duration(float64(10*time.Second)*warningProgressIntervalPercentage), dp.warnTimeout) - require.Equal(t, 1000.0, dp.length) - require.Equal(t, 1, len(dp.progressObservers)) - require.NotNil(t, dp.done) - }) - t.Run("should set the interval to downloadProgressMinInterval if the timeout is 0", func(t *testing.T) { - dp := &downloadProgressReporter{} - dp.Prepare("mockurl", 0, 1000, &mockProgressObserver{}) - require.Equal(t, downloadProgressMinInterval, dp.interval) - }) -} - func TestReportFailed(t *testing.T) { t.Run("should call ReportFailed on all observers with correct parameters", func(t *testing.T) { testErr := errors.New("test error") @@ -73,8 +55,7 @@ func TestReportFailed(t *testing.T) { observer2 := &mockProgressObserver{} observers := []progressObserver{observer1, observer2} - dp := &downloadProgressReporter{} - dp.Prepare("mockurl", 10*time.Second, 1000, observers...) + dp := newDownloadProgressReporter("mockurl", 10*time.Second, 1000, observers...) 
dp.Report(t.Context()) @@ -115,3 +96,19 @@ func TestReportFailed(t *testing.T) { } }) } + +func TestNewDownloadProgressReporter(t *testing.T) { + t.Run("should create a new download progress reporter with the correct parameters", func(t *testing.T) { + dp := newDownloadProgressReporter("mockurl", 10*time.Second, 1000, &mockProgressObserver{}) + require.Equal(t, "mockurl", dp.sourceURI) + require.Equal(t, time.Duration(float64(10*time.Second)*downloadProgressIntervalPercentage), dp.interval) + require.Equal(t, time.Duration(float64(10*time.Second)*warningProgressIntervalPercentage), dp.warnTimeout) + require.Equal(t, float64(1000), dp.length) + require.Equal(t, 1, len(dp.progressObservers)) + require.NotNil(t, dp.done) + }) + t.Run("should set the interval to downloadProgressMinInterval if the timeout is 0", func(t *testing.T) { + dp := newDownloadProgressReporter("mockurl", 0, 1000, &mockProgressObserver{}) + require.Equal(t, time.Duration(float64(downloadProgressMinInterval)), dp.interval) + }) +} From 43e1f9d2fcd6e91895cc3b89244de2be0d09e50a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 16:49:47 +0300 Subject: [PATCH 029/127] enhancement(5235): removed reference to embedded type --- .../upgrade/artifact/download/http/downloader.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 57c2080760b..faa8281b335 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -68,7 +68,7 @@ type Downloader struct { // NewDownloader creates and configures Elastic Downloader func NewDownloader(log *logger.Logger, config *artifact.Config, upgradeDetails *details.Details) (*Downloader, error) { - client, err := config.HTTPTransportSettings.Client( //nolint:staticcheck + client, err := config.Client( httpcommon.WithAPMHTTPInstrumentation(), httpcommon.WithKeepaliveSettings{Disable: false, IdleConnTimeout: 30 * time.Second}, ) @@ -99,7 +99,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client func (e *Downloader) Reload(c *artifact.Config) error { // reload client - client, err := c.HTTPTransportSettings.Client( //nolint:staticcheck + client, err := c.Client( httpcommon.WithAPMHTTPInstrumentation(), ) if err != nil { @@ -232,9 +232,9 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f } } - loggingObserver := newLoggingProgressObserver(e.log, e.config.HTTPTransportSettings.Timeout) //nolint:staticcheck + loggingObserver := newLoggingProgressObserver(e.log, e.config.Timeout) detailsObserver := newDetailsProgressObserver(e.upgradeDetails) - progressReporter := e.progressReporterProvider(sourceURI, e.config.HTTPTransportSettings.Timeout, fileSize, loggingObserver, detailsObserver) //nolint:staticcheck + progressReporter := e.progressReporterProvider(sourceURI, e.config.Timeout, fileSize, loggingObserver, detailsObserver) progressReporter.Report(ctx) _, err = e.CopyFunc(destinationFile, io.TeeReader(resp.Body, progressReporter)) if err != nil { From f44e5a3e0280fdd2765ce5db52c99155d9cc8f5c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 17:52:29 +0300 Subject: [PATCH 030/127] enhancement(5235): returning path from download functions --- .../application/upgrade/artifact/download/fs/downloader.go | 5 +---- 
.../application/upgrade/artifact/download/http/downloader.go | 5 +---- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index e095fed5b46..c837b96c60d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -67,11 +67,8 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download from source to dest hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") downloadedFiles = append(downloadedFiles, hashPath) - if err != nil { - return "", err - } - return path, nil + return path, err } // DownloadAsc downloads the package .asc file from configured source. diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index faa8281b335..96edf74f209 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -137,11 +137,8 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download hash from source to dest, only if hash does not exist hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) downloadedFiles = append(downloadedFiles, hashPath) - if err != nil { - return "", err - } - return path, nil + return path, err } func (e *Downloader) composeURI(artifactName, packageName string) (string, error) { From 64b1047c68a123fb7b436e42d08aaf7fb4511e37 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 20:16:04 +0300 Subject: [PATCH 031/127] enhancement(5235): using config arch instead of runtime arch --- internal/pkg/agent/application/upgrade/upgrade_test.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index ef13258c12d..f0d262f54e9 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1446,14 +1446,15 @@ type testError struct { expectedError error } -func TestRefactoredDownloader(t *testing.T) { +func TestUpgradeDownloadErrors(t *testing.T) { testArtifact := artifact.Artifact{ Name: "Elastic Agent", Cmd: "elastic-agent", Artifact: "beats/elastic-agent", } version := agtversion.NewParsedSemVer(8, 15, 0, "", "") - expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, runtime.GOOS, runtime.GOARCH) + tempConfig := &artifact.Config{} + expectedFileName, err := artifact.GetArtifactName(testArtifact, *version, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) partialData := []byte("partial content written before error") @@ -1529,8 +1530,6 @@ func TestRefactoredDownloader(t *testing.T) { testTargetPath := filepath.Join(baseDir, "target") config := artifact.Config{ - OperatingSystem: runtime.GOOS, - Architecture: runtime.GOARCH, TargetDirectory: testTargetPath, } From 7b758d0c5919ba1d35974bfaaa9d4a21cfbaea0f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 21:33:39 +0300 Subject: [PATCH 032/127] enhancement(5235): updated download functions to remove the base directory to cleanup instead of individual files --- .../upgrade/artifact/artifact_test.go 
| 17 +++++++++++-- .../artifact/download/fs/downloader.go | 10 +++----- .../artifact/download/fs/downloader_test.go | 1 + .../artifact/download/http/downloader.go | 14 ++++++----- .../artifact/download/http/downloader_test.go | 25 +++++++++++++------ .../agent/application/upgrade/upgrade_test.go | 1 + 6 files changed, 46 insertions(+), 22 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/artifact_test.go b/internal/pkg/agent/application/upgrade/artifact/artifact_test.go index b7f70236695..6ab09f7508d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/artifact_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/artifact_test.go @@ -5,6 +5,7 @@ package artifact import ( + "path/filepath" "testing" "github.com/stretchr/testify/require" @@ -12,33 +13,42 @@ import ( agtversion "github.com/elastic/elastic-agent/pkg/version" ) -func TestGetArtifactName(t *testing.T) { +func TestArtifacts(t *testing.T) { version, err := agtversion.ParseVersion("9.1.0") require.NoError(t, err) + targetDir := t.TempDir() tests := map[string]struct { a Artifact version agtversion.ParsedSemVer arch string expectedName string + targetDir string + expectedPath string }{ "no_fips_arm64": { a: Artifact{Cmd: "elastic-agent"}, version: *version, arch: "arm64", expectedName: "elastic-agent-9.1.0-linux-arm64.tar.gz", + targetDir: targetDir, + expectedPath: filepath.Join(targetDir, "elastic-agent-9.1.0-linux-arm64.tar.gz"), }, "fips_x86": { a: Artifact{Cmd: "elastic-agent-fips"}, version: *version, arch: "32", expectedName: "elastic-agent-fips-9.1.0-linux-x86.tar.gz", + targetDir: targetDir, + expectedPath: filepath.Join(targetDir, "elastic-agent-fips-9.1.0-linux-x86.tar.gz"), }, "fips_x86_64": { a: Artifact{Cmd: "elastic-agent-fips"}, version: *version, arch: "64", expectedName: "elastic-agent-fips-9.1.0-linux-x86_64.tar.gz", + targetDir: targetDir, + expectedPath: filepath.Join(targetDir, "elastic-agent-fips-9.1.0-linux-x86_64.tar.gz"), }, } @@ -47,7 +57,10 @@ func TestGetArtifactName(t *testing.T) { artifactName, err := GetArtifactName(test.a, test.version, "linux", test.arch) require.NoError(t, err) require.Equal(t, test.expectedName, artifactName) + + artifactPath, err := GetArtifactPath(test.a, test.version, "linux", test.arch, test.targetDir) + require.NoError(t, err) + require.Equal(t, test.expectedPath, artifactPath) }) } - } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index c837b96c60d..dd3a394412a 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -47,11 +47,11 @@ func NewDownloader(config *artifact.Config) *Downloader { func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() - downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { - for _, path := range downloadedFiles { - os.Remove(path) + rmErr := os.RemoveAll(e.config.TargetDirectory) + if rmErr != nil { + err = errors.New(err, "failed to remove target directory", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, e.config.TargetDirectory)) } apm.CaptureError(ctx, err).Send() } @@ -59,14 +59,12 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download from source to dest 
path, err := e.download(e.config.OS(), a, *version, "") - downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } // download from source to dest - hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") - downloadedFiles = append(downloadedFiles, hashPath) + _, err = e.download(e.config.OS(), a, *version, ".sha512") return path, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index a1bb473ece4..5ffdae53ca0 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -172,6 +172,7 @@ func TestDownloader_Download(t *testing.T) { assert.NoFileExists(t, expectedTargetFile, "downloader should clean up partial artifact file on error") assert.NoFileExists(t, expectedHashFile, "downloader should clean up partial hash file on error") + assert.NoDirExists(t, targetDirPath, "downloader should clean up target directory on error") return } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 96edf74f209..944229ffac0 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -25,6 +25,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/pkg/core/logger" agtversion "github.com/elastic/elastic-agent/pkg/version" + "go.elastic.co/apm/v2" ) const ( @@ -117,26 +118,27 @@ func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. 
func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { + span, ctx := apm.StartSpan(ctx, "download", "app.internal") + defer span.End() remoteArtifact := a.Artifact - downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { - for _, path := range downloadedFiles { - os.Remove(path) + rmErr := os.RemoveAll(e.config.TargetDirectory) + if rmErr != nil { + err = errors.New(err, "failed to remove target directory", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, e.config.TargetDirectory)) } + apm.CaptureError(ctx, err).Send() } }() // download from source to dest path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) - downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } // download hash from source to dest, only if hash does not exist - hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) - downloadedFiles = append(downloadedFiles, hashPath) + _, err = e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) return path, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 16c5fc1ef1c..dc78812e6a6 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -71,6 +71,8 @@ func TestDownload(t *testing.T) { t.Fatal(err) } + require.Equal(t, targetDir, filepath.Dir(artifactPath)) + _, err = os.Stat(artifactPath) if err != nil { t.Fatal(err) @@ -375,7 +377,7 @@ func TestDownloadVersion(t *testing.T) { fields fields args args want string - wantErr assert.ErrorAssertionFunc + wantErr bool }{ { name: "happy path released version", @@ -397,7 +399,7 @@ func TestDownloadVersion(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "")}, want: "elastic-agent-1.2.3-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "no hash released version", @@ -415,7 +417,7 @@ func TestDownloadVersion(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "")}, want: "elastic-agent-1.2.3-linux-x86_64.tar.gz", - wantErr: assert.Error, + wantErr: true, }, { name: "happy path snapshot version", @@ -437,7 +439,7 @@ func TestDownloadVersion(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "happy path released version with build metadata", @@ -459,7 +461,7 @@ func TestDownloadVersion(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, { name: "happy path snapshot version with build metadata", @@ -481,7 +483,7 @@ func TestDownloadVersion(t *testing.T) { }, args: args{a: agentSpec, version: agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, want: "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz", - wantErr: assert.NoError, + wantErr: false, }, } @@ -521,11 +523,18 @@ func TestDownloadVersion(t *testing.T) { got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) - if !tt.wantErr(t, err, fmt.Sprintf("Download(%v, 
%v)", tt.args.a, tt.args.version)) { + assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) + + if tt.wantErr { + assert.Error(t, err) + + assert.NoFileExists(t, filepath.Join(targetDirPath, tt.want)) + assert.NoFileExists(t, filepath.Join(targetDirPath, tt.want+".sha512")) + assert.NoDirExists(t, targetDirPath) return } - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) + assert.NoError(t, err) }) } } diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index f0d262f54e9..7442f8fd4d7 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1563,6 +1563,7 @@ func TestUpgradeDownloadErrors(t *testing.T) { require.Error(t, err, "expected error got none") require.ErrorIs(t, err, testError.expectedError, "expected error mismatch") require.NoFileExists(t, expectedDestPath, tc.cleanupMsg) + require.NoDirExists(t, testTargetPath, tc.cleanupMsg) }) } }) From d6c606370d141982f6434f094371b5b4ad555b5a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 21:35:51 +0300 Subject: [PATCH 033/127] enhancement(5235): refactored artifact test --- .../pkg/agent/application/upgrade/artifact/artifact_test.go | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/artifact_test.go b/internal/pkg/agent/application/upgrade/artifact/artifact_test.go index 6ab09f7508d..6498f8538e2 100644 --- a/internal/pkg/agent/application/upgrade/artifact/artifact_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/artifact_test.go @@ -23,7 +23,6 @@ func TestArtifacts(t *testing.T) { version agtversion.ParsedSemVer arch string expectedName string - targetDir string expectedPath string }{ "no_fips_arm64": { @@ -31,7 +30,6 @@ func TestArtifacts(t *testing.T) { version: *version, arch: "arm64", expectedName: "elastic-agent-9.1.0-linux-arm64.tar.gz", - targetDir: targetDir, expectedPath: filepath.Join(targetDir, "elastic-agent-9.1.0-linux-arm64.tar.gz"), }, "fips_x86": { @@ -39,7 +37,6 @@ func TestArtifacts(t *testing.T) { version: *version, arch: "32", expectedName: "elastic-agent-fips-9.1.0-linux-x86.tar.gz", - targetDir: targetDir, expectedPath: filepath.Join(targetDir, "elastic-agent-fips-9.1.0-linux-x86.tar.gz"), }, "fips_x86_64": { @@ -47,7 +44,6 @@ func TestArtifacts(t *testing.T) { version: *version, arch: "64", expectedName: "elastic-agent-fips-9.1.0-linux-x86_64.tar.gz", - targetDir: targetDir, expectedPath: filepath.Join(targetDir, "elastic-agent-fips-9.1.0-linux-x86_64.tar.gz"), }, } @@ -58,7 +54,7 @@ func TestArtifacts(t *testing.T) { require.NoError(t, err) require.Equal(t, test.expectedName, artifactName) - artifactPath, err := GetArtifactPath(test.a, test.version, "linux", test.arch, test.targetDir) + artifactPath, err := GetArtifactPath(test.a, test.version, "linux", test.arch, targetDir) require.NoError(t, err) require.Equal(t, test.expectedPath, artifactPath) }) From a6b4f16f8bec73c88f8a88aa3eb02e1cb7cb8ddf Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 21:42:36 +0300 Subject: [PATCH 034/127] enhancement(5235): ran mage check --- .../application/upgrade/artifact/download/http/downloader.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go 
b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 944229ffac0..499ef7c205b 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -17,6 +17,8 @@ import ( "strings" "time" + "go.elastic.co/apm/v2" + "github.com/elastic/elastic-agent-libs/transport/httpcommon" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" @@ -25,7 +27,6 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/pkg/core/logger" agtversion "github.com/elastic/elastic-agent/pkg/version" - "go.elastic.co/apm/v2" ) const ( From dae42d52a2c6602a5685d7d23dbe81c9afd9dcc8 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 31 Jul 2025 23:24:24 +0300 Subject: [PATCH 035/127] enhancement(5235): removed unnecessary comments --- .../agent/application/upgrade/artifact/download/fs/downloader.go | 1 - .../application/upgrade/artifact/download/http/downloader.go | 1 - 2 files changed, 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index dd3a394412a..1fb7b3e2b0d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -63,7 +63,6 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version return "", err } - // download from source to dest _, err = e.download(e.config.OS(), a, *version, ".sha512") return path, err diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 499ef7c205b..68407d7c4e7 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -138,7 +138,6 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version return "", err } - // download hash from source to dest, only if hash does not exist _, err = e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) return path, err From 0aedf851e4d7320485ac56ac65fccb2549e678bf Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 4 Aug 2025 15:13:33 +0300 Subject: [PATCH 036/127] enhancement(5235): reverted downloader cleanup to remove only the downloaded files not the download directory. updated tests. 
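
For illustration, a minimal sketch of the cleanup shape this change reverts to: a deferred function removes only the files created by the failed download attempt and leaves the shared target directory in place. The fetch helper and the downloadArtifactAndHash name are hypothetical stand-ins, not the production identifiers.

    package download

    import "os"

    // downloadArtifactAndHash fetches the artifact and its .sha512 companion.
    // On any error, the deferred block removes only the files this attempt
    // created; the target directory itself is left alone because other
    // artifacts may already live there.
    func downloadArtifactAndHash(fetch func(suffix string) (string, error)) (path string, err error) {
        downloaded := make([]string, 0, 2)
        defer func() {
            if err != nil {
                for _, p := range downloaded {
                    _ = os.Remove(p) // best-effort cleanup of partial files
                }
            }
        }()

        path, err = fetch("")
        downloaded = append(downloaded, path)
        if err != nil {
            return "", err
        }

        hashPath, err := fetch(".sha512")
        downloaded = append(downloaded, hashPath)
        if err != nil {
            return "", err
        }
        return path, nil
    }
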
--- .../upgrade/artifact/download/fs/downloader.go | 10 ++++++---- .../upgrade/artifact/download/fs/downloader_test.go | 2 +- .../upgrade/artifact/download/http/downloader.go | 13 ++++++++----- .../artifact/download/http/downloader_test.go | 2 +- .../pkg/agent/application/upgrade/upgrade_test.go | 2 +- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 1fb7b3e2b0d..02b7d12656c 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -47,11 +47,11 @@ func NewDownloader(config *artifact.Config) *Downloader { func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() + downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { - rmErr := os.RemoveAll(e.config.TargetDirectory) - if rmErr != nil { - err = errors.New(err, "failed to remove target directory", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, e.config.TargetDirectory)) + for _, path := range downloadedFiles { + os.Remove(path) } apm.CaptureError(ctx, err).Send() } @@ -59,11 +59,13 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version // download from source to dest path, err := e.download(e.config.OS(), a, *version, "") + downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } - _, err = e.download(e.config.OS(), a, *version, ".sha512") + hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") + downloadedFiles = append(downloadedFiles, hashPath) return path, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index 5ffdae53ca0..fff746d6c2b 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -172,7 +172,7 @@ func TestDownloader_Download(t *testing.T) { assert.NoFileExists(t, expectedTargetFile, "downloader should clean up partial artifact file on error") assert.NoFileExists(t, expectedHashFile, "downloader should clean up partial hash file on error") - assert.NoDirExists(t, targetDirPath, "downloader should clean up target directory on error") + assert.DirExists(t, targetDirPath, "downloader should not clean up target directory on error") return } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 68407d7c4e7..8317f634c00 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -122,23 +122,26 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() remoteArtifact := a.Artifact + downloadedFiles := make([]string, 0, 2) defer func() { if err != nil { - rmErr := os.RemoveAll(e.config.TargetDirectory) - if rmErr != nil { - err = errors.New(err, "failed to remove target directory", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, 
e.config.TargetDirectory)) + for _, path := range downloadedFiles { + if err := os.Remove(path); err != nil { + e.log.Warnf("failed to cleanup %s: %v", path, err) + } } - apm.CaptureError(ctx, err).Send() } }() // download from source to dest path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) + downloadedFiles = append(downloadedFiles, path) if err != nil { return "", err } - _, err = e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) + hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) + downloadedFiles = append(downloadedFiles, hashPath) return path, err } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index dc78812e6a6..39471c633d2 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -530,7 +530,7 @@ func TestDownloadVersion(t *testing.T) { assert.NoFileExists(t, filepath.Join(targetDirPath, tt.want)) assert.NoFileExists(t, filepath.Join(targetDirPath, tt.want+".sha512")) - assert.NoDirExists(t, targetDirPath) + assert.DirExists(t, targetDirPath) return } diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 7442f8fd4d7..74e050c69a7 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1563,7 +1563,7 @@ func TestUpgradeDownloadErrors(t *testing.T) { require.Error(t, err, "expected error got none") require.ErrorIs(t, err, testError.expectedError, "expected error mismatch") require.NoFileExists(t, expectedDestPath, tc.cleanupMsg) - require.NoDirExists(t, testTargetPath, tc.cleanupMsg) + require.DirExists(t, testTargetPath, "target directory should not be cleaned up") }) } }) From 064c411d5045372d0068e098991f75e668e1cc1c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 1 Aug 2025 16:50:44 +0300 Subject: [PATCH 037/127] enhancement(5235): took notes --- internal/pkg/agent/application/upgrade/step_unpack.go | 4 ++-- internal/pkg/agent/application/upgrade/upgrade.go | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index d65399f806f..43a36825cb7 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -137,7 +137,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa } dstPath := strings.TrimPrefix(mappedPackagePath, "data/") - dstPath = filepath.Join(dataDir, dstPath) + dstPath = filepath.Join(dataDir, dstPath) // TODO: look into this, this may be the new home to cleanup if skipFn(dstPath) { return nil @@ -400,7 +400,7 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa } rel := filepath.FromSlash(strings.TrimPrefix(fileName, "data/")) - abs := filepath.Join(dataDir, rel) + abs := filepath.Join(dataDir, rel) // TODO: if anything happens remove abs most likely, check this // find the root dir if currentDir := filepath.Dir(abs); rootDir == "" || len(filepath.Dir(rootDir)) > len(currentDir) { diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 983393bec18..9a0cdd5a17b 100644 --- 
a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -321,7 +321,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) } - newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) + newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) // TODO: this is the dir to cleanup if anything goes wrong if err := copyActionStore(u.log, newHome); err != nil { return nil, errors.New(err, "failed to copy action store") @@ -337,7 +337,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string det.SetState(details.StateReplacing) // create symlink to the /elastic-agent - hashedDir := unpackRes.VersionedHome + hashedDir := unpackRes.VersionedHome // TODO: this is important, this is the name of the new agent home directory symlinkPath := filepath.Join(paths.Top(), agentName) @@ -536,7 +536,7 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer } func rollbackInstall(ctx context.Context, log *logger.Logger, topDirPath, versionedHome, oldVersionedHome string) error { - oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) + oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) // TODO: topdir + new version home is the place to clean up: same as the newAgentInstallPath below err := changeSymlink(log, topDirPath, filepath.Join(topDirPath, agentName), oldAgentPath) if err != nil && !errors.Is(err, fs.ErrNotExist) { return fmt.Errorf("rolling back install: restoring symlink to %q failed: %w", oldAgentPath, err) From d6b3921c5f2e3747fcb0533a9484712e5167316b Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 2 Aug 2025 18:41:49 +0300 Subject: [PATCH 038/127] enhancement(5235): added upgrade cleaner --- .../pkg/agent/application/upgrade/upgrade.go | 113 +++++++++++++++++- 1 file changed, 108 insertions(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 9a0cdd5a17b..422c2fb2135 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -13,6 +13,7 @@ import ( "os/exec" "path/filepath" "runtime" + "slices" "strings" "time" @@ -99,6 +100,7 @@ type Upgrader struct { fleetServerURI string markerWatcher MarkerWatcher downloaderFactoryProvider DownloaderFactoryProvider + upgradeCleaner upgradeCleaner } // IsUpgradeable when agent is installed and running as a service or flag was provided. 
@@ -128,6 +130,11 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A upgradeable: IsUpgradeable(), markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), downloaderFactoryProvider: downloaderFactoryProvider, + upgradeCleaner: &upgradeCleanup{ + log: log, + rollbackFunc: rollbackInstall, + cleanupFuncs: []func() error{}, + }, }, nil } @@ -228,6 +235,102 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } +type upgradeCleaner interface { + setupRollback(topDirPath, newHomeDir, oldHomeDir string) error + setupArchiveCleanup(archivePath string) error + setupUnpackCleanup(newHomeDir, oldHomeDir string) error + cleanup(err error) error +} +type upgradeCleanup struct { + log *logger.Logger + rollbackToggle bool + archiveCleanupToggle bool + unpackCleanupToggle bool + rollbackFunc func(*logger.Logger, string, string, string) error + cleanupFuncs []func() error +} + +func (u *upgradeCleanup) setupArchiveCleanup(archivePath string) error { + u.log.Debugf("Setting up cleanup for archive, archivePath: %s", archivePath) + if archivePath == "" { + msg := "archive path is empty, cannot cleanup" + u.log.Errorf(msg) + return errors.New(msg) + } + u.archiveCleanupToggle = true + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + return os.RemoveAll(archivePath) + }) + + return nil +} + +func (u *upgradeCleanup) setupUnpackCleanup(newHomeDir, oldHomeDir string) error { + u.log.Debugf("Setting up cleanup for unpack, newVersionedHome: %s", newHomeDir) + + if !u.archiveCleanupToggle { + msg := "Cannot setup for unpack cleanup before archive cleanup is setup" + u.log.Debugf(msg) + return errors.New(msg) + } + + if newHomeDir == "" || oldHomeDir == "" { + msg := "new or old versioned home is empty, cannot cleanup" + u.log.Errorf(msg) + return errors.New(msg) + } + + if newHomeDir == oldHomeDir { + msg := "new and old versioned home are the same, cannot cleanup" + u.log.Errorf(msg) + return errors.New(msg) + } + + u.unpackCleanupToggle = true + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + return os.RemoveAll(newHomeDir) + }) + + return nil +} + +func (u *upgradeCleanup) setupRollback(topDirPath, newHomeDir, oldHomeDir string) error { + u.log.Debugf("Setting up cleanup for rollback, topDirPath: %s, oldVersionedHome: %s, newVersionedHome: %s", topDirPath, oldHomeDir, newHomeDir) + + if !u.unpackCleanupToggle { + msg := "Cannot setup for rollback before unpack cleanup is setup" + u.log.Debugf(msg) + return errors.New(msg) + } + + u.rollbackToggle = true + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + return rollbackInstall(u.log, topDirPath, newHomeDir, oldHomeDir) + }) + + return nil +} + +func (u *upgradeCleanup) cleanup(err error) error { + if err == nil { + u.log.Debugf("No error, skipping cleanup") + return nil + } + + slices.Reverse(u.cleanupFuncs) + + for _, cleanupFunc := range u.cleanupFuncs { + if cleanupErr := cleanupFunc(); cleanupErr != nil { + return goerrors.Join(err, cleanupErr) + } + } + + return err +} + // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. 
func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) @@ -351,7 +454,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string if err := changeSymlink(u.log, paths.Top(), symlinkPath, newPath); err != nil { u.log.Errorw("Rolling back: changing symlink failed", "error.message", err) - rollbackErr := rollbackInstall(ctx, u.log, paths.Top(), hashedDir, currentVersionedHome) + rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) return nil, goerrors.Join(err, rollbackErr) } @@ -380,7 +483,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string previous, // old agent version data action, det, OUTCOME_UPGRADE); err != nil { u.log.Errorw("Rolling back: marking upgrade failed", "error.message", err) - rollbackErr := rollbackInstall(ctx, u.log, paths.Top(), hashedDir, currentVersionedHome) + rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) return nil, goerrors.Join(err, rollbackErr) } @@ -389,14 +492,14 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string var watcherCmd *exec.Cmd if watcherCmd, err = InvokeWatcher(u.log, watcherExecutable); err != nil { u.log.Errorw("Rolling back: starting watcher failed", "error.message", err) - rollbackErr := rollbackInstall(ctx, u.log, paths.Top(), hashedDir, currentVersionedHome) + rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) return nil, goerrors.Join(err, rollbackErr) } watcherWaitErr := waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) if watcherWaitErr != nil { killWatcherErr := watcherCmd.Process.Kill() - rollbackErr := rollbackInstall(ctx, u.log, paths.Top(), hashedDir, currentVersionedHome) + rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) return nil, goerrors.Join(watcherWaitErr, killWatcherErr, rollbackErr) } @@ -535,7 +638,7 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer return current == newVersion } -func rollbackInstall(ctx context.Context, log *logger.Logger, topDirPath, versionedHome, oldVersionedHome string) error { +func rollbackInstall(log *logger.Logger, topDirPath, versionedHome, oldVersionedHome string) error { oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) // TODO: topdir + new version home is the place to clean up: same as the newAgentInstallPath below err := changeSymlink(log, topDirPath, filepath.Join(topDirPath, agentName), oldAgentPath) if err != nil && !errors.Is(err, fs.ErrNotExist) { From 7f3e787af2348e10799f2fb2878080c54f143f47 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 4 Aug 2025 15:00:52 +0300 Subject: [PATCH 039/127] enhancement(5235): downloaders return download result instead of path. updated tests, added logs for development, added early returns to simulate errors. 
--- .../artifact/download/composed/downloader.go | 12 +-- .../download/composed/downloader_test.go | 12 +-- .../upgrade/artifact/download/downloader.go | 7 +- .../artifact/download/fs/downloader.go | 17 ++-- .../artifact/download/fs/verifier_test.go | 4 +- .../artifact/download/http/downloader.go | 16 ++-- .../artifact/download/http/downloader_test.go | 20 ++--- .../artifact/download/http/verifier_test.go | 4 +- .../artifact/download/snapshot/downloader.go | 2 +- .../download/snapshot/downloader_test.go | 2 +- .../application/upgrade/step_download.go | 44 +++++----- .../application/upgrade/step_download_test.go | 6 +- .../agent/application/upgrade/step_relink.go | 3 +- .../agent/application/upgrade/step_unpack.go | 61 ++++++++------ .../pkg/agent/application/upgrade/upgrade.go | 82 +++++++++++++++---- .../agent/application/upgrade/upgrade_test.go | 4 +- 16 files changed, 190 insertions(+), 106 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader.go index 46ac8d77492..12dc7e18c2b 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader.go @@ -36,21 +36,21 @@ func NewDownloader(downloaders ...download.Downloader) *Downloader { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. -func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *version.ParsedSemVer) (string, error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *version.ParsedSemVer) (download.DownloadResult, error) { var errs []error span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() for _, d := range e.dd { - s, e := d.Download(ctx, a, version) - if e == nil { - return s, nil + downloadResult, err := d.Download(ctx, a, version) + if err == nil { + return downloadResult, nil } - errs = append(errs, e) + errs = append(errs, err) } - return "", goerrors.Join(errs...) + return download.DownloadResult{}, goerrors.Join(errs...) 
} func (e *Downloader) Reload(c *artifact.Config) error { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader_test.go index 0c72f97bcf6..97134d42b08 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/composed/downloader_test.go @@ -26,9 +26,9 @@ type FailingDownloader struct { called bool } -func (d *FailingDownloader) Download(context.Context, artifact.Artifact, *agtversion.ParsedSemVer) (string, error) { +func (d *FailingDownloader) Download(context.Context, artifact.Artifact, *agtversion.ParsedSemVer) (download.DownloadResult, error) { d.called = true - return "", errors.New("failing") + return download.DownloadResult{}, errors.New("failing") } func (d *FailingDownloader) Called() bool { return d.called } @@ -37,9 +37,11 @@ type SuccDownloader struct { called bool } -func (d *SuccDownloader) Download(context.Context, artifact.Artifact, *agtversion.ParsedSemVer) (string, error) { +func (d *SuccDownloader) Download(context.Context, artifact.Artifact, *agtversion.ParsedSemVer) (download.DownloadResult, error) { d.called = true - return succ, nil + return download.DownloadResult{ + ArtifactPath: succ, + }, nil } func (d *SuccDownloader) Called() bool { return d.called } @@ -70,7 +72,7 @@ func TestComposed(t *testing.T) { d := NewDownloader(tc.downloaders[0], tc.downloaders[1]) r, _ := d.Download(context.TODO(), artifact.Artifact{Name: "a"}, parseVersion) - assert.Equal(t, tc.expectedResult, r == succ) + assert.Equal(t, tc.expectedResult, r.ArtifactPath == succ) assert.True(t, tc.checkFunc(tc.downloaders)) } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/downloader.go index 33a3afb98dd..89462dfd66d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/downloader.go @@ -13,5 +13,10 @@ import ( // Downloader is an interface allowing download of an artifact type Downloader interface { - Download(ctx context.Context, a artifact.Artifact, version *version.ParsedSemVer) (string, error) + Download(ctx context.Context, a artifact.Artifact, version *version.ParsedSemVer) (DownloadResult, error) +} + +type DownloadResult struct { + ArtifactPath string + ArtifactHash string } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 02b7d12656c..e544830ffbf 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -44,7 +45,7 @@ func NewDownloader(config *artifact.Config) *Downloader { // Download fetches the package from configured source. 
// Returns absolute path to downloaded package and an error. -func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ download.DownloadResult, err error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() downloadedFiles := make([]string, 0, 2) @@ -61,13 +62,19 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version path, err := e.download(e.config.OS(), a, *version, "") downloadedFiles = append(downloadedFiles, path) if err != nil { - return "", err + return download.DownloadResult{}, err } - hashPath, err := e.download(e.config.OS(), a, *version, ".sha512") - downloadedFiles = append(downloadedFiles, hashPath) + hash, err := e.download(e.config.OS(), a, *version, ".sha512") + downloadedFiles = append(downloadedFiles, hash) + if err != nil { + return download.DownloadResult{}, err + } - return path, err + return download.DownloadResult{ + ArtifactPath: path, + ArtifactHash: hash, + }, nil } // DownloadAsc downloads the package .asc file from configured source. diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go index aa15d9a05b1..abe35860593 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go @@ -225,12 +225,12 @@ func TestVerify(t *testing.T) { pgpKey := prepareTestCase(t, agentSpec, testVersion, config) testClient := NewDownloader(config) - artifactPath, err := testClient.Download(ctx, agentSpec, testVersion) + downloadResult, err := testClient.Download(ctx, agentSpec, testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts") _, err = testClient.DownloadAsc(context.Background(), agentSpec, *testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts .asc file") - _, err = os.Stat(artifactPath) + _, err = os.Stat(downloadResult.ArtifactPath) require.NoError(t, err) testVerifier, err := NewVerifier(log, config, pgpKey) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index 8317f634c00..f285d11e676 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -118,7 +118,7 @@ func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. 
-func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ string, err error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ download.DownloadResult, err error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() remoteArtifact := a.Artifact @@ -137,13 +137,19 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) downloadedFiles = append(downloadedFiles, path) if err != nil { - return "", err + return download.DownloadResult{}, err } - hashPath, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) - downloadedFiles = append(downloadedFiles, hashPath) + hash, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) + downloadedFiles = append(downloadedFiles, hash) + if err != nil { + return download.DownloadResult{}, err + } - return path, err + return download.DownloadResult{ + ArtifactPath: path, + ArtifactHash: hash, + }, nil } func (e *Downloader) composeURI(artifactName, packageName string) (string, error) { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index 39471c633d2..f53e4d8da39 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -66,19 +66,19 @@ func TestDownload(t *testing.T) { upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") testClient := NewDownloaderWithClient(log, config, elasticClient, upgradeDetails) - artifactPath, err := testClient.Download(context.Background(), beatSpec, version) + downloadResult, err := testClient.Download(context.Background(), beatSpec, version) if err != nil { t.Fatal(err) } - require.Equal(t, targetDir, filepath.Dir(artifactPath)) + require.Equal(t, targetDir, filepath.Dir(downloadResult.ArtifactPath)) - _, err = os.Stat(artifactPath) + _, err = os.Stat(downloadResult.ArtifactPath) if err != nil { t.Fatal(err) } - os.Remove(artifactPath) + os.Remove(downloadResult.ArtifactPath) }) } } @@ -118,8 +118,8 @@ func TestDownloadBodyError(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) - artifactPath, err := testClient.Download(context.Background(), beatSpec, version) - os.Remove(artifactPath) + downloadResult, err := testClient.Download(context.Background(), beatSpec, version) + os.Remove(downloadResult.ArtifactPath) if err == nil { t.Fatal("expected Download to return an error") } @@ -175,8 +175,8 @@ func TestDownloadLogProgressWithLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) - artifactPath, err := testClient.Download(context.Background(), beatSpec, version) - os.Remove(artifactPath) + downloadResult, err := testClient.Download(context.Background(), beatSpec, version) + os.Remove(downloadResult.ArtifactPath) require.NoError(t, err, "Download should not have errored") expectedURL := fmt.Sprintf("%s/%s-%s-%s", srv.URL, "beats/agentbeat/agentbeat", version, "linux-x86_64.tar.gz") 
@@ -258,8 +258,8 @@ func TestDownloadLogProgressWithoutLength(t *testing.T) { log, obs := loggertest.New("downloader") upgradeDetails := details.NewDetails("8.12.0", details.StateRequested, "") testClient := NewDownloaderWithClient(log, config, *client, upgradeDetails) - artifactPath, err := testClient.Download(context.Background(), beatSpec, version) - os.Remove(artifactPath) + downloadResult, err := testClient.Download(context.Background(), beatSpec, version) + os.Remove(downloadResult.ArtifactPath) require.NoError(t, err, "Download should not have errored") expectedURL := fmt.Sprintf("%s/%s-%s-%s", srv.URL, "beats/agentbeat/agentbeat", version, "linux-x86_64.tar.gz") diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go index 248fc49ac19..b9f547c443f 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/verifier_test.go @@ -98,11 +98,11 @@ func runTests(t *testing.T, testCases []testCase, td *testDials, config *artifac downloader, err := NewDownloader(log, config, upgradeDetails) require.NoError(t, err, "could not create new downloader") - pkgPath, err := downloader.Download(cancelCtx, beatSpec, version) + downloadResult, err := downloader.Download(cancelCtx, beatSpec, version) require.NoErrorf(t, err, "failed downloading %s v%s", beatSpec.Artifact, version) - _, err = os.Stat(pkgPath) + _, err = os.Stat(downloadResult.ArtifactPath) if err != nil { t.Fatal(err) } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go index 57fbac689aa..b673fbf05df 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader.go @@ -81,7 +81,7 @@ func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. 
-func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (download.DownloadResult, error) { // remove build metadata to match filename of the package for the specific snapshot build strippedVersion := agtversion.NewParsedSemVer(version.Major(), version.Minor(), version.Patch(), version.Prerelease(), "") return e.downloader.Download(ctx, a, strippedVersion) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go index c9bcd20a071..a36a504ddcf 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/snapshot/downloader_test.go @@ -146,7 +146,7 @@ func TestDownloadVersion(t *testing.T) { return } - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) + assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got.ArtifactPath, "Download(%v, %v)", tt.args.a, tt.args.version) }) } diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index a8594f00ff1..f8a4e10007a 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -37,9 +37,9 @@ const ( fleetUpgradeFallbackPGPFormat = "/api/agents/upgrades/%d.%d.%d/pgp-public-key" ) -type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details) (string, error) +type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details) (download.DownloadResult, error) -func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ string, err error) { +func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ download.DownloadResult, err error) { span, ctx := apm.StartSpan(ctx, "downloadArtifact", "app.internal") defer func() { apm.CaptureError(ctx, err).Send() @@ -67,13 +67,13 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // no fallback is allowed because it was requested that this specific source be used factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) if err != nil { - return "", err + return download.DownloadResult{}, err } // set specific verifier, local file verifies locally only verifier, err = fs.NewVerifier(u.log, &settings, release.PGP()) if err != nil { - return "", errors.New(err, "initiating verifier") + return download.DownloadResult{}, errors.New(err, "initiating verifier") } // log that a local upgrade artifact is being used @@ -89,7 +89,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi // set the factory to the newDownloader factory factory, err = u.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) if err != nil { - return "", err + return download.DownloadResult{}, err } u.log.Infow("Downloading upgrade 
artifact", "version", parsedVersion, "source_uri", settings.SourceURI, "drop_path", settings.DropPath, @@ -100,29 +100,29 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi } if err := os.MkdirAll(paths.Downloads(), 0750); err != nil { - return "", errors.New(err, fmt.Sprintf("failed to create download directory at %s", paths.Downloads())) + return download.DownloadResult{}, errors.New(err, fmt.Sprintf("failed to create download directory at %s", paths.Downloads())) } - path, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails) + downloadResult, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails) if err != nil { - return "", fmt.Errorf("failed download of agent binary: %w", err) + return download.DownloadResult{}, fmt.Errorf("failed download of agent binary: %w", err) } if skipVerifyOverride { - return path, nil + return downloadResult, nil } if verifier == nil { verifier, err = newVerifier(parsedVersion, u.log, &settings) if err != nil { - return "", errors.New(err, "initiating verifier") + return download.DownloadResult{}, errors.New(err, "initiating verifier") } } if err := verifier.Verify(ctx, agentArtifact, *parsedVersion, skipDefaultPgp, pgpBytes...); err != nil { - return "", errors.New(err, "failed verification of agent binary") + return download.DownloadResult{}, errors.New(err, "failed verification of agent binary") } - return path, nil + return downloadResult, nil } func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgpBytes []string) []string { @@ -203,21 +203,21 @@ func (u *Upgrader) downloadOnce( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, -) (string, error) { +) (download.DownloadResult, error) { downloader, err := factory(version, u.log, settings, upgradeDetails) if err != nil { - return "", fmt.Errorf("unable to create fetcher: %w", err) + return download.DownloadResult{}, fmt.Errorf("unable to create fetcher: %w", err) } // All download artifacts expect a name that includes .[-SNAPSHOT] so we have to // make sure not to include build metadata we might have in the parsed version (for snapshots we already // used that to configure the URL we download the files from) - path, err := downloader.Download(ctx, agentArtifact, version) + downloadResult, err := downloader.Download(ctx, agentArtifact, version) if err != nil { - return "", fmt.Errorf("unable to download package: %w", err) + return download.DownloadResult{}, fmt.Errorf("unable to download package: %w", err) } // Download successful - return path, nil + return downloadResult, nil } func (u *Upgrader) downloadWithRetries( @@ -226,7 +226,7 @@ func (u *Upgrader) downloadWithRetries( version *agtversion.ParsedSemVer, settings *artifact.Config, upgradeDetails *details.Details, -) (string, error) { +) (download.DownloadResult, error) { cancelDeadline := time.Now().Add(settings.Timeout) cancelCtx, cancel := context.WithDeadline(ctx, cancelDeadline) defer cancel() @@ -237,14 +237,14 @@ func (u *Upgrader) downloadWithRetries( expBo.InitialInterval = settings.RetrySleepInitDuration boCtx := backoff.WithContext(expBo, cancelCtx) - var path string + var downloadResult download.DownloadResult var attempt uint opFn := func() error { attempt++ u.log.Infof("download attempt %d", attempt) var err error - path, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails) + downloadResult, err = u.downloadOnce(cancelCtx, factory, version, settings, upgradeDetails) 
if err != nil { if errors.Is(err, upgradeErrors.ErrInsufficientDiskSpace) { @@ -264,12 +264,12 @@ func (u *Upgrader) downloadWithRetries( } if err := backoff.RetryNotify(opFn, boCtx, opFailureNotificationFn); err != nil { - return "", err + return download.DownloadResult{}, err } // Clear retry details upon success upgradeDetails.SetRetryableError(nil) upgradeDetails.SetRetryUntil(nil) - return path, nil + return downloadResult, nil } diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 8081bd8bbde..d3f2b3e1748 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -31,8 +31,10 @@ type mockDownloader struct { downloadErr error } -func (md *mockDownloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { - return md.downloadPath, md.downloadErr +func (md *mockDownloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (download.DownloadResult, error) { + return download.DownloadResult{ + ArtifactPath: md.downloadPath, + }, md.downloadErr } func TestFallbackIsAppended(t *testing.T) { diff --git a/internal/pkg/agent/application/upgrade/step_relink.go b/internal/pkg/agent/application/upgrade/step_relink.go index f9256d9980d..337d5925f44 100644 --- a/internal/pkg/agent/application/upgrade/step_relink.go +++ b/internal/pkg/agent/application/upgrade/step_relink.go @@ -20,7 +20,8 @@ const ( ) func changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { - + log.Infof("Changing symlink, topDirPath: %s, symlinkPath: %s, newTarget: %s", topDirPath, symlinkPath, newTarget) + return nil // handle windows suffixes if runtime.GOOS == windows { symlinkPath += exe diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 43a36825cb7..7c69f484399 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -49,7 +49,7 @@ func (u *Upgrader) unpack(version, archivePath, dataDir string, flavor string) ( if err != nil { u.log.Errorw("Failed to unpack upgrade artifact", "error.message", err, "version", version, "file.path", archivePath, "unpack_result", unpackRes) - return UnpackResult{}, err + return unpackRes, err } u.log.Infow("Unpacked upgrade artifact", "version", version, "file.path", archivePath, "unpack_result", unpackRes) @@ -80,9 +80,11 @@ func (u *Upgrader) getPackageMetadata(archivePath string) (packageMetadata, erro func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (UnpackResult, error) { var hash, rootDir string + result := UnpackResult{} + r, err := zip.OpenReader(archivePath) if err != nil { - return UnpackResult{}, err + return result, err } defer r.Close() @@ -93,7 +95,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa metadata, err := getPackageMetadataFromZipReader(r, fileNamePrefix) if err != nil { - return UnpackResult{}, fmt.Errorf("retrieving package metadata from %q: %w", archivePath, err) + return result, fmt.Errorf("retrieving package metadata from %q: %w", archivePath, err) } hash = metadata.hash[:hashLen] @@ -107,9 +109,12 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa versionedHome = createVersionedHomeFromHash(hash) } + log.Infof("THE VERSIONED HOME IN 
UNZIP IS %s", versionedHome) + result.VersionedHome = versionedHome + skipFn, err := skipFnFromZip(log, r, flavor, fileNamePrefix, createVersionedHomeFromHash(hash), registry) if err != nil { - return UnpackResult{}, err + return result, err } unpackFile := func(f *zip.File) (err error) { @@ -138,6 +143,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa dstPath := strings.TrimPrefix(mappedPackagePath, "data/") dstPath = filepath.Join(dataDir, dstPath) // TODO: look into this, this may be the new home to cleanup + log.Infof("THE DESTINATION PATH IN UNZIP IS %s", dstPath) if skipFn(dstPath) { return nil @@ -196,14 +202,13 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa } if err := unpackFile(f); err != nil { - return UnpackResult{}, err + return result, err } } - return UnpackResult{ - Hash: hash, - VersionedHome: versionedHome, - }, nil + result.Hash = hash + + return result, nil } // getPackageMetadataFromZip reads an archive on a path archivePath and parses metadata from manifest file @@ -319,12 +324,14 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa var rootDir string var hash string + result := UnpackResult{} + // Look up manifest in the archive and prepare path mappings, if any pm := pathMapper{} metadata, err := getPackageMetadataFromTar(archivePath) if err != nil { - return UnpackResult{}, fmt.Errorf("retrieving package metadata from %q: %w", archivePath, err) + return result, fmt.Errorf("retrieving package metadata from %q: %w", archivePath, err) } hash = metadata.hash[:hashLen] @@ -340,20 +347,23 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa versionedHome = createVersionedHomeFromHash(metadata.hash) } + log.Infof("THE VERSIONED HOME IN UNTAR IS %s", versionedHome) + result.VersionedHome = versionedHome + skipFn, err := skipFnFromTar(log, archivePath, flavor, registry) if err != nil { - return UnpackResult{}, err + return result, err } r, err := os.Open(archivePath) if err != nil { - return UnpackResult{}, errors.New(fmt.Sprintf("artifact for 'elastic-agent' could not be found at '%s'", archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) + return result, errors.New(fmt.Sprintf("artifact for 'elastic-agent' could not be found at '%s'", archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) } defer r.Close() zr, err := gzip.NewReader(r) if err != nil { - return UnpackResult{}, errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) + return result, errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) } tr := tar.NewReader(zr) @@ -370,11 +380,11 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa break } if err != nil { - return UnpackResult{}, err + return result, err } if !validFileName(f.Name) { - return UnpackResult{}, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) + return result, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) } fileName := strings.TrimPrefix(f.Name, fileNamePrefix) @@ -401,6 +411,7 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa rel := filepath.FromSlash(strings.TrimPrefix(fileName, "data/")) abs := filepath.Join(dataDir, rel) // TODO: if anything happens remove abs most likely, check this + log.Infof("THE ABSOLUTE PATH IN UNTAR 
IS %s", abs) // find the root dir if currentDir := filepath.Dir(abs); rootDir == "" || len(filepath.Dir(rootDir)) > len(currentDir) { @@ -415,13 +426,13 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa // create non-existing containing folders with 0750 permissions right now, we'll fix the permission of each // directory as we come across them while processing the other package entries if err = os.MkdirAll(filepath.Dir(abs), 0750); err != nil { - return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return result, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } // remove any world permissions from the file wf, err := os.OpenFile(abs, os.O_RDWR|os.O_CREATE|os.O_TRUNC, mode.Perm()&0770) if err != nil { - return UnpackResult{}, errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return result, errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } //nolint:gosec // legacy @@ -430,7 +441,7 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa err = closeErr } if err != nil { - return UnpackResult{}, fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) + return result, fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) } case mode.IsDir(): log.Debugw("Unpacking directory", "archive", "tar", "file.path", abs) @@ -439,26 +450,24 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (Unpa if errors.Is(err, fs.ErrNotExist) { // the directory does not exist, create it and any non-existing parent directory with the same permissions if err := os.MkdirAll(abs, mode.Perm()&0770); err != nil { - return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return result, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } else if err != nil { - return UnpackResult{}, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return result, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } else { // directory already exists, set the appropriate permissions err = os.Chmod(abs, mode.Perm()&0770) if err != nil { - return UnpackResult{}, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return result, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } default: - return UnpackResult{}, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) + return result, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) } } + result.Hash = hash - return UnpackResult{ - Hash: hash, - VersionedHome: versionedHome, - }, nil + return result, nil } func skipFnFromTar(log *logger.Logger, archivePath 
string, flavor string, registry map[string][]string) (install.SkipFn, error) { diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 422c2fb2135..0b346512d05 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -237,7 +237,7 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m type upgradeCleaner interface { setupRollback(topDirPath, newHomeDir, oldHomeDir string) error - setupArchiveCleanup(archivePath string) error + setupArchiveCleanup(downloadResult download.DownloadResult) error setupUnpackCleanup(newHomeDir, oldHomeDir string) error cleanup(err error) error } @@ -250,9 +250,9 @@ type upgradeCleanup struct { cleanupFuncs []func() error } -func (u *upgradeCleanup) setupArchiveCleanup(archivePath string) error { - u.log.Debugf("Setting up cleanup for archive, archivePath: %s", archivePath) - if archivePath == "" { +func (u *upgradeCleanup) setupArchiveCleanup(downloadResult download.DownloadResult) error { + u.log.Debugf("Setting up cleanup for archive, archivePath: %s", downloadResult.ArtifactPath) + if downloadResult.ArtifactPath == "" { msg := "archive path is empty, cannot cleanup" u.log.Errorf(msg) return errors.New(msg) @@ -260,7 +260,8 @@ func (u *upgradeCleanup) setupArchiveCleanup(archivePath string) error { u.archiveCleanupToggle = true u.cleanupFuncs = append(u.cleanupFuncs, func() error { - return os.RemoveAll(archivePath) + //TODO: remove the hash file as well + return os.RemoveAll(downloadResult.ArtifactPath) }) return nil @@ -333,6 +334,16 @@ func (u *upgradeCleanup) cleanup(err error) error { // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { + defer func() { + if err != nil { + cleanupErr := u.upgradeCleaner.cleanup(err) + if cleanupErr != nil { + u.log.Errorw("Error cleaning up after upgrade", "error.message", cleanupErr) + err = goerrors.Join(err, cleanupErr) + } + } + }() + u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) currentVersion := agentVersion{ @@ -375,7 +386,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("error parsing version %q: %w", version, err) } - archivePath, err := u.downloadArtifact(ctx, parsedVersion, sourceURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + downloadResult, err := u.downloadArtifact(ctx, parsedVersion, sourceURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) if err != nil { // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files // This may have an issue if users are upgrading to the same version number. 
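Illustrative note (not part of the diff): the upgradeCleaner contract introduced above registers one undo step per completed upgrade stage and, from the deferred cleanup call, unwinds those steps in reverse order only when the upgrade returns an error. A minimal, self-contained Go sketch of that pattern follows; the stage names are hypothetical stand-ins for the archive, unpack and symlink steps, and the flow is condensed for illustration rather than copied from the patch.

package main

import (
	"errors"
	"fmt"
	"slices"
)

// cleaner mirrors the shape of upgradeCleanup: setup calls append undo steps,
// and cleanup replays them last-in-first-out only when the upgrade failed.
type cleaner struct {
	cleanupFuncs []func() error
}

func (c *cleaner) add(step func() error) {
	c.cleanupFuncs = append(c.cleanupFuncs, step)
}

func (c *cleaner) cleanup(err error) error {
	if err == nil {
		return nil // nothing to undo on success
	}
	funcs := slices.Clone(c.cleanupFuncs)
	slices.Reverse(funcs)
	for _, f := range funcs {
		if cleanupErr := f(); cleanupErr != nil {
			// stop at the first cleanup failure and surface both errors
			return errors.Join(err, cleanupErr)
		}
	}
	return err
}

func main() {
	c := &cleaner{}
	// Hypothetical stages standing in for archive download, unpack and symlink swap.
	c.add(func() error { fmt.Println("remove downloaded archive"); return nil })
	c.add(func() error { fmt.Println("remove unpacked home dir"); return nil })
	c.add(func() error { fmt.Println("restore previous symlink"); return nil })

	// Simulate a failure after all stages were set up: steps run in reverse order.
	fmt.Println(c.cleanup(errors.New("upgrade failed")))
}

As a usage note, this matches the ordering the patch enforces through its toggle fields: a later stage's cleanup can only be registered once the earlier stage's cleanup is in place, so a failure at any point removes only what has actually been created.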
@@ -386,11 +397,15 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } + if err := u.upgradeCleaner.setupArchiveCleanup(downloadResult); err != nil { + return nil, err + } + det.SetState(details.StateExtracting) - metadata, err := u.getPackageMetadata(archivePath) + metadata, err := u.getPackageMetadata(downloadResult.ArtifactPath) if err != nil { - return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, archivePath, err) + return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) } newVersion := extractAgentVersion(metadata, version) @@ -410,21 +425,41 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", paths.Top(), err) } u.log.Debugf("detected used flavor: %q", detectedFlavor) - unpackRes, err := u.unpack(version, archivePath, paths.Data(), detectedFlavor) + unpackRes, unpackErr := u.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) + + if unpackErr != nil { + err = goerrors.Join(err, unpackErr) + } + + newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) + u.log.Infof("newHome: %s", newHome) + + if unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()); unpackCleanupSetupErr != nil { + err = goerrors.Join(err, unpackCleanupSetupErr) + } + if err != nil { return nil, err } + u.log.Infof("unpackRes: %+v", unpackRes) + newHash := unpackRes.Hash if newHash == "" { return nil, errors.New("unknown hash") } - if unpackRes.VersionedHome == "" { - return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) - } + u.log.Infof("unpackRes.Hash: %s", unpackRes.Hash) + + // if unpackRes.VersionedHome == "" {// TODO: need to assert that unpack + // returns a versioned home at all times + // return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) + // } - newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) // TODO: this is the dir to cleanup if anything goes wrong + u.log.Infof("unpackRes.VersionedHome: %s", unpackRes.VersionedHome) + + // newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) // TODO: this is the dir to cleanup if anything goes wrong + // u.log.Infof("newHome: %s", newHome) if err := copyActionStore(u.log, newHome); err != nil { return nil, errors.New(err, "failed to copy action store") @@ -433,6 +468,9 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string newRunPath := filepath.Join(newHome, "run") oldRunPath := filepath.Join(paths.Run()) + u.log.Infof("oldRunPath: %s", oldRunPath) + u.log.Infof("newRunPath: %s", newRunPath) + if err := copyRunDirectory(u.log, oldRunPath, newRunPath); err != nil { return nil, errors.New(err, "failed to copy run directory") } @@ -440,18 +478,29 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string det.SetState(details.StateReplacing) // create symlink to the /elastic-agent - hashedDir := unpackRes.VersionedHome // TODO: this is important, this is the name of the new agent home directory + hashedDir := unpackRes.VersionedHome + u.log.Infof("hashedDir: %s", hashedDir) symlinkPath := filepath.Join(paths.Top(), agentName) + u.log.Infof("symlinkPath: %s", symlinkPath) // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
newPath := paths.BinaryPath(filepath.Join(paths.Top(), hashedDir), agentName) + u.log.Infof("newPath: %s", newPath) currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) if err != nil { return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) } + if rollbackSetupErr := u.upgradeCleaner.setupRollback(paths.Top(), newHome, currentVersionedHome); rollbackSetupErr != nil { + err = goerrors.Join(err, rollbackSetupErr) + } + + u.log.Infof("currentVersionedHome: %s", currentVersionedHome) + changeSymlink(u.log, paths.Top(), symlinkPath, newPath) + return nil, errors.New("we are done here") + if err := changeSymlink(u.log, paths.Top(), symlinkPath, newPath); err != nil { u.log.Errorw("Rolling back: changing symlink failed", "error.message", err) rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) @@ -639,13 +688,16 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer } func rollbackInstall(log *logger.Logger, topDirPath, versionedHome, oldVersionedHome string) error { - oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) // TODO: topdir + new version home is the place to clean up: same as the newAgentInstallPath below + log.Infof("Rolling back install, topDirPath: %s, versionedHome: %s, oldVersionedHome: %s", topDirPath, versionedHome, oldVersionedHome) + oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) + log.Infof("oldAgentPath: %s", oldAgentPath) err := changeSymlink(log, topDirPath, filepath.Join(topDirPath, agentName), oldAgentPath) if err != nil && !errors.Is(err, fs.ErrNotExist) { return fmt.Errorf("rolling back install: restoring symlink to %q failed: %w", oldAgentPath, err) } newAgentInstallPath := filepath.Join(topDirPath, versionedHome) + log.Infof("newAgentInstallPath: %s", newAgentInstallPath) err = os.RemoveAll(newAgentInstallPath) if err != nil && !errors.Is(err, fs.ErrNotExist) { return fmt.Errorf("rolling back install: removing new agent install at %q failed: %w", newAgentInstallPath, err) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 74e050c69a7..bc1d80c8419 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1308,8 +1308,8 @@ func (f *fakeAcker) Commit(ctx context.Context) error { type mockDownloaderFactoryProviderTest struct { } -func (md *mockDownloaderFactoryProviderTest) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (string, error) { - return "", nil +func (md *mockDownloaderFactoryProviderTest) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (download.DownloadResult, error) { + return download.DownloadResult{}, nil } func TestDownloaderFactoryProvider(t *testing.T) { From 2dc897788ddaea6d02f9b949857d4721eccbd268 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 5 Aug 2025 02:00:38 +0300 Subject: [PATCH 040/127] enhancement(5235): refactored downloaders, refactored cleanup functions, added tests for the cleanup functions --- .../download/common/artifact_path_and_name.go | 36 +++ .../upgrade/artifact/download/downloader.go | 4 +- .../artifact/download/fs/downloader.go | 72 ++--- .../artifact/download/fs/downloader_test.go | 222 ++++++++------- .../artifact/download/fs/verifier_test.go | 2 - 
.../artifact/download/http/downloader.go | 84 +++--- .../artifact/download/http/downloader_test.go | 4 +- .../application/upgrade/step_download.go | 10 +- .../application/upgrade/step_download_test.go | 20 +- .../agent/application/upgrade/step_relink.go | 1 - .../pkg/agent/application/upgrade/upgrade.go | 134 ++------- .../application/upgrade/upgrade_cleanup.go | 113 ++++++++ .../upgrade/upgrade_cleanup_test.go | 264 ++++++++++++++++++ 13 files changed, 614 insertions(+), 352 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/artifact/download/common/artifact_path_and_name.go create mode 100644 internal/pkg/agent/application/upgrade/upgrade_cleanup.go create mode 100644 internal/pkg/agent/application/upgrade/upgrade_cleanup_test.go diff --git a/internal/pkg/agent/application/upgrade/artifact/download/common/artifact_path_and_name.go b/internal/pkg/agent/application/upgrade/artifact/download/common/artifact_path_and_name.go new file mode 100644 index 00000000000..8c35d98a1f2 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/artifact/download/common/artifact_path_and_name.go @@ -0,0 +1,36 @@ +package common + +import ( + "fmt" + + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/pkg/version" +) + +type ArtifactPathAndName struct { + ArtifactName string + HashName string + ArtifactPath string + HashPath string +} + +type GetArtifactPathAndNameFunc func(a artifact.Artifact, version version.ParsedSemVer, remoteArtifact, operatingSystem, arch, targetDir string) (ArtifactPathAndName, error) + +func GetArtifactPathAndName(a artifact.Artifact, version version.ParsedSemVer, remoteArtifact, operatingSystem, arch, targetDir string) (ArtifactPathAndName, error) { + filename, err := artifact.GetArtifactName(a, version, operatingSystem, arch) + if err != nil { + return ArtifactPathAndName{}, fmt.Errorf("generating package name failed: %w", err) + } + + fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, arch, targetDir) + if err != nil { + return ArtifactPathAndName{}, fmt.Errorf("generating package path failed: %w", err) + } + + return ArtifactPathAndName{ + ArtifactName: filename, + HashName: filename + ".sha512", + ArtifactPath: fullPath, + HashPath: fullPath + ".sha512", + }, nil +} diff --git a/internal/pkg/agent/application/upgrade/artifact/download/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/downloader.go index 89462dfd66d..ff7a775d9b2 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/downloader.go @@ -17,6 +17,6 @@ type Downloader interface { } type DownloadResult struct { - ArtifactPath string - ArtifactHash string + ArtifactPath string + ArtifactHashPath string } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index e544830ffbf..72b9472a1a1 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -16,6 +16,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + 
"github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/common" upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -45,10 +46,14 @@ func NewDownloader(config *artifact.Config) *Downloader { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. -func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ download.DownloadResult, err error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (download.DownloadResult, error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() + + var err error + downloadResult := download.DownloadResult{} downloadedFiles := make([]string, 0, 2) + defer func() { if err != nil { for _, path := range downloadedFiles { @@ -58,88 +63,57 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version } }() - // download from source to dest - path, err := e.download(e.config.OS(), a, *version, "") - downloadedFiles = append(downloadedFiles, path) + artifactPathAndName, err := common.GetArtifactPathAndName(a, *version, a.Artifact, e.config.OS(), e.config.Arch(), e.config.TargetDirectory) if err != nil { - return download.DownloadResult{}, err + return downloadResult, err } - hash, err := e.download(e.config.OS(), a, *version, ".sha512") - downloadedFiles = append(downloadedFiles, hash) - if err != nil { - return download.DownloadResult{}, err - } + downloadResult.ArtifactPath = artifactPathAndName.ArtifactPath + downloadResult.ArtifactHashPath = artifactPathAndName.HashPath - return download.DownloadResult{ - ArtifactPath: path, - ArtifactHash: hash, - }, nil -} - -// DownloadAsc downloads the package .asc file from configured source. -// It returns absolute path to the downloaded file and a no-nil error if any occurs. 
-func (e *Downloader) DownloadAsc(_ context.Context, a artifact.Artifact, version agtversion.ParsedSemVer) (string, error) { - path, err := e.download(e.config.OS(), a, version, ".asc") + err = e.downloadFile(artifactPathAndName.ArtifactName, artifactPathAndName.ArtifactPath) + downloadedFiles = append(downloadedFiles, artifactPathAndName.ArtifactPath) if err != nil { - os.Remove(path) - return "", err + return downloadResult, err } - return path, nil -} - -func (e *Downloader) download( - operatingSystem string, - a artifact.Artifact, - version agtversion.ParsedSemVer, - extension string) (string, error) { - filename, err := artifact.GetArtifactName(a, version, operatingSystem, e.config.Arch()) - if err != nil { - return "", errors.New(err, "generating package name failed") - } - - fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, e.config.Arch(), e.config.TargetDirectory) + err = e.downloadFile(artifactPathAndName.HashName, artifactPathAndName.HashPath) + downloadedFiles = append(downloadedFiles, artifactPathAndName.HashPath) if err != nil { - return "", errors.New(err, "generating package path failed") - } - - if extension != "" { - filename += extension - fullPath += extension + return downloadResult, err } - return e.downloadFile(filename, fullPath) + return downloadResult, nil } -func (e *Downloader) downloadFile(filename, fullPath string) (string, error) { +func (e *Downloader) downloadFile(filename, fullPath string) error { sourcePath := filepath.Join(e.dropPath, filename) sourceFile, err := os.Open(sourcePath) if err != nil { - return "", errors.New(err, fmt.Sprintf("package '%s' not found", sourcePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) + return errors.New(err, fmt.Sprintf("package '%s' not found", sourcePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer sourceFile.Close() if destinationDir := filepath.Dir(fullPath); destinationDir != "" && destinationDir != "." 
{ if err := os.MkdirAll(destinationDir, 0755); err != nil { - return "", err + return err } } destinationFile, err := os.OpenFile(fullPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, packagePermissions) if err != nil { - return "", errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) + return errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer destinationFile.Close() _, err = e.CopyFunc(destinationFile, sourceFile) if err != nil { processedErr := e.diskSpaceErrorFunc(err) - return fullPath, processedErr + return processedErr } - return fullPath, nil + return nil } func getDropPath(cfg *artifact.Config) string { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index fff746d6c2b..1e701678304 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -6,7 +6,6 @@ package fs import ( "context" - "fmt" "io" "os" "path/filepath" @@ -190,115 +189,115 @@ func createFiles(t *testing.T, dstPath string, files []file) { } } -func TestDownloader_DownloadAsc(t *testing.T) { - type fields struct { - config *artifact.Config - } - type args struct { - a artifact.Artifact - version agtversion.ParsedSemVer - } - tests := []struct { - name string - files []file - fields fields - args args - want string - wantErr assert.ErrorAssertionFunc - }{ - { - name: "happy path released version", - files: []file{ - { - "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", - []byte("fake signature for elastic-agent package"), - }, - }, - fields: fields{ - config: &artifact.Config{ - OperatingSystem: "linux", - Architecture: "64", - }, - }, - args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "")}, - want: "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", - wantErr: assert.NoError, - }, - { - name: "happy path snapshot version", - files: []file{ - { - "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", - []byte("fake signature for elastic-agent package"), - }, - }, - fields: fields{ - config: &artifact.Config{ - OperatingSystem: "linux", - Architecture: "64", - }, - }, - args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, - want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", - wantErr: assert.NoError, - }, - { - name: "happy path released version with build metadata", - files: []file{ - { - "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", - []byte("fake signature for elastic-agent package"), - }, - }, - fields: fields{ - config: &artifact.Config{ - OperatingSystem: "linux", - Architecture: "64", - }, - }, - args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, - want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", - wantErr: assert.NoError, - }, - { - name: "happy path snapshot version with build metadata", - files: []file{ - { - "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", - []byte("fake signature for elastic-agent package"), - }, - }, - fields: fields{ - config: &artifact.Config{ - OperatingSystem: "linux", - Architecture: "64", - }, - }, - args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, - want: 
"elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", - wantErr: assert.NoError, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - dropPath := t.TempDir() - targetDirPath := t.TempDir() - - createFiles(t, dropPath, tt.files) - - config := tt.fields.config - config.DropPath = dropPath - config.TargetDirectory = targetDirPath - - e := NewDownloader(config) - got, err := e.DownloadAsc(context.TODO(), tt.args.a, tt.args.version) - if !tt.wantErr(t, err, fmt.Sprintf("DownloadAsc(%v, %v)", tt.args.a, tt.args.version)) { - return - } - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "DownloadAsc(%v, %v)", tt.args.a, tt.args.version) - }) - } -} +// func TestDownloader_DownloadAsc(t *testing.T) { +// type fields struct { +// config *artifact.Config +// } +// type args struct { +// a artifact.Artifact +// version agtversion.ParsedSemVer +// } +// tests := []struct { +// name string +// files []file +// fields fields +// args args +// want string +// wantErr assert.ErrorAssertionFunc +// }{ +// { +// name: "happy path released version", +// files: []file{ +// { +// "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", +// []byte("fake signature for elastic-agent package"), +// }, +// }, +// fields: fields{ +// config: &artifact.Config{ +// OperatingSystem: "linux", +// Architecture: "64", +// }, +// }, +// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "")}, +// want: "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", +// wantErr: assert.NoError, +// }, +// { +// name: "happy path snapshot version", +// files: []file{ +// { +// "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", +// []byte("fake signature for elastic-agent package"), +// }, +// }, +// fields: fields{ +// config: &artifact.Config{ +// OperatingSystem: "linux", +// Architecture: "64", +// }, +// }, +// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, +// want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", +// wantErr: assert.NoError, +// }, +// { +// name: "happy path released version with build metadata", +// files: []file{ +// { +// "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", +// []byte("fake signature for elastic-agent package"), +// }, +// }, +// fields: fields{ +// config: &artifact.Config{ +// OperatingSystem: "linux", +// Architecture: "64", +// }, +// }, +// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, +// want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", +// wantErr: assert.NoError, +// }, +// { +// name: "happy path snapshot version with build metadata", +// files: []file{ +// { +// "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", +// []byte("fake signature for elastic-agent package"), +// }, +// }, +// fields: fields{ +// config: &artifact.Config{ +// OperatingSystem: "linux", +// Architecture: "64", +// }, +// }, +// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, +// want: "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", +// wantErr: assert.NoError, +// }, +// } +// for _, tt := range tests { +// t.Run(tt.name, func(t *testing.T) { +// dropPath := t.TempDir() +// targetDirPath := t.TempDir() + +// createFiles(t, dropPath, tt.files) + +// config := tt.fields.config +// config.DropPath = dropPath +// config.TargetDirectory = targetDirPath + +// e := NewDownloader(config) +// got, err := e.DownloadAsc(context.TODO(), 
tt.args.a, tt.args.version) +// if !tt.wantErr(t, err, fmt.Sprintf("DownloadAsc(%v, %v)", tt.args.a, tt.args.version)) { +// return +// } +// assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "DownloadAsc(%v, %v)", tt.args.a, tt.args.version) +// }) +// } +// } type testCopyError struct { msg string @@ -345,10 +344,9 @@ func TestDownloader_downloadFile(t *testing.T) { e.CopyFunc = copyFunc e.diskSpaceErrorFunc = diskSpaceErrorFunc - path, err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) + err := e.downloadFile("elastic-agent-1.2.3-linux-x86_64.tar.gz", filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz")) assert.Equal(t, err, diskSpaceErr) assert.Equal(t, receivedError, copyFuncError) - assert.Equal(t, filepath.Join(targetDirPath, "elastic-agent-1.2.3-linux-x86_64.tar.gz"), path) } func TestDownloader_NewDownloader(t *testing.T) { diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go index abe35860593..3baa485eda8 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go @@ -227,8 +227,6 @@ func TestVerify(t *testing.T) { testClient := NewDownloader(config) downloadResult, err := testClient.Download(ctx, agentSpec, testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts") - _, err = testClient.DownloadAsc(context.Background(), agentSpec, *testVersion) - require.NoError(t, err, "fs.Downloader could not download artifacts .asc file") _, err = os.Stat(downloadResult.ArtifactPath) require.NoError(t, err) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go index f285d11e676..b9ebb16045f 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader.go @@ -22,6 +22,7 @@ import ( "github.com/elastic/elastic-agent-libs/transport/httpcommon" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/common" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" @@ -66,6 +67,7 @@ type Downloader struct { progressReporterProvider progressReporterProvider diskSpaceErrorFunc func(error) error CopyFunc func(dst io.Writer, src io.Reader) (written int64, err error) + getFilePathAndName func(a artifact.Artifact, version agtversion.ParsedSemVer, remoteArtifact, operatingSystem, arch, targetDir string) (common.ArtifactPathAndName, error) } // NewDownloader creates and configures Elastic Downloader @@ -96,6 +98,7 @@ func NewDownloaderWithClient(log *logger.Logger, config *artifact.Config, client diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), CopyFunc: io.Copy, progressReporterProvider: progressReporterProviderFunc, + getFilePathAndName: common.GetArtifactPathAndName, } } @@ -118,11 +121,15 @@ 
func (e *Downloader) Reload(c *artifact.Config) error { // Download fetches the package from configured source. // Returns absolute path to downloaded package and an error. -func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (_ download.DownloadResult, err error) { +func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version *agtversion.ParsedSemVer) (download.DownloadResult, error) { span, ctx := apm.StartSpan(ctx, "download", "app.internal") defer span.End() + + var err error + downloadResult := download.DownloadResult{} remoteArtifact := a.Artifact downloadedFiles := make([]string, 0, 2) + defer func() { if err != nil { for _, path := range downloadedFiles { @@ -133,23 +140,27 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version } }() - // download from source to dest - path, err := e.download(ctx, remoteArtifact, e.config.OS(), a, *version) - downloadedFiles = append(downloadedFiles, path) + artifactPathAndName, err := e.getFilePathAndName(a, *version, remoteArtifact, e.config.OS(), e.config.Arch(), e.config.TargetDirectory) + if err != nil { + return downloadResult, err + } + + downloadResult.ArtifactPath = artifactPathAndName.ArtifactPath + downloadResult.ArtifactHashPath = artifactPathAndName.HashPath + + err = e.downloadFile(ctx, remoteArtifact, artifactPathAndName.ArtifactName, artifactPathAndName.ArtifactPath) + downloadedFiles = append(downloadedFiles, artifactPathAndName.ArtifactPath) if err != nil { - return download.DownloadResult{}, err + return downloadResult, err } - hash, err := e.downloadHash(ctx, remoteArtifact, e.config.OS(), a, *version) - downloadedFiles = append(downloadedFiles, hash) + err = e.downloadFile(ctx, remoteArtifact, artifactPathAndName.HashName, artifactPathAndName.HashPath) + downloadedFiles = append(downloadedFiles, artifactPathAndName.HashPath) if err != nil { - return download.DownloadResult{}, err + return downloadResult, err } - return download.DownloadResult{ - ArtifactPath: path, - ArtifactHash: hash, - }, nil + return downloadResult, nil } func (e *Downloader) composeURI(artifactName, packageName string) (string, error) { @@ -169,68 +180,37 @@ func (e *Downloader) composeURI(artifactName, packageName string) (string, error return uri.String(), nil } -func (e *Downloader) download(ctx context.Context, remoteArtifact string, operatingSystem string, a artifact.Artifact, version agtversion.ParsedSemVer) (string, error) { - filename, err := artifact.GetArtifactName(a, version, operatingSystem, e.config.Arch()) - if err != nil { - return "", errors.New(err, "generating package name failed") - } - - fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, e.config.Arch(), e.config.TargetDirectory) - if err != nil { - return "", errors.New(err, "generating package path failed") - } - - return e.downloadFile(ctx, remoteArtifact, filename, fullPath) -} - -func (e *Downloader) downloadHash(ctx context.Context, remoteArtifact string, operatingSystem string, a artifact.Artifact, version agtversion.ParsedSemVer) (string, error) { - filename, err := artifact.GetArtifactName(a, version, operatingSystem, e.config.Arch()) - if err != nil { - return "", errors.New(err, "generating package name failed") - } - - fullPath, err := artifact.GetArtifactPath(a, version, operatingSystem, e.config.Arch(), e.config.TargetDirectory) - if err != nil { - return "", errors.New(err, "generating package path failed") - } - - filename = filename + ".sha512" - 
fullPath = fullPath + ".sha512" - - return e.downloadFile(ctx, remoteArtifact, filename, fullPath) -} - -func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, fullPath string) (string, error) { +func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, fullPath string) error { sourceURI, err := e.composeURI(artifactName, filename) if err != nil { - return "", err + return err } req, err := http.NewRequest("GET", sourceURI, nil) if err != nil { - return "", errors.New(err, "fetching package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) + return errors.New(err, "fetching package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } if destinationDir := filepath.Dir(fullPath); destinationDir != "" && destinationDir != "." { if err := os.MkdirAll(destinationDir, 0o755); err != nil { - return "", err + return err } } destinationFile, err := os.OpenFile(fullPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, packagePermissions) if err != nil { - return "", errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) + return errors.New(err, "creating package file failed", errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fullPath)) } defer destinationFile.Close() resp, err := e.client.Do(req.WithContext(ctx)) if err != nil { - return fullPath, errors.New(err, "fetching package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) + return errors.New(err, "fetching package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } defer resp.Body.Close() if resp.StatusCode != 200 { - return fullPath, errors.New(fmt.Sprintf("call to '%s' returned unsuccessful status code: %d", sourceURI, resp.StatusCode), errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) + return errors.New(fmt.Sprintf("call to '%s' returned unsuccessful status code: %d", sourceURI, resp.StatusCode), errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)) } fileSize := -1 @@ -248,9 +228,9 @@ func (e *Downloader) downloadFile(ctx context.Context, artifactName, filename, f if err != nil { err = e.diskSpaceErrorFunc(err) progressReporter.ReportFailed(err) - return fullPath, fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) + return fmt.Errorf("%s: %w", errors.New("copying fetched package failed", errors.TypeNetwork, errors.M(errors.MetaKeyURI, sourceURI)).Error(), err) } progressReporter.ReportComplete() - return fullPath, nil + return nil } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index f53e4d8da39..ad862f87468 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -523,7 +523,7 @@ func TestDownloadVersion(t *testing.T) { got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) + assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got.ArtifactPath, "Download(%v, %v)", tt.args.a, tt.args.version) if tt.wantErr { assert.Error(t, err) @@ -616,7 +616,7 @@ func TestDownloadFile(t *testing.T) { return progressReporter } - _, err := downloader.downloadFile(ctx, artifactName, filename, fullPath) 
+ err := downloader.downloadFile(ctx, artifactName, filename, fullPath) t.Run("calls diskSpaceErrorFunc on any copy error", func(t *testing.T) { assert.Equal(t, receivedError, copyFuncError) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index f8a4e10007a..27468d674bd 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -105,7 +105,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi downloadResult, err := downloaderFunc(ctx, factory, parsedVersion, &settings, upgradeDetails) if err != nil { - return download.DownloadResult{}, fmt.Errorf("failed download of agent binary: %w", err) + return downloadResult, fmt.Errorf("failed download of agent binary: %w", err) } if skipVerifyOverride { @@ -115,12 +115,12 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi if verifier == nil { verifier, err = newVerifier(parsedVersion, u.log, &settings) if err != nil { - return download.DownloadResult{}, errors.New(err, "initiating verifier") + return downloadResult, errors.New(err, "initiating verifier") } } if err := verifier.Verify(ctx, agentArtifact, *parsedVersion, skipDefaultPgp, pgpBytes...); err != nil { - return download.DownloadResult{}, errors.New(err, "failed verification of agent binary") + return downloadResult, errors.New(err, "failed verification of agent binary") } return downloadResult, nil } @@ -213,7 +213,7 @@ func (u *Upgrader) downloadOnce( // used that to configure the URL we download the files from) downloadResult, err := downloader.Download(ctx, agentArtifact, version) if err != nil { - return download.DownloadResult{}, fmt.Errorf("unable to download package: %w", err) + return downloadResult, fmt.Errorf("unable to download package: %w", err) } // Download successful @@ -264,7 +264,7 @@ func (u *Upgrader) downloadWithRetries( } if err := backoff.RetryNotify(opFn, boCtx, opFailureNotificationFn); err != nil { - return download.DownloadResult{}, err + return downloadResult, err } // Clear retry details upon success diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index d3f2b3e1748..5606988263c 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -103,9 +103,9 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + downloadResult, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) - require.Equal(t, expectedDownloadPath, path) + require.Equal(t, expectedDownloadPath, downloadResult.ArtifactPath) logs := obs.TakeAll() require.Len(t, logs, 1) @@ -153,9 +153,9 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, 
upgradeDetails) + downloadResult, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) - require.Equal(t, expectedDownloadPath, path) + require.Equal(t, expectedDownloadPath, downloadResult.ArtifactPath) logs := obs.TakeAll() require.Len(t, logs, 3) @@ -208,9 +208,9 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(settings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + downloadResult, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.NoError(t, err) - require.Equal(t, expectedDownloadPath, path) + require.Equal(t, expectedDownloadPath, downloadResult.ArtifactPath) logs := obs.TakeAll() require.Len(t, logs, 3) @@ -253,9 +253,9 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) minRetryDeadline := time.Now().Add(testCaseSettings.Timeout) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails) + downloadResult, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &testCaseSettings, upgradeDetails) require.Equal(t, "context deadline exceeded", err.Error()) - require.Equal(t, "", path) + require.Equal(t, "", downloadResult.ArtifactPath) logs := obs.TakeAll() logsJSON, err := json.MarshalIndent(logs, "", " ") @@ -293,10 +293,10 @@ func TestDownloadWithRetries(t *testing.T) { upgradeDetails, upgradeDetailsRetryUntil, upgradeDetailsRetryUntilWasUnset, upgradeDetailsRetryErrorMsg := mockUpgradeDetails(parsedVersion) - path, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) + downloadResult, err := u.downloadWithRetries(context.Background(), mockDownloaderCtor, parsedVersion, &settings, upgradeDetails) require.Error(t, err) - require.Equal(t, "", path) + require.Equal(t, "", downloadResult.ArtifactPath) require.ErrorIs(t, err, upgradeErrors.ErrInsufficientDiskSpace) diff --git a/internal/pkg/agent/application/upgrade/step_relink.go b/internal/pkg/agent/application/upgrade/step_relink.go index 337d5925f44..f09e251200f 100644 --- a/internal/pkg/agent/application/upgrade/step_relink.go +++ b/internal/pkg/agent/application/upgrade/step_relink.go @@ -21,7 +21,6 @@ const ( func changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { log.Infof("Changing symlink, topDirPath: %s, symlinkPath: %s, newTarget: %s", topDirPath, symlinkPath, newTarget) - return nil // handle windows suffixes if runtime.GOOS == windows { symlinkPath += exe diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 0b346512d05..c4060bb7748 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -13,7 +13,6 @@ import ( "os/exec" "path/filepath" "runtime" - "slices" "strings" "time" @@ -27,6 +26,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" fsDownloader 
"github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download/fs" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/install" @@ -91,6 +91,13 @@ func (d *downloaderFactoryProvider) GetDownloaderFactory(name string) (downloade return factory, nil } +type upgradeCleaner interface { + setupSymlinkCleanup(symlinkFunc changeSymlinkFunc, topDirPath, oldVersionedHome, agentName string) error + setupArchiveCleanup(downloadResult download.DownloadResult) error + setupUnpackCleanup(newHomeDir, oldHomeDir string) error + cleanup(err error) error +} + // Upgrader performs an upgrade type Upgrader struct { log *logger.Logger @@ -101,6 +108,7 @@ type Upgrader struct { markerWatcher MarkerWatcher downloaderFactoryProvider DownloaderFactoryProvider upgradeCleaner upgradeCleaner + diskSpaceErrorFunc func(error) error } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -132,9 +140,9 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A downloaderFactoryProvider: downloaderFactoryProvider, upgradeCleaner: &upgradeCleanup{ log: log, - rollbackFunc: rollbackInstall, cleanupFuncs: []func() error{}, }, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), }, nil } @@ -235,110 +243,13 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } -type upgradeCleaner interface { - setupRollback(topDirPath, newHomeDir, oldHomeDir string) error - setupArchiveCleanup(downloadResult download.DownloadResult) error - setupUnpackCleanup(newHomeDir, oldHomeDir string) error - cleanup(err error) error -} -type upgradeCleanup struct { - log *logger.Logger - rollbackToggle bool - archiveCleanupToggle bool - unpackCleanupToggle bool - rollbackFunc func(*logger.Logger, string, string, string) error - cleanupFuncs []func() error -} - -func (u *upgradeCleanup) setupArchiveCleanup(downloadResult download.DownloadResult) error { - u.log.Debugf("Setting up cleanup for archive, archivePath: %s", downloadResult.ArtifactPath) - if downloadResult.ArtifactPath == "" { - msg := "archive path is empty, cannot cleanup" - u.log.Errorf(msg) - return errors.New(msg) - } - u.archiveCleanupToggle = true - - u.cleanupFuncs = append(u.cleanupFuncs, func() error { - //TODO: remove the hash file as well - return os.RemoveAll(downloadResult.ArtifactPath) - }) - - return nil -} - -func (u *upgradeCleanup) setupUnpackCleanup(newHomeDir, oldHomeDir string) error { - u.log.Debugf("Setting up cleanup for unpack, newVersionedHome: %s", newHomeDir) - - if !u.archiveCleanupToggle { - msg := "Cannot setup for unpack cleanup before archive cleanup is setup" - u.log.Debugf(msg) - return errors.New(msg) - } - - if newHomeDir == "" || oldHomeDir == "" { - msg := "new or old versioned home is empty, cannot cleanup" - u.log.Errorf(msg) - return errors.New(msg) - } - - if newHomeDir == oldHomeDir { - msg := "new and old versioned home are the same, cannot cleanup" - u.log.Errorf(msg) - return errors.New(msg) - } - - u.unpackCleanupToggle = true - - u.cleanupFuncs = append(u.cleanupFuncs, func() error { - return os.RemoveAll(newHomeDir) - }) - - return nil -} - -func (u *upgradeCleanup) 
setupRollback(topDirPath, newHomeDir, oldHomeDir string) error { - u.log.Debugf("Setting up cleanup for rollback, topDirPath: %s, oldVersionedHome: %s, newVersionedHome: %s", topDirPath, oldHomeDir, newHomeDir) - - if !u.unpackCleanupToggle { - msg := "Cannot setup for rollback before unpack cleanup is setup" - u.log.Debugf(msg) - return errors.New(msg) - } - - u.rollbackToggle = true - - u.cleanupFuncs = append(u.cleanupFuncs, func() error { - return rollbackInstall(u.log, topDirPath, newHomeDir, oldHomeDir) - }) - - return nil -} - -func (u *upgradeCleanup) cleanup(err error) error { - if err == nil { - u.log.Debugf("No error, skipping cleanup") - return nil - } - - slices.Reverse(u.cleanupFuncs) - - for _, cleanupFunc := range u.cleanupFuncs { - if cleanupErr := cleanupFunc(); cleanupErr != nil { - return goerrors.Join(err, cleanupErr) - } - } - - return err -} - // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { if err != nil { cleanupErr := u.upgradeCleaner.cleanup(err) if cleanupErr != nil { - u.log.Errorw("Error cleaning up after upgrade", "error.message", cleanupErr) + u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) err = goerrors.Join(err, cleanupErr) } } @@ -426,17 +337,14 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string } u.log.Debugf("detected used flavor: %q", detectedFlavor) unpackRes, unpackErr := u.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) - - if unpackErr != nil { - err = goerrors.Join(err, unpackErr) - } + err = u.diskSpaceErrorFunc(unpackErr) + err = goerrors.Join(err, unpackErr) newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) u.log.Infof("newHome: %s", newHome) - if unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()); unpackCleanupSetupErr != nil { - err = goerrors.Join(err, unpackCleanupSetupErr) - } + unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()) + err = goerrors.Join(err, unpackCleanupSetupErr) if err != nil { return nil, err @@ -451,16 +359,8 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string u.log.Infof("unpackRes.Hash: %s", unpackRes.Hash) - // if unpackRes.VersionedHome == "" {// TODO: need to assert that unpack - // returns a versioned home at all times - // return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) - // } - u.log.Infof("unpackRes.VersionedHome: %s", unpackRes.VersionedHome) - // newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) // TODO: this is the dir to cleanup if anything goes wrong - // u.log.Infof("newHome: %s", newHome) - if err := copyActionStore(u.log, newHome); err != nil { return nil, errors.New(err, "failed to copy action store") } @@ -493,8 +393,8 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) } - if rollbackSetupErr := u.upgradeCleaner.setupRollback(paths.Top(), newHome, currentVersionedHome); rollbackSetupErr != nil { - err = goerrors.Join(err, rollbackSetupErr) + if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(changeSymlink, paths.Top(), 
currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { + err = goerrors.Join(err, symlinkCleanupSetupErr) } u.log.Infof("currentVersionedHome: %s", currentVersionedHome) diff --git a/internal/pkg/agent/application/upgrade/upgrade_cleanup.go b/internal/pkg/agent/application/upgrade/upgrade_cleanup.go new file mode 100644 index 00000000000..8384ac0420e --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_cleanup.go @@ -0,0 +1,113 @@ +package upgrade + +import ( + "errors" + goerrors "errors" + "fmt" + "io/fs" + "os" + "path/filepath" + "slices" + + "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/pkg/core/logger" +) + +type upgradeCleanup struct { + log *logger.Logger + symlinkCleanupToggle bool + archiveCleanupToggle bool + unpackCleanupToggle bool + cleanupFuncs []func() error +} + +func (u *upgradeCleanup) removeFiles(paths ...string) error { + for _, path := range paths { + err := os.RemoveAll(path) + if err != nil { + return err + } + } + return nil +} + +func (u *upgradeCleanup) setupArchiveCleanup(downloadResult download.DownloadResult) error { + u.log.Debugf("Setting up cleanup for archive, archivePath: %s", downloadResult.ArtifactPath) + if downloadResult.ArtifactPath == "" || downloadResult.ArtifactHashPath == "" { + return errors.New("archive path or hash path is empty, cannot cleanup") + } + + u.archiveCleanupToggle = true + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + return u.removeFiles(downloadResult.ArtifactPath, downloadResult.ArtifactHashPath) + }) + + return nil +} + +func (u *upgradeCleanup) setupUnpackCleanup(newHomeDir, oldHomeDir string) error { + u.log.Debugf("Setting up cleanup for unpack, newVersionedHome: %s", newHomeDir) + + if !u.archiveCleanupToggle { + return errors.New("Cannot setup for unpack cleanup before archive cleanup is setup") + } + + if newHomeDir == "" || oldHomeDir == "" { + return errors.New("new or old versioned home is empty, cannot cleanup") + } + + if newHomeDir == oldHomeDir { + return errors.New("new and old versioned home are the same, cannot cleanup") + } + + u.unpackCleanupToggle = true + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + return u.removeFiles(newHomeDir) + }) + + return nil +} + +type changeSymlinkFunc func(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error + +func (u *upgradeCleanup) setupSymlinkCleanup(symlinkFunc changeSymlinkFunc, topDirPath, oldVersionedHome, agentName string) error { + u.log.Debugf("Setting up cleanup for symlink, topDirPath: %s, oldVersionedHome: %s, agentName: %s", topDirPath, oldVersionedHome, agentName) + + if !u.unpackCleanupToggle { + return errors.New("Cannot setup for symlink cleanup before unpack cleanup is setup") + } + + u.symlinkCleanupToggle = true + oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) + u.log.Infof("oldAgentPath: %s", oldAgentPath) + + u.cleanupFuncs = append(u.cleanupFuncs, func() error { + err := symlinkFunc(u.log, topDirPath, filepath.Join(topDirPath, agentName), oldAgentPath) + if err != nil && !errors.Is(err, fs.ErrNotExist) { + return fmt.Errorf("cleaning up symlink to %q failed: %w", oldAgentPath, err) + } + return nil + }) + + return nil +} + +func (u *upgradeCleanup) cleanup(err error) error { + if err == nil { + u.log.Debugf("No error, skipping cleanup") + return nil + } + + 
slices.Reverse(u.cleanupFuncs) + + for _, cleanupFunc := range u.cleanupFuncs { + if cleanupErr := cleanupFunc(); cleanupErr != nil { + return goerrors.Join(err, cleanupErr) + } + } + + return err +} diff --git a/internal/pkg/agent/application/upgrade/upgrade_cleanup_test.go b/internal/pkg/agent/application/upgrade/upgrade_cleanup_test.go new file mode 100644 index 00000000000..4ea5f56139f --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_cleanup_test.go @@ -0,0 +1,264 @@ +package upgrade + +import ( + "errors" + goerrors "errors" + "os" + "path/filepath" + "testing" + + "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" + "github.com/stretchr/testify/require" +) + +func newTestCleanup() *upgradeCleanup { + log, _ := loggertest.New("test") + return &upgradeCleanup{ + log: log, + } +} + +type mockCleaner struct { + callOrder []int +} + +func (m *mockCleaner) mockCleanupFunc(num int) func() error { + return func() error { + m.callOrder = append(m.callOrder, num) + return nil + } +} + +func TestUpgradeCleanup(t *testing.T) { + cleaner := newTestCleanup() + + t.Run("should return nil if no error is passed", func(t *testing.T) { + err := cleaner.cleanup(nil) + require.NoError(t, err) + }) + + t.Run("when provided with an error, should call the cleanup functions in reverse order", func(t *testing.T) { + expectedCallOrder := []int{3, 2, 1} + mockCleanup := mockCleaner{ + callOrder: []int{}, + } + + for i := len(expectedCallOrder) - 1; i >= 0; i-- { + cleaner.cleanupFuncs = append(cleaner.cleanupFuncs, mockCleanup.mockCleanupFunc(expectedCallOrder[i])) + } + + err := cleaner.cleanup(errors.New("test error")) + require.Error(t, err) + require.Equal(t, expectedCallOrder, mockCleanup.callOrder) + }) + + t.Run("when a cleanup function returns an error, should join the cleanup error with the passed error and immediately return", func(t *testing.T) { + initialError := errors.New("test error") + cleanup2Error := errors.New("cleanup 2 error") + cleanup1Called := false + cleanupFunc1 := func() error { + cleanup1Called = true + return nil + } + + cleanup2Called := false + cleanupFunc2 := func() error { + cleanup2Called = true + return cleanup2Error + } + + cleaner.cleanupFuncs = []func() error{cleanupFunc1, cleanupFunc2} + + err := cleaner.cleanup(initialError) + require.Error(t, err) + require.Equal(t, goerrors.Join(initialError, cleanup2Error), err, "the error should be the initial error joined with the cleanup error") + require.False(t, cleanup1Called, "cleanup 1 should not have been called") + require.True(t, cleanup2Called, "cleanup 2 should have been called") + }) +} + +func TestUpgradeCleanup_ArchiveCleanup(t *testing.T) { + + t.Run("when setting up archive cleanup, should return an error if the artifact path is empty", func(t *testing.T) { + cleaner := newTestCleanup() + err := cleaner.setupArchiveCleanup(download.DownloadResult{ArtifactHashPath: "test"}) + require.Error(t, err) + require.Equal(t, "archive path or hash path is empty, cannot cleanup", err.Error()) + }) + + t.Run("when setting up archive cleanup, should return an error if the hash path is empty", func(t *testing.T) { + cleaner := newTestCleanup() + err := cleaner.setupArchiveCleanup(download.DownloadResult{ArtifactPath: "test"}) + require.Error(t, err) + 
require.Equal(t, "archive path or hash path is empty, cannot cleanup", err.Error()) + }) + + t.Run("should clean up the archive files", func(t *testing.T) { + cleaner := newTestCleanup() + tmpdir := t.TempDir() + artifactPath := filepath.Join(tmpdir, "test") + artifactHashPath := filepath.Join(tmpdir, "test.hash") + + err := os.WriteFile(artifactPath, []byte("test"), 0755) + require.NoError(t, err) + err = os.WriteFile(artifactHashPath, []byte("test"), 0755) + require.NoError(t, err) + + downloadResult := download.DownloadResult{ + ArtifactPath: artifactPath, + ArtifactHashPath: artifactHashPath, + } + + err = cleaner.setupArchiveCleanup(downloadResult) + require.NoError(t, err) + require.True(t, cleaner.archiveCleanupToggle) + + require.Len(t, cleaner.cleanupFuncs, 1) + cleanupFunc := cleaner.cleanupFuncs[0] + err = cleanupFunc() + require.NoError(t, err) + + _, err = os.Stat(artifactPath) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist)) + + _, err = os.Stat(artifactHashPath) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist)) + }) + + t.Run("should not return an error if files do not exist", func(t *testing.T) { + cleaner := newTestCleanup() + tmpdir := t.TempDir() + artifactPath := filepath.Join(tmpdir, "test") + artifactHashPath := filepath.Join(tmpdir, "test.hash") + + err := cleaner.setupArchiveCleanup(download.DownloadResult{ArtifactPath: artifactPath, ArtifactHashPath: artifactHashPath}) + require.NoError(t, err) + + _, err = os.Stat(artifactPath) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist)) + + _, err = os.Stat(artifactHashPath) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist)) + + setupFunc := cleaner.cleanupFuncs[0] + err = setupFunc() + require.NoError(t, err) + }) +} + +func TestUpgradeCleanup_UnpackCleanup(t *testing.T) { + + t.Run("when setting up unpack cleanup, should return an error if the archive cleanup toggle is not set", func(t *testing.T) { + cleaner := newTestCleanup() + err := cleaner.setupUnpackCleanup("", "") + require.Error(t, err) + require.Equal(t, "Cannot setup for unpack cleanup before archive cleanup is setup", err.Error()) + }) + + t.Run("when setting up unpack cleanup, should return an error if the new home dir is empty", func(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + err := cleaner.setupUnpackCleanup("", "test") + require.Error(t, err) + require.Equal(t, "new or old versioned home is empty, cannot cleanup", err.Error()) + }) + + t.Run("when setting up unpack cleanup, should return an error if the old home dir is empty", func(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + err := cleaner.setupUnpackCleanup("test", "") + require.Error(t, err) + require.Equal(t, "new or old versioned home is empty, cannot cleanup", err.Error()) + }) + + t.Run("when setting up unpack cleanup, should return an error if the new and old home dirs are the same", func(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + err := cleaner.setupUnpackCleanup("test", "test") + require.Error(t, err) + }) + + t.Run("should clean up the new home dir", func(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + tmpdir := t.TempDir() + newHomeDir := filepath.Join(tmpdir, "new") + oldHomeDir := filepath.Join(tmpdir, "old") + + err := os.MkdirAll(newHomeDir, 0755) + require.NoError(t, err) + err = os.MkdirAll(oldHomeDir, 0755) + require.NoError(t, 
err) + + err = cleaner.setupUnpackCleanup(newHomeDir, oldHomeDir) + require.NoError(t, err) + + require.True(t, cleaner.unpackCleanupToggle) + + require.Len(t, cleaner.cleanupFuncs, 1) + + cleanupFunc := cleaner.cleanupFuncs[0] + err = cleanupFunc() + require.NoError(t, err) + + _, err = os.Stat(newHomeDir) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist)) + + _, err = os.Stat(oldHomeDir) + require.NoError(t, err) + }) +} + +func TestUpgradeCleanup_SymlinkCleanup(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + cleaner.unpackCleanupToggle = true + mockSymlinkFunc := func(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { + return nil + } + + t.Run("should return an error if the unpack cleanup toggle is not set", func(t *testing.T) { + cleaner := newTestCleanup() + err := cleaner.setupSymlinkCleanup(mockSymlinkFunc, "test", "test", "test") + require.Error(t, err) + require.Equal(t, "Cannot setup for symlink cleanup before unpack cleanup is setup", err.Error()) + }) + + t.Run("should clean up the symlink", func(t *testing.T) { + cleaner := newTestCleanup() + cleaner.archiveCleanupToggle = true + cleaner.unpackCleanupToggle = true + + calledTopDirPath := "" + calledSymlinkPath := "" + calledNewTarget := "" + mockSymlinkFunc := func(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { + calledTopDirPath = topDirPath + calledSymlinkPath = symlinkPath + calledNewTarget = newTarget + return nil + } + + err := cleaner.setupSymlinkCleanup(mockSymlinkFunc, "mockTopDirPath", "mockSymlinkPath", "mockAgentName") + require.NoError(t, err) + + cleanupFunc := cleaner.cleanupFuncs[0] + err = cleanupFunc() + require.NoError(t, err) + + expectedOldAgentPath := paths.BinaryPath(filepath.Join("mockTopDirPath", "mockSymlinkPath"), "mockAgentName") + expectedSymlinkPath := filepath.Join("mockTopDirPath", "mockAgentName") + + require.Equal(t, "mockTopDirPath", calledTopDirPath) + require.Equal(t, expectedSymlinkPath, calledSymlinkPath) + require.Equal(t, expectedOldAgentPath, calledNewTarget) + }) +} From 96bd06c7bae9c303d0c22de47905176ef2917f23 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 5 Aug 2025 02:51:46 +0300 Subject: [PATCH 041/127] enhancement(5235): removed calls to rollback in the upgrade function, relying on the cleanup --- .../pkg/agent/application/upgrade/upgrade.go | 36 ++++++++----------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index c4060bb7748..2b4e0e10084 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -10,7 +10,6 @@ import ( "fmt" "io/fs" "os" - "os/exec" "path/filepath" "runtime" "strings" @@ -398,13 +397,10 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string } u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - changeSymlink(u.log, paths.Top(), symlinkPath, newPath) - return nil, errors.New("we are done here") - if err := changeSymlink(u.log, paths.Top(), symlinkPath, newPath); err != nil { - u.log.Errorw("Rolling back: changing symlink failed", "error.message", err) - rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) - return nil, goerrors.Join(err, rollbackErr) + err = changeSymlink(u.log, paths.Top(), symlinkPath, newPath) + if err != nil { + return nil, err } // We rotated the symlink successfully: prepare the 
current and previous agent installation details for the update marker @@ -426,30 +422,26 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string versionedHome: currentVersionedHome, } - if err := markUpgrade(u.log, + err = markUpgrade(u.log, paths.Data(), // data dir to place the marker in current, // new agent version data previous, // old agent version data - action, det, OUTCOME_UPGRADE); err != nil { - u.log.Errorw("Rolling back: marking upgrade failed", "error.message", err) - rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) - return nil, goerrors.Join(err, rollbackErr) + action, det, OUTCOME_UPGRADE) + if err != nil { + return nil, err } watcherExecutable := selectWatcherExecutable(paths.Top(), previous, current) - var watcherCmd *exec.Cmd - if watcherCmd, err = InvokeWatcher(u.log, watcherExecutable); err != nil { - u.log.Errorw("Rolling back: starting watcher failed", "error.message", err) - rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) - return nil, goerrors.Join(err, rollbackErr) + watcherCmd, err := InvokeWatcher(u.log, watcherExecutable) + if err != nil { + return nil, err } - watcherWaitErr := waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) - if watcherWaitErr != nil { - killWatcherErr := watcherCmd.Process.Kill() - rollbackErr := rollbackInstall(u.log, paths.Top(), hashedDir, currentVersionedHome) - return nil, goerrors.Join(watcherWaitErr, killWatcherErr, rollbackErr) + err = waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) + if err != nil { + err = goerrors.Join(err, watcherCmd.Process.Kill()) + return nil, err } cb := shutdownCallback(u.log, paths.Home(), release.Version(), version, filepath.Join(paths.Top(), unpackRes.VersionedHome)) From ec02678ff0f1adfbbfe2ad723c649fcea759a066 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Tue, 5 Aug 2025 02:54:03 +0300 Subject: [PATCH 042/127] enhancement(5235): removed unused rollback function --- .../pkg/agent/application/upgrade/upgrade.go | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 2b4e0e10084..1f7db134f38 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -8,7 +8,6 @@ import ( "context" goerrors "errors" "fmt" - "io/fs" "os" "path/filepath" "runtime" @@ -579,24 +578,6 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer return current == newVersion } -func rollbackInstall(log *logger.Logger, topDirPath, versionedHome, oldVersionedHome string) error { - log.Infof("Rolling back install, topDirPath: %s, versionedHome: %s, oldVersionedHome: %s", topDirPath, versionedHome, oldVersionedHome) - oldAgentPath := paths.BinaryPath(filepath.Join(topDirPath, oldVersionedHome), agentName) - log.Infof("oldAgentPath: %s", oldAgentPath) - err := changeSymlink(log, topDirPath, filepath.Join(topDirPath, agentName), oldAgentPath) - if err != nil && !errors.Is(err, fs.ErrNotExist) { - return fmt.Errorf("rolling back install: restoring symlink to %q failed: %w", oldAgentPath, err) - } - - newAgentInstallPath := filepath.Join(topDirPath, versionedHome) - log.Infof("newAgentInstallPath: %s", newAgentInstallPath) - err = os.RemoveAll(newAgentInstallPath) - if err != nil && !errors.Is(err, fs.ErrNotExist) { - return fmt.Errorf("rolling back install: removing new agent 
install at %q failed: %w", newAgentInstallPath, err) - } - return nil -} - func copyActionStore(log *logger.Logger, newHome string) error { // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} From d3428d36ab30ff95d3230e709bf662427ac0df5e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 01:40:41 +0300 Subject: [PATCH 043/127] enhancement(5235): updated error handling in upgrade function --- .../agent/application/upgrade/step_unpack.go | 14 +++---- .../application/upgrade/step_unpack_test.go | 20 ++++----- .../pkg/agent/application/upgrade/upgrade.go | 42 +++++++++++-------- 3 files changed, 42 insertions(+), 34 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 7c69f484399..a83051ee6c1 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -27,7 +27,7 @@ import ( ) // UnpackResult contains the location and hash of the unpacked agent files -type UnpackResult struct { +type unpackResult struct { // Hash contains the unpacked agent commit hash, limited to a length of 6 for backward compatibility Hash string `json:"hash" yaml:"hash"` // VersionedHome indicates the path (relative to topPath, formatted in os-dependent fashion) where to find the unpacked agent files @@ -36,10 +36,10 @@ type UnpackResult struct { } // unpack unpacks archive correctly, skips root (symlink, config...) unpacks data/* -func (u *Upgrader) unpack(version, archivePath, dataDir string, flavor string) (UnpackResult, error) { +func (u *Upgrader) unpack(version, archivePath, dataDir string, flavor string) (unpackResult, error) { // unpack must occur in directory that holds the installation directory // or the extraction will be double nested - var unpackRes UnpackResult + var unpackRes unpackResult var err error if runtime.GOOS == windows { unpackRes, err = unzip(u.log, archivePath, dataDir, flavor) @@ -78,9 +78,9 @@ func (u *Upgrader) getPackageMetadata(archivePath string) (packageMetadata, erro } } -func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (UnpackResult, error) { +func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpackResult, error) { var hash, rootDir string - result := UnpackResult{} + result := unpackResult{} r, err := zip.OpenReader(archivePath) if err != nil { @@ -319,12 +319,12 @@ func getPackageMetadataFromZipReader(r *zip.ReadCloser, fileNamePrefix string) ( return ret, nil } -func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (UnpackResult, error) { +func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpackResult, error) { var versionedHome string var rootDir string var hash string - result := UnpackResult{} + result := unpackResult{} // Look up manifest in the archive and prepare path mappings, if any pm := pathMapper{} diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index fb3a3db807e..c7c01db3579 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -219,7 +219,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { tests := []struct { name string args args - want UnpackResult + want unpackResult wantErr 
assert.ErrorAssertionFunc checkFiles checkExtractedPath flavor string @@ -233,7 +233,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.tar.gz", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), }, @@ -252,7 +252,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.tar.gz", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, @@ -268,7 +268,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.tar.gz", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, @@ -293,7 +293,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.tar.gz", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, @@ -348,7 +348,7 @@ func TestUpgrader_unpackZip(t *testing.T) { tests := []struct { name string args args - want UnpackResult + want unpackResult wantErr assert.ErrorAssertionFunc checkFiles checkExtractedPath flavor string @@ -361,7 +361,7 @@ func TestUpgrader_unpackZip(t *testing.T) { return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.zip", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), }, @@ -379,7 +379,7 @@ func TestUpgrader_unpackZip(t *testing.T) { return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.zip", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, @@ -395,7 +395,7 @@ func TestUpgrader_unpackZip(t *testing.T) { return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.zip", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, @@ -419,7 +419,7 @@ func TestUpgrader_unpackZip(t *testing.T) { return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.zip", i) }, }, - want: UnpackResult{ + want: unpackResult{ Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 1f7db134f38..2a671465885 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -335,11 +335,20 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string } u.log.Debugf("detected used flavor: %q", detectedFlavor) unpackRes, unpackErr := u.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) - err = u.diskSpaceErrorFunc(unpackErr) - err = goerrors.Join(err, unpackErr) + err = goerrors.Join(err, u.diskSpaceErrorFunc(unpackErr)) + + if unpackRes.VersionedHome == "" { + err = goerrors.Join(err, fmt.Errorf("unknown versioned home")) + return nil, err + } + + newHash := unpackRes.Hash + if newHash == "" { + err = goerrors.Join(err, fmt.Errorf("unknown hash")) + return nil, err + } newHome := 
filepath.Join(paths.Top(), unpackRes.VersionedHome) - u.log.Infof("newHome: %s", newHome) unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()) err = goerrors.Join(err, unpackCleanupSetupErr) @@ -348,29 +357,28 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - u.log.Infof("unpackRes: %+v", unpackRes) + // u.log.Infof("unpackRes: %+v", unpackRes) - newHash := unpackRes.Hash - if newHash == "" { - return nil, errors.New("unknown hash") - } - - u.log.Infof("unpackRes.Hash: %s", unpackRes.Hash) + // u.log.Infof("unpackRes.Hash: %s", unpackRes.Hash) - u.log.Infof("unpackRes.VersionedHome: %s", unpackRes.VersionedHome) + // u.log.Infof("unpackRes.VersionedHome: %s", unpackRes.VersionedHome) - if err := copyActionStore(u.log, newHome); err != nil { - return nil, errors.New(err, "failed to copy action store") + err = copyActionStore(u.log, newHome) + if err != nil { + err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) + return nil, err } newRunPath := filepath.Join(newHome, "run") oldRunPath := filepath.Join(paths.Run()) - u.log.Infof("oldRunPath: %s", oldRunPath) - u.log.Infof("newRunPath: %s", newRunPath) + // u.log.Infof("oldRunPath: %s", oldRunPath) + // u.log.Infof("newRunPath: %s", newRunPath) - if err := copyRunDirectory(u.log, oldRunPath, newRunPath); err != nil { - return nil, errors.New(err, "failed to copy run directory") + err = copyRunDirectory(u.log, oldRunPath, newRunPath) + if err != nil { + err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) + return nil, err } det.SetState(details.StateReplacing) From c097d72a328fc08f59fa8e00da82f4ba208e3c30 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 02:08:26 +0300 Subject: [PATCH 044/127] enhancement(5235): added artifact downloader interface, updated tests --- .../pkg/agent/application/upgrade/cleanup.go | 2 +- .../agent/application/upgrade/cleanup_test.go | 4 +- .../application/upgrade/step_download.go | 30 ++++++++--- .../application/upgrade/step_download_test.go | 33 ++++-------- .../pkg/agent/application/upgrade/upgrade.go | 53 ++++++++++++------- .../agent/application/upgrade/upgrade_test.go | 28 +++++----- 6 files changed, 87 insertions(+), 63 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/cleanup.go b/internal/pkg/agent/application/upgrade/cleanup.go index dda37b4b4df..8a0e12e3823 100644 --- a/internal/pkg/agent/application/upgrade/cleanup.go +++ b/internal/pkg/agent/application/upgrade/cleanup.go @@ -16,7 +16,7 @@ import ( ) // cleanNonMatchingVersionsFromDownloads will remove files that do not have the passed version number from the downloads directory. 
-func cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { +func (u *upgradeArtifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { downloadsPath := paths.Downloads() log.Infow("Cleaning up non-matching downloaded versions", "version", version, "downloads.path", downloadsPath) diff --git a/internal/pkg/agent/application/upgrade/cleanup_test.go b/internal/pkg/agent/application/upgrade/cleanup_test.go index 2b5d1825455..03e319c9fae 100644 --- a/internal/pkg/agent/application/upgrade/cleanup_test.go +++ b/internal/pkg/agent/application/upgrade/cleanup_test.go @@ -11,6 +11,7 @@ import ( "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/stretchr/testify/require" @@ -34,7 +35,8 @@ func setupDir(t *testing.T) { func TestPreUpgradeCleanup(t *testing.T) { setupDir(t) log := newErrorLogger(t) - err := cleanNonMatchingVersionsFromDownloads(log, "8.4.0") + u := newUpgradeArtifactDownloader(log, &artifact.Config{}, nil) + err := u.cleanNonMatchingVersionsFromDownloads(log, "8.4.0") require.NoError(t, err) files, err := os.ReadDir(paths.Downloads()) diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 27468d674bd..1b84e8ed1d9 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -37,16 +37,30 @@ const ( fleetUpgradeFallbackPGPFormat = "/api/agents/upgrades/%d.%d.%d/pgp-public-key" ) +func newUpgradeArtifactDownloader(log *logger.Logger, settings *artifact.Config, downloaderFactoryProvider DownloaderFactoryProvider) *upgradeArtifactDownloader { + return &upgradeArtifactDownloader{ + log: log, + settings: settings, + downloaderFactoryProvider: downloaderFactoryProvider, + } +} + +type upgradeArtifactDownloader struct { + log *logger.Logger + settings *artifact.Config + downloaderFactoryProvider DownloaderFactoryProvider +} + type downloader func(context.Context, downloaderFactory, *agtversion.ParsedSemVer, *artifact.Config, *details.Details) (download.DownloadResult, error) -func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ download.DownloadResult, err error) { +func (u *upgradeArtifactDownloader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (_ download.DownloadResult, err error) { span, ctx := apm.StartSpan(ctx, "downloadArtifact", "app.internal") defer func() { apm.CaptureError(ctx, err).Send() span.End() }() - pgpBytes = u.appendFallbackPGP(parsedVersion, pgpBytes) + pgpBytes = u.appendFallbackPGP(parsedVersion, fleetServerURI, pgpBytes) // do not update source config settings := *u.settings @@ -125,7 +139,7 @@ func (u *Upgrader) downloadArtifact(ctx context.Context, parsedVersion *agtversi return downloadResult, nil } -func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgpBytes []string) []string { +func (u *upgradeArtifactDownloader) appendFallbackPGP(targetVersion 
*agtversion.ParsedSemVer, fleetServerURI string, pgpBytes []string) []string { if pgpBytes == nil { pgpBytes = make([]string, 0, 1) } @@ -134,10 +148,10 @@ func (u *Upgrader) appendFallbackPGP(targetVersion *agtversion.ParsedSemVer, pgp pgpBytes = append(pgpBytes, fallbackPGP) // add a secondary fallback if fleet server is configured - u.log.Debugf("Considering fleet server uri for pgp check fallback %q", u.fleetServerURI) - if u.fleetServerURI != "" { + u.log.Debugf("Considering fleet server uri for pgp check fallback %q", fleetServerURI) + if fleetServerURI != "" { secondaryPath, err := url.JoinPath( - u.fleetServerURI, + fleetServerURI, fmt.Sprintf(fleetUpgradeFallbackPGPFormat, targetVersion.Major(), targetVersion.Minor(), targetVersion.Patch()), ) if err != nil { @@ -197,7 +211,7 @@ func newVerifier(version *agtversion.ParsedSemVer, log *logger.Logger, settings return composed.NewVerifier(log, fsVerifier, snapshotVerifier, remoteVerifier), nil } -func (u *Upgrader) downloadOnce( +func (u *upgradeArtifactDownloader) downloadOnce( ctx context.Context, factory downloaderFactory, version *agtversion.ParsedSemVer, @@ -220,7 +234,7 @@ func (u *Upgrader) downloadOnce( return downloadResult, nil } -func (u *Upgrader) downloadWithRetries( +func (u *upgradeArtifactDownloader) downloadWithRetries( ctx context.Context, factory downloaderFactory, version *agtversion.ParsedSemVer, diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 5606988263c..2cf452ba00e 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -15,7 +15,6 @@ import ( "github.com/stretchr/testify/require" "github.com/elastic/elastic-agent-libs/transport/httpcommon" - "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" @@ -58,11 +57,8 @@ func TestFallbackIsAppended(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { l, _ := loggertest.New(tc.name) - u := Upgrader{ - fleetServerURI: tc.fleetServerURI, - log: l, - } - res := u.appendFallbackPGP(tc.targetVersion, tc.passedBytes) + u := newUpgradeArtifactDownloader(l, &artifact.Config{}, nil) + res := u.appendFallbackPGP(tc.targetVersion, tc.fleetServerURI, tc.passedBytes) // check default fallback is passed and is very last require.NotNil(t, res) require.Equal(t, tc.expectedLen, len(res)) @@ -94,8 +90,7 @@ func TestDownloadWithRetries(t *testing.T) { return &mockDownloader{expectedDownloadPath, nil}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(testLogger, &settings, nil) parsedVersion, err := agtversion.ParseVersion("8.9.0") require.NoError(t, err) @@ -144,8 +139,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(testLogger, &settings, nil) parsedVersion, err := agtversion.ParseVersion("8.9.0") require.NoError(t, err) @@ -199,8 +193,7 @@ func TestDownloadWithRetries(t *testing.T) { return nil, nil } - u, err := NewUpgrader(testLogger, &settings, 
&info.AgentInfo{}) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(testLogger, &settings, nil) parsedVersion, err := agtversion.ParseVersion("8.9.0") require.NoError(t, err) @@ -244,8 +237,7 @@ func TestDownloadWithRetries(t *testing.T) { return &mockDownloader{"", errors.New("download failed")}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(testLogger, &settings, nil) parsedVersion, err := agtversion.ParseVersion("8.9.0") require.NoError(t, err) @@ -285,8 +277,7 @@ func TestDownloadWithRetries(t *testing.T) { return &mockDownloader{"", upgradeErrors.ErrInsufficientDiskSpace}, nil } - u, err := NewUpgrader(testLogger, &settings, &info.AgentInfo{}) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(testLogger, &settings, nil) parsedVersion, err := agtversion.ParseVersion("8.9.0") require.NoError(t, err) @@ -359,12 +350,11 @@ func TestDownloadArtifact(t *testing.T) { require.NoError(t, err) config := artifact.Config{} - u, err := NewUpgrader(logger, &config, nil) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(logger, &config, nil) u.downloaderFactoryProvider = &mockDownloaderFactoryProvider{} - _, err = u.downloadArtifact(context.Background(), nil, "file://mockfilepath", nil, false, false) + _, err = u.downloadArtifact(context.Background(), nil, "file://mockfilepath", "", nil, false, false) require.Error(t, err) require.ErrorIs(t, err, mockDownloaderFactoryError) require.Equal(t, fileDownloaderFactory, u.downloaderFactoryProvider.(*mockDownloaderFactoryProvider).calledWithName) @@ -375,12 +365,11 @@ func TestDownloadArtifact(t *testing.T) { require.NoError(t, err) config := artifact.Config{} - u, err := NewUpgrader(logger, &config, nil) - require.NoError(t, err) + u := newUpgradeArtifactDownloader(logger, &config, nil) u.downloaderFactoryProvider = &mockDownloaderFactoryProvider{} - _, err = u.downloadArtifact(context.Background(), nil, "https://mockuri", nil, false, false) + _, err = u.downloadArtifact(context.Background(), nil, "https://mockuri", "", nil, false, false) require.Error(t, err) require.ErrorIs(t, err, mockDownloaderFactoryError) require.Equal(t, composedDownloaderFactory, u.downloaderFactoryProvider.(*mockDownloaderFactoryProvider).calledWithName) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 2a671465885..7b4cc1f0298 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -96,17 +96,22 @@ type upgradeCleaner interface { cleanup(err error) error } +type artifactDownloader interface { + downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error +} + // Upgrader performs an upgrade type Upgrader struct { - log *logger.Logger - settings *artifact.Config - agentInfo info.Agent - upgradeable bool - fleetServerURI string - markerWatcher MarkerWatcher - downloaderFactoryProvider DownloaderFactoryProvider - upgradeCleaner upgradeCleaner - diskSpaceErrorFunc func(error) error + log *logger.Logger + settings *artifact.Config + agentInfo info.Agent + upgradeable bool + fleetServerURI string + markerWatcher MarkerWatcher + upgradeCleaner upgradeCleaner + 
diskSpaceErrorFunc func(error) error + artifactDownloader artifactDownloader } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -130,17 +135,17 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A } return &Upgrader{ - log: log, - settings: settings, - agentInfo: agentInfo, - upgradeable: IsUpgradeable(), - markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), - downloaderFactoryProvider: downloaderFactoryProvider, + log: log, + settings: settings, + agentInfo: agentInfo, + upgradeable: IsUpgradeable(), + markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), upgradeCleaner: &upgradeCleanup{ log: log, cleanupFuncs: []func() error{}, }, diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), + artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), }, nil } @@ -241,6 +246,18 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } +// type upgradeProcessWithFuncs interface { +// download(funcs downloadFuncs, params downloadParams) (download.DownloadResult, error) +// unpack(funcs unpackFuncs, params unpackParams) (unpackStepResult, error) +// replace(funcs replaceFuncs, params replaceParams) error +// watch(funcs watchFuncs, params watchParams) error +// } + +type upgradeStep interface { + downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + cleanNonMatchingVersions(log *logger.Logger, version string) error +} + // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -281,7 +298,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string span, ctx := apm.StartSpan(ctx, "upgrade", "app.internal") defer span.End() - err = cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()) + err = u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()) if err != nil { u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) } @@ -295,11 +312,11 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("error parsing version %q: %w", version, err) } - downloadResult, err := u.downloadArtifact(ctx, parsedVersion, sourceURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + downloadResult, err := u.artifactDownloader.downloadArtifact(ctx, parsedVersion, sourceURI, u.fleetServerURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) if err != nil { // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files // This may have an issue if users are upgrading to the same version number. 
- if dErr := cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()); dErr != nil { + if dErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()); dErr != nil { u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) } diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index bc1d80c8419..afa2134f413 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1332,23 +1332,24 @@ func TestDownloaderFactoryProvider(t *testing.T) { } func TestNewUpgrader(t *testing.T) { - logger, err := logger.New("test", false) - require.NoError(t, err) + // TODO: address this test + // logger, err := logger.New("test", false) + // require.NoError(t, err) - upgrader, err := NewUpgrader(logger, nil, nil) - require.NoError(t, err) + // upgrader, err := NewUpgrader(logger, nil, nil) + // require.NoError(t, err) - fileDownloaderFactory, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) - require.NoError(t, err) + // fileDownloaderFactory, err := upgrader.artifactDownloader.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) + // require.NoError(t, err) - fileDownloader, err := fileDownloaderFactory(nil, nil, nil, nil) - require.NoError(t, err) - require.IsType(t, &fs.Downloader{}, fileDownloader) + // fileDownloader, err := fileDownloaderFactory(nil, nil, nil, nil) + // require.NoError(t, err) + // require.IsType(t, &fs.Downloader{}, fileDownloader) - composedDownloader, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) - require.NoError(t, err) + // composedDownloader, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) + // require.NoError(t, err) - require.Equal(t, reflect.ValueOf(composedDownloader).Pointer(), reflect.ValueOf(newDownloader).Pointer()) + // require.Equal(t, reflect.ValueOf(composedDownloader).Pointer(), reflect.ValueOf(newDownloader).Pointer()) } func setupForFileDownloader(sourcePrefix string, expectedFileName string, partialData []byte) setupFunc { @@ -1551,13 +1552,14 @@ func TestUpgradeDownloadErrors(t *testing.T) { } downloaderFactoryProvider := tc.factoryProviderFunc(&config, copyFunc) + artifactDownloader := newUpgradeArtifactDownloader(log, &config, downloaderFactoryProvider) mockAgentInfo := mockinfo.NewAgent(t) mockAgentInfo.On("Version").Return(version.String()) upgrader, err := NewUpgrader(log, &config, mockAgentInfo) require.NoError(t, err) - upgrader.downloaderFactoryProvider = downloaderFactoryProvider + upgrader.artifactDownloader = artifactDownloader _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) require.Error(t, err, "expected error got none") From ac25c3d3219037a54c154365f63ffb6f356cf839 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 02:23:40 +0300 Subject: [PATCH 045/127] enhancement(5235): added unpacker interface, updated tests --- .../agent/application/upgrade/step_unpack.go | 12 +++++-- .../pkg/agent/application/upgrade/upgrade.go | 35 +++++++++---------- .../agent/application/upgrade/upgrade_test.go | 4 ++- 3 files changed, 30 insertions(+), 21 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index a83051ee6c1..2ad66f29284 100644 --- 
a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -35,8 +35,12 @@ type unpackResult struct { VersionedHome string `json:"versioned-home" yaml:"versioned-home"` } +type upgradeUnpacker struct { + log *logger.Logger +} + // unpack unpacks archive correctly, skips root (symlink, config...) unpacks data/* -func (u *Upgrader) unpack(version, archivePath, dataDir string, flavor string) (unpackResult, error) { +func (u *upgradeUnpacker) unpack(version, archivePath, dataDir string, flavor string) (unpackResult, error) { // unpack must occur in directory that holds the installation directory // or the extraction will be double nested var unpackRes unpackResult @@ -61,7 +65,7 @@ type packageMetadata struct { hash string } -func (u *Upgrader) getPackageMetadata(archivePath string) (packageMetadata, error) { +func (u *upgradeUnpacker) getPackageMetadata(archivePath string) (packageMetadata, error) { ext := filepath.Ext(archivePath) if ext == ".gz" { // if we got gzip extension we need another extension before last @@ -729,3 +733,7 @@ func getFilesContentFromTar(archivePath string, files ...string) (map[string]io. func createVersionedHomeFromHash(hash string) string { return filepath.Join("data", fmt.Sprintf("elastic-agent-%s", hash[:hashLen])) } + +func (u *upgradeUnpacker) detectFlavor(topPath, flavor string) (string, error) { + return install.UsedFlavor(topPath, flavor) +} diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 7b4cc1f0298..f5a9c3919c1 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -101,6 +101,13 @@ type artifactDownloader interface { cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error } +type unpacker interface { + getPackageMetadata(archivePath string) (packageMetadata, error) + extractAgentVersion(metadata packageMetadata, version string) agentVersion + unpack(version, archivePath, topPath, flavor string) (unpackResult, error) + detectFlavor(topPath, flavor string) (string, error) +} + // Upgrader performs an upgrade type Upgrader struct { log *logger.Logger @@ -112,6 +119,7 @@ type Upgrader struct { upgradeCleaner upgradeCleaner diskSpaceErrorFunc func(error) error artifactDownloader artifactDownloader + unpacker unpacker } // IsUpgradeable when agent is installed and running as a service or flag was provided. 
@@ -146,6 +154,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A }, diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), + unpacker: &upgradeUnpacker{log: log}, }, nil } @@ -246,18 +255,6 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } -// type upgradeProcessWithFuncs interface { -// download(funcs downloadFuncs, params downloadParams) (download.DownloadResult, error) -// unpack(funcs unpackFuncs, params unpackParams) (unpackStepResult, error) -// replace(funcs replaceFuncs, params replaceParams) error -// watch(funcs watchFuncs, params watchParams) error -// } - -type upgradeStep interface { - downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) - cleanNonMatchingVersions(log *logger.Logger, version string) error -} - // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -329,13 +326,14 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string det.SetState(details.StateExtracting) - metadata, err := u.getPackageMetadata(downloadResult.ArtifactPath) + metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) if err != nil { return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) } - newVersion := extractAgentVersion(metadata, version) - if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { + newVersion := u.unpacker.extractAgentVersion(metadata, version) + + if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { // pass this as param to unpack step in upgrade executor return nil, fmt.Errorf("cannot upgrade the agent: %w", err) } @@ -346,12 +344,13 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string // no default flavor, keep everything in case flavor is not specified // in case of error fallback to keep-all - detectedFlavor, err := install.UsedFlavor(paths.Top(), "") + detectedFlavor, err := u.unpacker.detectFlavor(paths.Top(), "") if err != nil { u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", paths.Top(), err) } u.log.Debugf("detected used flavor: %q", detectedFlavor) - unpackRes, unpackErr := u.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) + + unpackRes, unpackErr := u.unpacker.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) err = goerrors.Join(err, u.diskSpaceErrorFunc(unpackErr)) if unpackRes.VersionedHome == "" { @@ -583,7 +582,7 @@ func (u *Upgrader) sourceURI(retrievedURI string) string { return u.settings.SourceURI } -func extractAgentVersion(metadata packageMetadata, upgradeVersion string) agentVersion { +func (u *upgradeUnpacker) extractAgentVersion(metadata packageMetadata, upgradeVersion string) agentVersion { newVersion := agentVersion{} if metadata.manifest != nil { packageDesc := metadata.manifest.Package diff 
--git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index afa2134f413..b2dc11c5826 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -809,7 +809,9 @@ func TestExtractVersion(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - actualNewVersion := extractAgentVersion(test.args.metadata, test.args.version) + log, _ := loggertest.New(test.name) + unpacker := &upgradeUnpacker{log: log} + actualNewVersion := unpacker.extractAgentVersion(test.args.metadata, test.args.version) assert.Equal(t, test.want.newVersion, actualNewVersion, "Unexpected new version result: extractAgentVersion(%v, %v) should be %v", test.args.metadata, test.args.version, test.want.newVersion) }) From 4cb407bdd8cd3a79e642ff113dee8f53718c3d1e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 02:35:17 +0300 Subject: [PATCH 046/127] enhancement(5235): added replacer interface --- .../pkg/agent/application/upgrade/upgrade.go | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index f5a9c3919c1..6ac2e60f3b2 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -108,6 +108,12 @@ type unpacker interface { detectFlavor(topPath, flavor string) (string, error) } +type replacer interface { + copyActionStore(log *logger.Logger, newHome string) error + copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error + changeSymlink(log *logger.Logger, topPath, symlinkPath, newPath string) error +} + // Upgrader performs an upgrade type Upgrader struct { log *logger.Logger @@ -120,6 +126,7 @@ type Upgrader struct { diskSpaceErrorFunc func(error) error artifactDownloader artifactDownloader unpacker unpacker + replacer replacer } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -155,6 +162,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), unpacker: &upgradeUnpacker{log: log}, + replacer: &upgradeReplacer{}, }, nil } @@ -255,6 +263,13 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } +type upgradeReplacer struct { +} + +func (u *upgradeReplacer) changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { + return changeSymlink(log, topDirPath, symlinkPath, newTarget) +} + // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. 
func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -373,13 +388,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - // u.log.Infof("unpackRes: %+v", unpackRes) - - // u.log.Infof("unpackRes.Hash: %s", unpackRes.Hash) - - // u.log.Infof("unpackRes.VersionedHome: %s", unpackRes.VersionedHome) - - err = copyActionStore(u.log, newHome) + err = u.replacer.copyActionStore(u.log, newHome) if err != nil { err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) return nil, err @@ -388,10 +397,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string newRunPath := filepath.Join(newHome, "run") oldRunPath := filepath.Join(paths.Run()) - // u.log.Infof("oldRunPath: %s", oldRunPath) - // u.log.Infof("newRunPath: %s", newRunPath) - - err = copyRunDirectory(u.log, oldRunPath, newRunPath) + err = u.replacer.copyRunDirectory(u.log, oldRunPath, newRunPath) if err != nil { err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) return nil, err @@ -421,7 +427,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - err = changeSymlink(u.log, paths.Top(), symlinkPath, newPath) + err = u.replacer.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) if err != nil { return nil, err } @@ -602,7 +608,7 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer return current == newVersion } -func copyActionStore(log *logger.Logger, newHome string) error { +func (u *upgradeReplacer) copyActionStore(log *logger.Logger, newHome string) error { // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} log.Infow("Copying action store", "new_home_path", newHome) @@ -627,7 +633,7 @@ func copyActionStore(log *logger.Logger, newHome string) error { return nil } -func copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { +func (u *upgradeReplacer) copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { log.Infow("Copying run directory", "new_run_path", newRunPath, "old_run_path", oldRunPath) if err := os.MkdirAll(newRunPath, runDirMod); err != nil { From 3f841a92bcaaa97352a39c62550821f82537b3af Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 03:19:01 +0300 Subject: [PATCH 047/127] enhancement(5235): added watcher interface and updated tests --- .../application/upgrade/rollback_test.go | 3 ++- .../agent/application/upgrade/step_mark.go | 2 +- .../pkg/agent/application/upgrade/upgrade.go | 20 ++++++++++++++----- .../agent/application/upgrade/upgrade_test.go | 3 ++- 4 files changed, 20 insertions(+), 8 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/rollback_test.go b/internal/pkg/agent/application/upgrade/rollback_test.go index 3f9cc0a33ab..d1db8b866df 100644 --- a/internal/pkg/agent/application/upgrade/rollback_test.go +++ b/internal/pkg/agent/application/upgrade/rollback_test.go @@ -504,7 +504,8 @@ func createUpdateMarker(t *testing.T, log *logger.Logger, topDir, newAgentVersio hash: oldAgentHash, versionedHome: oldAgentVersionedHome, } - err := markUpgrade(log, 
+ watcher := &upgradeWatcher{} + err := watcher.markUpgrade(log, paths.DataFrom(topDir), newAgentInstall, oldAgentInstall, diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index 65b4e878a40..aa76be897d8 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -197,7 +197,7 @@ type agentInstall struct { } // markUpgrade marks update happened so we can handle grace period -func markUpgrade(log *logger.Logger, dataDirPath string, agent, previousAgent agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, desiredOutcome UpgradeOutcome) error { +func (u *upgradeWatcher) markUpgrade(log *logger.Logger, dataDirPath string, agent, previousAgent agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, desiredOutcome UpgradeOutcome) error { if len(previousAgent.hash) > hashLen { previousAgent.hash = previousAgent.hash[:hashLen] diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 6ac2e60f3b2..6a10f817b09 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -113,6 +113,11 @@ type replacer interface { copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error changeSymlink(log *logger.Logger, topPath, symlinkPath, newPath string) error } +type watcher interface { + waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error + selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string + markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error +} // Upgrader performs an upgrade type Upgrader struct { @@ -127,6 +132,7 @@ type Upgrader struct { artifactDownloader artifactDownloader unpacker unpacker replacer replacer + watcher watcher } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -163,6 +169,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), unpacker: &upgradeUnpacker{log: log}, replacer: &upgradeReplacer{}, + watcher: &upgradeWatcher{}, }, nil } @@ -270,6 +277,9 @@ func (u *upgradeReplacer) changeSymlink(log *logger.Logger, topDirPath, symlinkP return changeSymlink(log, topDirPath, symlinkPath, newTarget) } +type upgradeWatcher struct { +} + // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. 
func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -451,7 +461,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string versionedHome: currentVersionedHome, } - err = markUpgrade(u.log, + err = u.watcher.markUpgrade(u.log, paths.Data(), // data dir to place the marker in current, // new agent version data previous, // old agent version data @@ -460,14 +470,14 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - watcherExecutable := selectWatcherExecutable(paths.Top(), previous, current) + watcherExecutable := u.watcher.selectWatcherExecutable(paths.Top(), previous, current) watcherCmd, err := InvokeWatcher(u.log, watcherExecutable) if err != nil { return nil, err } - err = waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) + err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) if err != nil { err = goerrors.Join(err, watcherCmd.Process.Kill()) return nil, err @@ -485,7 +495,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return cb, nil } -func selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string { +func (u *upgradeWatcher) selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string { // check if the upgraded version is less than the previous (currently installed) version if current.parsedVersion.Less(*previous.parsedVersion) { // use the current agent executable for watch, if downgrading the old agent doesn't understand the current agent's path structure. 
@@ -496,7 +506,7 @@ func selectWatcherExecutable(topDir string, previous agentInstall, current agent } } -func waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error { +func (u *upgradeWatcher) waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error { return waitForWatcherWithTimeoutCreationFunc(ctx, log, markerFilePath, waitTime, context.WithTimeout) } diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index b2dc11c5826..d3f58bed43e 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1204,7 +1204,8 @@ func Test_selectWatcherExecutable(t *testing.T) { fakeTopDir := filepath.Join(t.TempDir(), "Elastic", "Agent") for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, paths.BinaryPath(filepath.Join(fakeTopDir, tt.want), agentName), selectWatcherExecutable(fakeTopDir, tt.args.previous, tt.args.current), "selectWatcherExecutable(%v, %v)", tt.args.previous, tt.args.current) + watcher := &upgradeWatcher{} + assert.Equalf(t, paths.BinaryPath(filepath.Join(fakeTopDir, tt.want), agentName), watcher.selectWatcherExecutable(fakeTopDir, tt.args.previous, tt.args.current), "selectWatcherExecutable(%v, %v)", tt.args.previous, tt.args.current) }) } } From 8a3cf7a572a21b4f2ac35c2bd23b2d16577783ff Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 13:53:49 +0300 Subject: [PATCH 048/127] enhancement(5235): removed test case --- .../agent/application/upgrade/upgrade_test.go | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index d3f58bed43e..27ce54b93ba 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1334,27 +1334,6 @@ func TestDownloaderFactoryProvider(t *testing.T) { require.Equal(t, "downloader factory \"nonExistentFactory\" not found", err.Error()) } -func TestNewUpgrader(t *testing.T) { - // TODO: address this test - // logger, err := logger.New("test", false) - // require.NoError(t, err) - - // upgrader, err := NewUpgrader(logger, nil, nil) - // require.NoError(t, err) - - // fileDownloaderFactory, err := upgrader.artifactDownloader.downloaderFactoryProvider.GetDownloaderFactory(fileDownloaderFactory) - // require.NoError(t, err) - - // fileDownloader, err := fileDownloaderFactory(nil, nil, nil, nil) - // require.NoError(t, err) - // require.IsType(t, &fs.Downloader{}, fileDownloader) - - // composedDownloader, err := upgrader.downloaderFactoryProvider.GetDownloaderFactory(composedDownloaderFactory) - // require.NoError(t, err) - - // require.Equal(t, reflect.ValueOf(composedDownloader).Pointer(), reflect.ValueOf(newDownloader).Pointer()) -} - func setupForFileDownloader(sourcePrefix string, expectedFileName string, partialData []byte) setupFunc { return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { testDownloadPath := filepath.Join(basePath, "downloads") From 66362f852071ba08292d215b7cd210dd81256b6e Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 13:54:28 +0300 Subject: [PATCH 049/127] enhancement(5235): moved clean up function into step download, removed cleanup files --- .../pkg/agent/application/upgrade/cleanup.go | 44 ------------- 
.../agent/application/upgrade/cleanup_test.go | 63 ------------------- .../application/upgrade/step_download.go | 30 +++++++++ .../application/upgrade/step_download_test.go | 49 +++++++++++++++ 4 files changed, 79 insertions(+), 107 deletions(-) delete mode 100644 internal/pkg/agent/application/upgrade/cleanup.go delete mode 100644 internal/pkg/agent/application/upgrade/cleanup_test.go diff --git a/internal/pkg/agent/application/upgrade/cleanup.go b/internal/pkg/agent/application/upgrade/cleanup.go deleted file mode 100644 index 8a0e12e3823..00000000000 --- a/internal/pkg/agent/application/upgrade/cleanup.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - -package upgrade - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" - "github.com/elastic/elastic-agent/pkg/core/logger" -) - -// cleanNonMatchingVersionsFromDownloads will remove files that do not have the passed version number from the downloads directory. -func (u *upgradeArtifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { - downloadsPath := paths.Downloads() - log.Infow("Cleaning up non-matching downloaded versions", "version", version, "downloads.path", downloadsPath) - - files, err := os.ReadDir(downloadsPath) - if os.IsNotExist(err) { - // nothing to clean up - return nil - } - - if err != nil { - return fmt.Errorf("unable to read directory %q: %w", paths.Downloads(), err) - } - var errs []error - for _, file := range files { - if file.IsDir() { - continue - } - if !strings.Contains(file.Name(), version) { - if err := os.Remove(filepath.Join(paths.Downloads(), file.Name())); err != nil { - errs = append(errs, fmt.Errorf("unable to remove file %q: %w", filepath.Join(paths.Downloads(), file.Name()), err)) - } - } - } - return errors.Join(errs...) -} diff --git a/internal/pkg/agent/application/upgrade/cleanup_test.go b/internal/pkg/agent/application/upgrade/cleanup_test.go deleted file mode 100644 index 03e319c9fae..00000000000 --- a/internal/pkg/agent/application/upgrade/cleanup_test.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. 
- -package upgrade - -import ( - "os" - "path/filepath" - "testing" - - "github.com/elastic/elastic-agent-libs/logp" - "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" - "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" - "github.com/elastic/elastic-agent/pkg/core/logger" - - "github.com/stretchr/testify/require" -) - -func setupDir(t *testing.T) { - t.Helper() - dir := t.TempDir() - paths.SetDownloads(dir) - - err := os.WriteFile(filepath.Join(dir, "test-8.3.0-file"), []byte("hello, world!"), 0600) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "test-8.4.0-file"), []byte("hello, world!"), 0600) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "test-8.5.0-file"), []byte("hello, world!"), 0600) - require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "test-hash-file"), []byte("hello, world!"), 0600) - require.NoError(t, err) -} - -func TestPreUpgradeCleanup(t *testing.T) { - setupDir(t) - log := newErrorLogger(t) - u := newUpgradeArtifactDownloader(log, &artifact.Config{}, nil) - err := u.cleanNonMatchingVersionsFromDownloads(log, "8.4.0") - require.NoError(t, err) - - files, err := os.ReadDir(paths.Downloads()) - require.NoError(t, err) - require.Len(t, files, 1) - require.Equal(t, "test-8.4.0-file", files[0].Name()) - p, err := os.ReadFile(filepath.Join(paths.Downloads(), files[0].Name())) - require.NoError(t, err) - require.Equal(t, []byte("hello, world!"), p) -} - -func newErrorLogger(t *testing.T) *logger.Logger { - t.Helper() - - loggerCfg := logger.DefaultLoggingConfig() - loggerCfg.Level = logp.ErrorLevel - - eventLoggerCfg := logger.DefaultEventLoggingConfig() - eventLoggerCfg.Level = loggerCfg.Level - - log, err := logger.NewFromConfig("", loggerCfg, eventLoggerCfg, false) - require.NoError(t, err) - return log -} diff --git a/internal/pkg/agent/application/upgrade/step_download.go b/internal/pkg/agent/application/upgrade/step_download.go index 1b84e8ed1d9..9d70c549e73 100644 --- a/internal/pkg/agent/application/upgrade/step_download.go +++ b/internal/pkg/agent/application/upgrade/step_download.go @@ -6,9 +6,11 @@ package upgrade import ( "context" + goerrors "errors" "fmt" "net/url" "os" + "path/filepath" "strings" "time" @@ -287,3 +289,31 @@ func (u *upgradeArtifactDownloader) downloadWithRetries( return downloadResult, nil } + +// cleanNonMatchingVersionsFromDownloads will remove files that do not have the passed version number from the downloads directory. +func (u *upgradeArtifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { + downloadsPath := paths.Downloads() + log.Infow("Cleaning up non-matching downloaded versions", "version", version, "downloads.path", downloadsPath) + + files, err := os.ReadDir(downloadsPath) + if os.IsNotExist(err) { + // nothing to clean up + return nil + } + + if err != nil { + return fmt.Errorf("unable to read directory %q: %w", paths.Downloads(), err) + } + var errs []error + for _, file := range files { + if file.IsDir() { + continue + } + if !strings.Contains(file.Name(), version) { + if err := os.Remove(filepath.Join(paths.Downloads(), file.Name())); err != nil { + errs = append(errs, fmt.Errorf("unable to remove file %q: %w", filepath.Join(paths.Downloads(), file.Name()), err)) + } + } + } + return goerrors.Join(errs...) 
+} diff --git a/internal/pkg/agent/application/upgrade/step_download_test.go b/internal/pkg/agent/application/upgrade/step_download_test.go index 2cf452ba00e..bf37344e07a 100644 --- a/internal/pkg/agent/application/upgrade/step_download_test.go +++ b/internal/pkg/agent/application/upgrade/step_download_test.go @@ -8,13 +8,17 @@ import ( "context" "encoding/json" "fmt" + "os" + "path/filepath" "strings" "testing" "time" "github.com/stretchr/testify/require" + "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/transport/httpcommon" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" @@ -375,3 +379,48 @@ func TestDownloadArtifact(t *testing.T) { require.Equal(t, composedDownloaderFactory, u.downloaderFactoryProvider.(*mockDownloaderFactoryProvider).calledWithName) }) } + +func setupDir(t *testing.T) { + t.Helper() + dir := t.TempDir() + paths.SetDownloads(dir) + + err := os.WriteFile(filepath.Join(dir, "test-8.3.0-file"), []byte("hello, world!"), 0600) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir, "test-8.4.0-file"), []byte("hello, world!"), 0600) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir, "test-8.5.0-file"), []byte("hello, world!"), 0600) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(dir, "test-hash-file"), []byte("hello, world!"), 0600) + require.NoError(t, err) +} + +func TestPreUpgradeCleanup(t *testing.T) { + setupDir(t) + log := newErrorLogger(t) + u := newUpgradeArtifactDownloader(log, &artifact.Config{}, nil) + err := u.cleanNonMatchingVersionsFromDownloads(log, "8.4.0") + require.NoError(t, err) + + files, err := os.ReadDir(paths.Downloads()) + require.NoError(t, err) + require.Len(t, files, 1) + require.Equal(t, "test-8.4.0-file", files[0].Name()) + p, err := os.ReadFile(filepath.Join(paths.Downloads(), files[0].Name())) + require.NoError(t, err) + require.Equal(t, []byte("hello, world!"), p) +} + +func newErrorLogger(t *testing.T) *logger.Logger { + t.Helper() + + loggerCfg := logger.DefaultLoggingConfig() + loggerCfg.Level = logp.ErrorLevel + + eventLoggerCfg := logger.DefaultEventLoggingConfig() + eventLoggerCfg.Level = loggerCfg.Level + + log, err := logger.NewFromConfig("", loggerCfg, eventLoggerCfg, false) + require.NoError(t, err) + return log +} From 17f91d3f3cfdf550d5e23cd55142c2e489a061c6 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:20:15 +0300 Subject: [PATCH 050/127] enhancement(5235): added relinker interface --- internal/pkg/agent/application/upgrade/rollback.go | 9 ++++++--- .../pkg/agent/application/upgrade/step_relink.go | 11 ++++++++--- internal/pkg/agent/application/upgrade/upgrade.go | 12 ++++++++---- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/rollback.go b/internal/pkg/agent/application/upgrade/rollback.go index 90be1bbe2df..baedca3f71a 100644 --- a/internal/pkg/agent/application/upgrade/rollback.go +++ b/internal/pkg/agent/application/upgrade/rollback.go @@ -46,8 +46,10 @@ func Rollback(ctx context.Context, log *logger.Logger, c client.Client, topDirPa // paths.BinaryPath properly derives the binary directory depending on the platform. 
The path to the binary for macOS is inside of the app bundle. symlinkTarget = paths.BinaryPath(filepath.Join(paths.DataFrom(topDirPath), hashedDir), agentName) } + + relinker := &upgradeRelinker{} // change symlink - if err := changeSymlink(log, topDirPath, symlinkPath, symlinkTarget); err != nil { + if err := relinker.changeSymlink(log, topDirPath, symlinkPath, symlinkTarget); err != nil { return err } @@ -102,9 +104,10 @@ func cleanup(log *logger.Logger, topDirPath, currentVersionedHome, currentHash s return err } + relinker := &upgradeRelinker{} // remove symlink to avoid upgrade failures, ignore error - prevSymlink := prevSymlinkPath(topDirPath) - log.Infow("Removing previous symlink path", "file.path", prevSymlinkPath(topDirPath)) + prevSymlink := relinker.prevSymlinkPath(topDirPath) + log.Infow("Removing previous symlink path", "file.path", prevSymlink) _ = os.Remove(prevSymlink) dirPrefix := fmt.Sprintf("%s-", agentName) diff --git a/internal/pkg/agent/application/upgrade/step_relink.go b/internal/pkg/agent/application/upgrade/step_relink.go index f09e251200f..9eb3ceada45 100644 --- a/internal/pkg/agent/application/upgrade/step_relink.go +++ b/internal/pkg/agent/application/upgrade/step_relink.go @@ -19,7 +19,11 @@ const ( exe = ".exe" ) -func changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { +type upgradeRelinker struct { +} + +// TODO: add tests for this +func (u *upgradeRelinker) changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { log.Infof("Changing symlink, topDirPath: %s, symlinkPath: %s, newTarget: %s", topDirPath, symlinkPath, newTarget) // handle windows suffixes if runtime.GOOS == windows { @@ -27,7 +31,7 @@ func changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string newTarget += exe } - prevNewPath := prevSymlinkPath(topDirPath) + prevNewPath := u.prevSymlinkPath(topDirPath) log.Infow("Changing symlink", "symlink_path", symlinkPath, "new_path", newTarget, "prev_path", prevNewPath) // remove symlink to avoid upgrade failures @@ -43,7 +47,8 @@ func changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string return file.SafeFileRotate(symlinkPath, prevNewPath) } -func prevSymlinkPath(topDirPath string) string { +// TODO: add tests for this +func (u *upgradeRelinker) prevSymlinkPath(topDirPath string) string { agentPrevName := agentName + ".prev" // handle windows suffixes diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 6a10f817b09..04686a49885 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -113,6 +113,10 @@ type replacer interface { copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error changeSymlink(log *logger.Logger, topPath, symlinkPath, newPath string) error } + +type relinker interface { + changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error +} type watcher interface { waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string @@ -131,7 +135,7 @@ type Upgrader struct { diskSpaceErrorFunc func(error) error artifactDownloader artifactDownloader unpacker unpacker - replacer replacer + relinker relinker watcher watcher } @@ -168,7 +172,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A 
diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), unpacker: &upgradeUnpacker{log: log}, - replacer: &upgradeReplacer{}, + relinker: &upgradeRelinker{}, watcher: &upgradeWatcher{}, }, nil } @@ -431,13 +435,13 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) } - if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(changeSymlink, paths.Top(), currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { + if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, paths.Top(), currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { err = goerrors.Join(err, symlinkCleanupSetupErr) } u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - err = u.replacer.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) + err = u.relinker.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) if err != nil { return nil, err } From c201c9d0bb962afc1b58341aeffa0bf76ad6c32c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:21:34 +0300 Subject: [PATCH 051/127] enhancement(5235): added directory copier interface --- .../pkg/agent/application/upgrade/upgrade.go | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 04686a49885..3c31dab57ea 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -137,6 +137,7 @@ type Upgrader struct { unpacker unpacker relinker relinker watcher watcher + directoryCopier agentDirectoryCopier } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -174,6 +175,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A unpacker: &upgradeUnpacker{log: log}, relinker: &upgradeRelinker{}, watcher: &upgradeWatcher{}, + directoryCopier: &directoryCopier{}, }, nil } @@ -274,16 +276,6 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } -type upgradeReplacer struct { -} - -func (u *upgradeReplacer) changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { - return changeSymlink(log, topDirPath, symlinkPath, newTarget) -} - -type upgradeWatcher struct { -} - // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. 
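The diskSpaceErrorFunc wired into the Upgrader above (via upgradeErrors.ToDiskSpaceErrorFunc) is what the copy steps below pass their failures through before returning them. The platform-specific implementation is not shown in this hunk; purely as a hedged, illustrative sketch of the idea on Unix — the function name below is invented and is not the code this series ships — such a mapping could look like:

// Illustrative sketch only (hypothetical name, Unix-only): translate a raw
// "no space left on device" failure into a clearer error for the user.
package upgrade

import (
	"errors"
	"fmt"
	"syscall"
)

func toDiskSpaceErrorSketch(err error) error {
	if err == nil {
		return nil
	}
	// os.PathError and os.SyscallError unwrap to syscall.Errno, so errors.Is
	// can detect ENOSPC returned by file writes during the upgrade copies.
	if errors.Is(err, syscall.ENOSPC) {
		return fmt.Errorf("insufficient disk space: %w", err)
	}
	return err
}

Errors that are not disk-space related would be returned unchanged, so the surrounding error handling keeps working as before.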
func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -402,7 +394,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - err = u.replacer.copyActionStore(u.log, newHome) + err = u.directoryCopier.copyActionStore(u.log, newHome) if err != nil { err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) return nil, err @@ -411,7 +403,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string newRunPath := filepath.Join(newHome, "run") oldRunPath := filepath.Join(paths.Run()) - err = u.replacer.copyRunDirectory(u.log, oldRunPath, newRunPath) + err = u.directoryCopier.copyRunDirectory(u.log, oldRunPath, newRunPath) if err != nil { err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) return nil, err @@ -622,7 +614,15 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer return current == newVersion } -func (u *upgradeReplacer) copyActionStore(log *logger.Logger, newHome string) error { +type agentDirectoryCopier interface { + copyActionStore(log *logger.Logger, newHome string) error + copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error +} + +type directoryCopier struct { +} + +func (u *directoryCopier) copyActionStore(log *logger.Logger, newHome string) error { // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} log.Infow("Copying action store", "new_home_path", newHome) @@ -647,7 +647,7 @@ func (u *upgradeReplacer) copyActionStore(log *logger.Logger, newHome string) er return nil } -func (u *upgradeReplacer) copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { +func (u *directoryCopier) copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { log.Infow("Copying run directory", "new_run_path", newRunPath, "old_run_path", oldRunPath) if err := os.MkdirAll(newRunPath, runDirMod); err != nil { From 1d690ffd1e5d01ec46f3f4fd70a569b7d4e8af46 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:22:40 +0300 Subject: [PATCH 052/127] enhancement(5235): moved unpacker functions into one file --- .../agent/application/upgrade/step_unpack.go | 16 ++ .../application/upgrade/step_unpack_test.go | 174 ++++++++++++++++++ .../pkg/agent/application/upgrade/upgrade.go | 15 -- 3 files changed, 190 insertions(+), 15 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 2ad66f29284..e6171c2bcb3 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -24,6 +24,7 @@ import ( v1 "github.com/elastic/elastic-agent/pkg/api/v1" "github.com/elastic/elastic-agent/pkg/component" "github.com/elastic/elastic-agent/pkg/core/logger" + agtversion "github.com/elastic/elastic-agent/pkg/version" ) // UnpackResult contains the location and hash of the unpacked agent files @@ -737,3 +738,18 @@ func createVersionedHomeFromHash(hash string) string { func (u *upgradeUnpacker) detectFlavor(topPath, flavor string) (string, error) { return install.UsedFlavor(topPath, flavor) } + +func (u 
*upgradeUnpacker) extractAgentVersion(metadata packageMetadata, upgradeVersion string) agentVersion { + newVersion := agentVersion{} + if metadata.manifest != nil { + packageDesc := metadata.manifest.Package + newVersion.version = packageDesc.Version + newVersion.snapshot = packageDesc.Snapshot + } else { + // extract version info from the version string (we can ignore parsing errors as it would have never passed the download step) + parsedVersion, _ := agtversion.ParseVersion(upgradeVersion) + newVersion.version, newVersion.snapshot = parsedVersion.ExtractSnapshotFromVersionString() + } + newVersion.hash = metadata.hash + return newVersion +} diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index c7c01db3579..a4adf9752de 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -649,3 +649,177 @@ func TestGetFileNamePrefix(t *testing.T) { } } + +func TestExtractVersion(t *testing.T) { + type args struct { + metadata packageMetadata + version string + } + type want struct { + newVersion agentVersion + } + + tests := []struct { + name string + args args + want want + }{ + { + name: "same version, snapshot flag and hash", + args: args{ + metadata: packageMetadata{ + manifest: &v1.PackageManifest{ + Package: v1.PackageDesc{ + Version: "1.2.3", + Snapshot: true, + VersionedHome: "", + PathMappings: nil, + }, + }, + hash: "abcdef", + }, + version: "unused", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTabcdef, + }, + }, + { + name: "same hash, snapshot flag, different version", + args: args{ + metadata: packageMetadata{ + manifest: &v1.PackageManifest{ + Package: v1.PackageDesc{ + Version: "1.2.3-repackaged", + Snapshot: true, + VersionedHome: "", + PathMappings: nil, + }, + }, + hash: "abcdef", + }, + version: "unused", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTabcdefRepackaged, + }, + }, + { + name: "same version and hash, different snapshot flag (SNAPSHOT promotion to release)", + args: args{ + metadata: packageMetadata{ + manifest: &v1.PackageManifest{ + Package: v1.PackageDesc{ + Version: "1.2.3", + Snapshot: false, + VersionedHome: "", + PathMappings: nil, + }, + }, + hash: "abcdef", + }, + version: "unused", + }, + want: want{ + newVersion: agentVersion123abcdef, + }, + }, + { + name: "same version and snapshot, different hash (SNAPSHOT upgrade)", + args: args{ + metadata: packageMetadata{ + manifest: &v1.PackageManifest{ + Package: v1.PackageDesc{ + Version: "1.2.3", + Snapshot: true, + VersionedHome: "", + PathMappings: nil, + }, + }, + hash: "ghijkl", + }, + version: "unused", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTghijkl, + }, + }, + { + name: "same version, snapshot flag and hash, no manifest", + args: args{ + metadata: packageMetadata{ + manifest: nil, + hash: "abcdef", + }, + version: "1.2.3-SNAPSHOT", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTabcdef, + }, + }, + { + name: "same hash, snapshot flag, different version, no manifest", + args: args{ + metadata: packageMetadata{ + manifest: nil, + hash: "abcdef", + }, + version: "1.2.3-SNAPSHOT.repackaged", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTabcdefRepackaged, + }, + }, + { + name: "same version and hash, different snapshot flag, no manifest (SNAPSHOT promotion to release)", + args: args{ + metadata: packageMetadata{ + manifest: nil, + hash: "abcdef", + }, + version: "1.2.3", + }, + want: want{ + 
newVersion: agentVersion123abcdef, + }, + }, + { + name: "same version and snapshot, different hash (SNAPSHOT upgrade)", + args: args{ + metadata: packageMetadata{ + manifest: nil, + hash: "ghijkl", + }, + version: "1.2.3-SNAPSHOT", + }, + want: want{ + newVersion: agentVersion123SNAPSHOTghijkl, + }, + }, + { + name: "same version and snapshot, no hash (SNAPSHOT upgrade before download)", + args: args{ + metadata: packageMetadata{ + manifest: nil, + }, + version: "1.2.3-SNAPSHOT", + }, + want: want{ + newVersion: agentVersion{ + version: "1.2.3", + snapshot: true, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + log, _ := loggertest.New(test.name) + unpacker := &upgradeUnpacker{log: log} + actualNewVersion := unpacker.extractAgentVersion(test.args.metadata, test.args.version) + assert.Equal(t, test.want.newVersion, actualNewVersion, "Unexpected new version result: extractAgentVersion(%v, %v) should be %v", + test.args.metadata, test.args.version, test.want.newVersion) + }) + } +} diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 3c31dab57ea..60f234fdc0d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -594,21 +594,6 @@ func (u *Upgrader) sourceURI(retrievedURI string) string { return u.settings.SourceURI } -func (u *upgradeUnpacker) extractAgentVersion(metadata packageMetadata, upgradeVersion string) agentVersion { - newVersion := agentVersion{} - if metadata.manifest != nil { - packageDesc := metadata.manifest.Package - newVersion.version = packageDesc.Version - newVersion.snapshot = packageDesc.Snapshot - } else { - // extract version info from the version string (we can ignore parsing errors as it would have never passed the download step) - parsedVersion, _ := agtversion.ParseVersion(upgradeVersion) - newVersion.version, newVersion.snapshot = parsedVersion.ExtractSnapshotFromVersionString() - } - newVersion.hash = metadata.hash - return newVersion -} - func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVersion) bool { log.Debugw("Comparing current and new agent version", "current_version", current, "new_version", newVersion) return current == newVersion From 49fcef21704cec0675d4c07c6024f0f1b9585ff1 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:23:54 +0300 Subject: [PATCH 053/127] enhancement(5235): moved ugprade wather and marker functions into one file --- .../agent/application/upgrade/step_mark.go | 20 ++ .../pkg/agent/application/upgrade/upgrade.go | 15 -- .../agent/application/upgrade/upgrade_test.go | 174 ------------------ 3 files changed, 20 insertions(+), 189 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index aa76be897d8..893a86981ad 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -5,6 +5,7 @@ package upgrade import ( + "context" "encoding/json" "fmt" "os" @@ -195,6 +196,8 @@ type agentInstall struct { hash string versionedHome string } +type upgradeWatcher struct { +} // markUpgrade marks update happened so we can handle grace period func (u *upgradeWatcher) markUpgrade(log *logger.Logger, dataDirPath string, agent, previousAgent agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, desiredOutcome UpgradeOutcome) error { @@ -234,6 +237,23 @@ func (u 
*upgradeWatcher) markUpgrade(log *logger.Logger, dataDirPath string, age return nil } +// TODO: add tests for this +func (u *upgradeWatcher) selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string { + // check if the upgraded version is less than the previous (currently installed) version + if current.parsedVersion.Less(*previous.parsedVersion) { + // use the current agent executable for watch, if downgrading the old agent doesn't understand the current agent's path structure. + return paths.BinaryPath(filepath.Join(topDir, previous.versionedHome), agentName) + } else { + // use the new agent executable as it should be able to parse the new update marker + return paths.BinaryPath(filepath.Join(topDir, current.versionedHome), agentName) + } +} + +// TODO: add tests for this +func (u *upgradeWatcher) waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error { + return waitForWatcherWithTimeoutCreationFunc(ctx, log, markerFilePath, waitTime, context.WithTimeout) +} + // UpdateActiveCommit updates active.commit file to point to active version. func UpdateActiveCommit(log *logger.Logger, topDirPath, hash string) error { activeCommitPath := filepath.Join(topDirPath, agentCommitFile) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 60f234fdc0d..c89920e8aeb 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -491,21 +491,6 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return cb, nil } -func (u *upgradeWatcher) selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string { - // check if the upgraded version is less than the previous (currently installed) version - if current.parsedVersion.Less(*previous.parsedVersion) { - // use the current agent executable for watch, if downgrading the old agent doesn't understand the current agent's path structure. 
- return paths.BinaryPath(filepath.Join(topDir, previous.versionedHome), agentName) - } else { - // use the new agent executable as it should be able to parse the new update marker - return paths.BinaryPath(filepath.Join(topDir, current.versionedHome), agentName) - } -} - -func (u *upgradeWatcher) waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error { - return waitForWatcherWithTimeoutCreationFunc(ctx, log, markerFilePath, waitTime, context.WithTimeout) -} - type createContextWithTimeout func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) func waitForWatcherWithTimeoutCreationFunc(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error { diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 27ce54b93ba..3ca869816e0 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -644,180 +644,6 @@ var agentVersion123SNAPSHOTghijkl = agentVersion{ hash: "ghijkl", } -func TestExtractVersion(t *testing.T) { - type args struct { - metadata packageMetadata - version string - } - type want struct { - newVersion agentVersion - } - - tests := []struct { - name string - args args - want want - }{ - { - name: "same version, snapshot flag and hash", - args: args{ - metadata: packageMetadata{ - manifest: &v1.PackageManifest{ - Package: v1.PackageDesc{ - Version: "1.2.3", - Snapshot: true, - VersionedHome: "", - PathMappings: nil, - }, - }, - hash: "abcdef", - }, - version: "unused", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTabcdef, - }, - }, - { - name: "same hash, snapshot flag, different version", - args: args{ - metadata: packageMetadata{ - manifest: &v1.PackageManifest{ - Package: v1.PackageDesc{ - Version: "1.2.3-repackaged", - Snapshot: true, - VersionedHome: "", - PathMappings: nil, - }, - }, - hash: "abcdef", - }, - version: "unused", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTabcdefRepackaged, - }, - }, - { - name: "same version and hash, different snapshot flag (SNAPSHOT promotion to release)", - args: args{ - metadata: packageMetadata{ - manifest: &v1.PackageManifest{ - Package: v1.PackageDesc{ - Version: "1.2.3", - Snapshot: false, - VersionedHome: "", - PathMappings: nil, - }, - }, - hash: "abcdef", - }, - version: "unused", - }, - want: want{ - newVersion: agentVersion123abcdef, - }, - }, - { - name: "same version and snapshot, different hash (SNAPSHOT upgrade)", - args: args{ - metadata: packageMetadata{ - manifest: &v1.PackageManifest{ - Package: v1.PackageDesc{ - Version: "1.2.3", - Snapshot: true, - VersionedHome: "", - PathMappings: nil, - }, - }, - hash: "ghijkl", - }, - version: "unused", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTghijkl, - }, - }, - { - name: "same version, snapshot flag and hash, no manifest", - args: args{ - metadata: packageMetadata{ - manifest: nil, - hash: "abcdef", - }, - version: "1.2.3-SNAPSHOT", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTabcdef, - }, - }, - { - name: "same hash, snapshot flag, different version, no manifest", - args: args{ - metadata: packageMetadata{ - manifest: nil, - hash: "abcdef", - }, - version: "1.2.3-SNAPSHOT.repackaged", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTabcdefRepackaged, - }, - }, - { - name: "same version and hash, different snapshot flag, 
no manifest (SNAPSHOT promotion to release)", - args: args{ - metadata: packageMetadata{ - manifest: nil, - hash: "abcdef", - }, - version: "1.2.3", - }, - want: want{ - newVersion: agentVersion123abcdef, - }, - }, - { - name: "same version and snapshot, different hash (SNAPSHOT upgrade)", - args: args{ - metadata: packageMetadata{ - manifest: nil, - hash: "ghijkl", - }, - version: "1.2.3-SNAPSHOT", - }, - want: want{ - newVersion: agentVersion123SNAPSHOTghijkl, - }, - }, - { - name: "same version and snapshot, no hash (SNAPSHOT upgrade before download)", - args: args{ - metadata: packageMetadata{ - manifest: nil, - }, - version: "1.2.3-SNAPSHOT", - }, - want: want{ - newVersion: agentVersion{ - version: "1.2.3", - snapshot: true, - }, - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - log, _ := loggertest.New(test.name) - unpacker := &upgradeUnpacker{log: log} - actualNewVersion := unpacker.extractAgentVersion(test.args.metadata, test.args.version) - assert.Equal(t, test.want.newVersion, actualNewVersion, "Unexpected new version result: extractAgentVersion(%v, %v) should be %v", - test.args.metadata, test.args.version, test.want.newVersion) - }) - } -} - func TestCheckUpgrade(t *testing.T) { type args struct { current agentVersion From 1fcd3fabe824ec6f8427f70764b749f6de6f8812 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:34:34 +0300 Subject: [PATCH 054/127] enhancement(5235): refactpred upgrade watcher functions and tests --- .../agent/application/upgrade/step_mark.go | 30 +++- .../application/upgrade/step_mark_test.go | 140 ++++++++++++++++++ .../pkg/agent/application/upgrade/upgrade.go | 38 +---- .../agent/application/upgrade/upgrade_test.go | 134 ----------------- 4 files changed, 173 insertions(+), 169 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index 893a86981ad..448d09d6e09 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -7,6 +7,7 @@ package upgrade import ( "context" "encoding/json" + goerrors "errors" "fmt" "os" "path/filepath" @@ -250,8 +251,33 @@ func (u *upgradeWatcher) selectWatcherExecutable(topDir string, previous agentIn } // TODO: add tests for this -func (u *upgradeWatcher) waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error { - return waitForWatcherWithTimeoutCreationFunc(ctx, log, markerFilePath, waitTime, context.WithTimeout) +func (u *upgradeWatcher) waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error { + // Wait for the watcher to be up and running + watcherContext, cancel := createTimeoutContext(ctx, waitTime) + defer cancel() + + markerWatcher := newMarkerFileWatcher(markerFilePath, log) + err := markerWatcher.Run(watcherContext) + if err != nil { + return fmt.Errorf("error starting update marker watcher: %w", err) + } + + log.Infof("waiting up to %s for upgrade watcher to set %s state in upgrade marker", waitTime, details.StateWatching) + + for { + select { + case updMarker := <-markerWatcher.Watch(): + if updMarker.Details != nil && updMarker.Details.State == details.StateWatching { + // watcher started and it is watching, all good + log.Infof("upgrade watcher set %s state in upgrade marker: exiting wait loop", details.StateWatching) + return nil + } + + case <-watcherContext.Done(): + 
log.Errorf("upgrade watcher did not start watching within %s or context has expired", waitTime) + return goerrors.Join(ErrWatcherNotStarted, watcherContext.Err()) + } + } } // UpdateActiveCommit updates active.commit file to point to active version. diff --git a/internal/pkg/agent/application/upgrade/step_mark_test.go b/internal/pkg/agent/application/upgrade/step_mark_test.go index fc1731e7b24..bc29c57a5a4 100644 --- a/internal/pkg/agent/application/upgrade/step_mark_test.go +++ b/internal/pkg/agent/application/upgrade/step_mark_test.go @@ -5,15 +5,20 @@ package upgrade import ( + "context" + "fmt" "os" "path/filepath" + "sync" "testing" "time" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" "github.com/elastic/elastic-agent/internal/pkg/fleetapi" + "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" ) func TestSaveAndLoadMarker_NoLoss(t *testing.T) { @@ -260,3 +265,138 @@ desired_outcome: true }) } } + +func TestWaitForWatcher(t *testing.T) { + wantErrWatcherNotStarted := func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorIs(t, err, ErrWatcherNotStarted, i) + } + + tests := []struct { + name string + states []details.State + stateChangeInterval time.Duration + cancelWaitContext bool + wantErr assert.ErrorAssertionFunc + }{ + { + name: "Happy path: watcher is watching already", + states: []details.State{details.StateWatching}, + stateChangeInterval: 1 * time.Millisecond, + wantErr: assert.NoError, + }, + { + name: "Sad path: watcher is never starting", + states: []details.State{details.StateReplacing}, + stateChangeInterval: 1 * time.Millisecond, + cancelWaitContext: true, + wantErr: wantErrWatcherNotStarted, + }, + { + name: "Runaround path: marker is jumping around and landing on watching", + states: []details.State{ + details.StateRequested, + details.StateScheduled, + details.StateDownloading, + details.StateExtracting, + details.StateReplacing, + details.StateRestarting, + details.StateWatching, + }, + stateChangeInterval: 1 * time.Millisecond, + wantErr: assert.NoError, + }, + { + name: "Timeout: marker is never created", + states: nil, + stateChangeInterval: 1 * time.Millisecond, + cancelWaitContext: true, + wantErr: wantErrWatcherNotStarted, + }, + { + name: "Timeout2: state doesn't get there in time", + states: []details.State{ + details.StateRequested, + details.StateScheduled, + details.StateDownloading, + details.StateExtracting, + details.StateReplacing, + details.StateRestarting, + }, + + stateChangeInterval: 1 * time.Millisecond, + cancelWaitContext: true, + wantErr: wantErrWatcherNotStarted, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + deadline, ok := t.Deadline() + if !ok { + deadline = time.Now().Add(5 * time.Second) + } + testCtx, testCancel := context.WithDeadline(context.Background(), deadline) + defer testCancel() + + tmpDir := t.TempDir() + updMarkerFilePath := filepath.Join(tmpDir, markerFilename) + + waitContext, waitCancel := context.WithCancel(testCtx) + defer waitCancel() + + fakeTimeout := 30 * time.Second + + // in order to take timing out of the equation provide a context that we can cancel manually + // still assert that the parent context and timeout passed are correct + var createContextFunc createContextWithTimeout = func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) { + assert.Same(t, testCtx, ctx, "parent context 
should be the same as the waitForWatcherCall") + assert.Equal(t, fakeTimeout, timeout, "timeout used in new context should be the same as testcase") + + return waitContext, waitCancel + } + + if len(tt.states) > 0 { + initialState := tt.states[0] + writeState(t, updMarkerFilePath, initialState) + } + + wg := new(sync.WaitGroup) + + var furtherStates []details.State + if len(tt.states) > 1 { + // we have more states to produce + furtherStates = tt.states[1:] + } + + wg.Add(1) + + // worker goroutine: writes out additional states while the test is blocked on waitOnWatcher() call and expires + // the wait context if cancelWaitContext is set to true. Timing of the goroutine is driven by stateChangeInterval. + go func() { + defer wg.Done() + tick := time.NewTicker(tt.stateChangeInterval) + defer tick.Stop() + for _, state := range furtherStates { + select { + case <-testCtx.Done(): + return + case <-tick.C: + writeState(t, updMarkerFilePath, state) + } + } + if tt.cancelWaitContext { + <-tick.C + waitCancel() + } + }() + + log, _ := loggertest.New(tt.name) + + watcher := &upgradeWatcher{} + + tt.wantErr(t, watcher.waitForWatcher(testCtx, log, updMarkerFilePath, fakeTimeout, createContextFunc), fmt.Sprintf("waitForWatcher %s, %v, %s, %s)", updMarkerFilePath, tt.states, tt.stateChangeInterval, fakeTimeout)) + + // wait for goroutines to finish + wg.Wait() + }) + } +} diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index c89920e8aeb..dcdf2692d7d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -117,8 +117,11 @@ type replacer interface { type relinker interface { changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error } + +type createContextWithTimeout func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) + type watcher interface { - waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration) error + waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error } @@ -473,7 +476,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime) + err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime, context.WithTimeout) if err != nil { err = goerrors.Join(err, watcherCmd.Process.Kill()) return nil, err @@ -491,37 +494,6 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return cb, nil } -type createContextWithTimeout func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) - -func waitForWatcherWithTimeoutCreationFunc(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error { - // Wait for the watcher to be up and running - watcherContext, cancel := createTimeoutContext(ctx, waitTime) - defer cancel() - - markerWatcher := newMarkerFileWatcher(markerFilePath, log) - err := markerWatcher.Run(watcherContext) - 
if err != nil { - return fmt.Errorf("error starting update marker watcher: %w", err) - } - - log.Infof("waiting up to %s for upgrade watcher to set %s state in upgrade marker", waitTime, details.StateWatching) - - for { - select { - case updMarker := <-markerWatcher.Watch(): - if updMarker.Details != nil && updMarker.Details.State == details.StateWatching { - // watcher started and it is watching, all good - log.Infof("upgrade watcher set %s state in upgrade marker: exiting wait loop", details.StateWatching) - return nil - } - - case <-watcherContext.Done(): - log.Errorf("upgrade watcher did not start watching within %s or context has expired", waitTime) - return goerrors.Join(ErrWatcherNotStarted, watcherContext.Err()) - } - } -} - // Ack acks last upgrade action func (u *Upgrader) Ack(ctx context.Context, acker acker.Acker) error { // get upgrade action diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 3ca869816e0..0660c99ba8a 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -16,7 +16,6 @@ import ( "reflect" "runtime" "strings" - "sync" "testing" "time" @@ -799,139 +798,6 @@ func TestCheckUpgrade(t *testing.T) { } } -func TestWaitForWatcher(t *testing.T) { - wantErrWatcherNotStarted := func(t assert.TestingT, err error, i ...interface{}) bool { - return assert.ErrorIs(t, err, ErrWatcherNotStarted, i) - } - - tests := []struct { - name string - states []details.State - stateChangeInterval time.Duration - cancelWaitContext bool - wantErr assert.ErrorAssertionFunc - }{ - { - name: "Happy path: watcher is watching already", - states: []details.State{details.StateWatching}, - stateChangeInterval: 1 * time.Millisecond, - wantErr: assert.NoError, - }, - { - name: "Sad path: watcher is never starting", - states: []details.State{details.StateReplacing}, - stateChangeInterval: 1 * time.Millisecond, - cancelWaitContext: true, - wantErr: wantErrWatcherNotStarted, - }, - { - name: "Runaround path: marker is jumping around and landing on watching", - states: []details.State{ - details.StateRequested, - details.StateScheduled, - details.StateDownloading, - details.StateExtracting, - details.StateReplacing, - details.StateRestarting, - details.StateWatching, - }, - stateChangeInterval: 1 * time.Millisecond, - wantErr: assert.NoError, - }, - { - name: "Timeout: marker is never created", - states: nil, - stateChangeInterval: 1 * time.Millisecond, - cancelWaitContext: true, - wantErr: wantErrWatcherNotStarted, - }, - { - name: "Timeout2: state doesn't get there in time", - states: []details.State{ - details.StateRequested, - details.StateScheduled, - details.StateDownloading, - details.StateExtracting, - details.StateReplacing, - details.StateRestarting, - }, - - stateChangeInterval: 1 * time.Millisecond, - cancelWaitContext: true, - wantErr: wantErrWatcherNotStarted, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - deadline, ok := t.Deadline() - if !ok { - deadline = time.Now().Add(5 * time.Second) - } - testCtx, testCancel := context.WithDeadline(context.Background(), deadline) - defer testCancel() - - tmpDir := t.TempDir() - updMarkerFilePath := filepath.Join(tmpDir, markerFilename) - - waitContext, waitCancel := context.WithCancel(testCtx) - defer waitCancel() - - fakeTimeout := 30 * time.Second - - // in order to take timing out of the equation provide a context that we can cancel manually - // still assert that 
the parent context and timeout passed are correct - var createContextFunc createContextWithTimeout = func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) { - assert.Same(t, testCtx, ctx, "parent context should be the same as the waitForWatcherCall") - assert.Equal(t, fakeTimeout, timeout, "timeout used in new context should be the same as testcase") - - return waitContext, waitCancel - } - - if len(tt.states) > 0 { - initialState := tt.states[0] - writeState(t, updMarkerFilePath, initialState) - } - - wg := new(sync.WaitGroup) - - var furtherStates []details.State - if len(tt.states) > 1 { - // we have more states to produce - furtherStates = tt.states[1:] - } - - wg.Add(1) - - // worker goroutine: writes out additional states while the test is blocked on waitOnWatcher() call and expires - // the wait context if cancelWaitContext is set to true. Timing of the goroutine is driven by stateChangeInterval. - go func() { - defer wg.Done() - tick := time.NewTicker(tt.stateChangeInterval) - defer tick.Stop() - for _, state := range furtherStates { - select { - case <-testCtx.Done(): - return - case <-tick.C: - writeState(t, updMarkerFilePath, state) - } - } - if tt.cancelWaitContext { - <-tick.C - waitCancel() - } - }() - - log, _ := loggertest.New(tt.name) - - tt.wantErr(t, waitForWatcherWithTimeoutCreationFunc(testCtx, log, updMarkerFilePath, fakeTimeout, createContextFunc), fmt.Sprintf("waitForWatcher %s, %v, %s, %s)", updMarkerFilePath, tt.states, tt.stateChangeInterval, fakeTimeout)) - - // wait for goroutines to finish - wg.Wait() - }) - } -} - func writeState(t *testing.T, path string, state details.State) { ms := newMarkerSerializer(&UpdateMarker{ Version: "version", From 659037e2c17cb33f7f3b91282d5c77f8e5c85702 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 14:59:01 +0300 Subject: [PATCH 055/127] enhancement(5235): removed replacer interface --- internal/pkg/agent/application/upgrade/upgrade.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index dcdf2692d7d..7b8b75d35b6 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -108,12 +108,6 @@ type unpacker interface { detectFlavor(topPath, flavor string) (string, error) } -type replacer interface { - copyActionStore(log *logger.Logger, newHome string) error - copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error - changeSymlink(log *logger.Logger, topPath, symlinkPath, newPath string) error -} - type relinker interface { changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error } From dd9ea3112f8ed54e1fbdbe5ebeed5bd5d164c049 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 15:03:22 +0300 Subject: [PATCH 056/127] enhancement(5235): moved directory copy functions, moved relevant tests --- .../application/upgrade/directory_copy.go | 139 ++++++++++++++++++ .../upgrade/directory_copy_test.go | 90 ++++++++++++ .../pkg/agent/application/upgrade/upgrade.go | 135 +---------------- .../agent/application/upgrade/upgrade_test.go | 80 ---------- 4 files changed, 234 insertions(+), 210 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/directory_copy.go create mode 100644 internal/pkg/agent/application/upgrade/directory_copy_test.go diff --git a/internal/pkg/agent/application/upgrade/directory_copy.go 
b/internal/pkg/agent/application/upgrade/directory_copy.go new file mode 100644 index 00000000000..0a0b7b9501e --- /dev/null +++ b/internal/pkg/agent/application/upgrade/directory_copy.go @@ -0,0 +1,139 @@ +package upgrade + +import ( + "os" + "path/filepath" + "runtime" + + "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/errors" + "github.com/elastic/elastic-agent/internal/pkg/agent/install" + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/otiai10/copy" +) + +type directoryCopier struct { +} + +// TODO: add tests for this +// Update to accept copydir function +func (d *directoryCopier) copyActionStore(log *logger.Logger, newHome string) error { + // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists + storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} + log.Infow("Copying action store", "new_home_path", newHome) + + for _, currentActionStorePath := range storePaths { + newActionStorePath := filepath.Join(newHome, filepath.Base(currentActionStorePath)) + log.Infow("Copying action store path", "from", currentActionStorePath, "to", newActionStorePath) + currentActionStore, err := os.ReadFile(currentActionStorePath) + if os.IsNotExist(err) { + // nothing to copy + continue + } + if err != nil { + return err + } + + if err := os.WriteFile(newActionStorePath, currentActionStore, 0o600); err != nil { + return err + } + } + + return nil +} + +// TODO: add tests for this +// Update to accept copydir function +func (d *directoryCopier) copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { + log.Infow("Copying run directory", "new_run_path", newRunPath, "old_run_path", oldRunPath) + + if err := os.MkdirAll(newRunPath, runDirMod); err != nil { + return errors.New(err, "failed to create run directory") + } + + err := copyDir(log, oldRunPath, newRunPath, true) + if os.IsNotExist(err) { + // nothing to copy, operation ok + log.Infow("Run directory not present", "old_run_path", oldRunPath) + return nil + } + if err != nil { + return errors.New(err, "failed to copy %q to %q", oldRunPath, newRunPath) + } + + return nil +} + +func copyDir(l *logger.Logger, from, to string, ignoreErrs bool) error { + var onErr func(src, dst string, err error) error + + if ignoreErrs { + onErr = func(src, dst string, err error) error { + if err == nil { + return nil + } + + // ignore all errors, just log them + l.Infof("ignoring error: failed to copy %q to %q: %s", src, dst, err.Error()) + return nil + } + } + + // Try to detect if we are running with SSDs. If we are increase the copy concurrency, + // otherwise fall back to the default. 
+ copyConcurrency := 1 + hasSSDs, detectHWErr := install.HasAllSSDs() + if detectHWErr != nil { + l.Infow("Could not determine block storage type, disabling copy concurrency", "error.message", detectHWErr) + } + if hasSSDs { + copyConcurrency = runtime.NumCPU() * 4 + } + + return copy.Copy(from, to, copy.Options{ + OnSymlink: func(_ string) copy.SymlinkAction { + return copy.Shallow + }, + Sync: true, + OnError: onErr, + NumOfWorkers: int64(copyConcurrency), + }) +} + +func readProcessDirs(runtimeDir string) ([]string, error) { + pipelines, err := readDirs(runtimeDir) + if err != nil { + return nil, err + } + + processDirs := make([]string, 0) + for _, p := range pipelines { + dirs, err := readDirs(p) + if err != nil { + return nil, err + } + + processDirs = append(processDirs, dirs...) + } + + return processDirs, nil +} + +// readDirs returns list of absolute paths to directories inside specified path. +func readDirs(dir string) ([]string, error) { + dirEntries, err := os.ReadDir(dir) + if err != nil && !os.IsNotExist(err) { + return nil, err + } + + dirs := make([]string, 0, len(dirEntries)) + for _, de := range dirEntries { + if !de.IsDir() { + continue + } + + dirs = append(dirs, filepath.Join(dir, de.Name())) + } + + return dirs, nil +} diff --git a/internal/pkg/agent/application/upgrade/directory_copy_test.go b/internal/pkg/agent/application/upgrade/directory_copy_test.go new file mode 100644 index 00000000000..d32750dbea0 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/directory_copy_test.go @@ -0,0 +1,90 @@ +package upgrade + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/gofrs/flock" + "github.com/stretchr/testify/require" +) + +func Test_CopyFile(t *testing.T) { + l, _ := logger.New("test", false) + tt := []struct { + Name string + From string + To string + IgnoreErr bool + KeepOpen bool + ExpectedErr bool + }{ + { + "Existing, no onerr", + filepath.Join(".", "test", "case1", "README.md"), + filepath.Join(".", "test", "case1", "copy", "README.md"), + false, + false, + false, + }, + { + "Existing but open", + filepath.Join(".", "test", "case2", "README.md"), + filepath.Join(".", "test", "case2", "copy", "README.md"), + false, + true, + runtime.GOOS == "windows", // this fails only on, + }, + { + "Existing but open, ignore errors", + filepath.Join(".", "test", "case3", "README.md"), + filepath.Join(".", "test", "case3", "copy", "README.md"), + true, + true, + false, + }, + { + "Not existing, accept errors", + filepath.Join(".", "test", "case4", "README.md"), + filepath.Join(".", "test", "case4", "copy", "README.md"), + false, + false, + true, + }, + { + "Not existing, ignore errors", + filepath.Join(".", "test", "case4", "README.md"), + filepath.Join(".", "test", "case4", "copy", "README.md"), + true, + false, + false, + }, + } + + for _, tc := range tt { + t.Run(tc.Name, func(t *testing.T) { + defer func() { + // cleanup + _ = os.RemoveAll(filepath.Dir(tc.To)) + }() + + var fl *flock.Flock + if tc.KeepOpen { + // this uses syscalls to create inter-process lock + fl = flock.New(tc.From) + _, err := fl.TryLock() + require.NoError(t, err) + + defer func() { + require.NoError(t, fl.Unlock()) + }() + + } + + err := copyDir(l, tc.From, tc.To, tc.IgnoreErr) + require.Equal(t, tc.ExpectedErr, err != nil, err) + }) + } +} diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 7b8b75d35b6..ea68bdacdc8 100644 --- 
a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -10,11 +10,9 @@ import ( "fmt" "os" "path/filepath" - "runtime" "strings" "time" - "github.com/otiai10/copy" "go.elastic.co/apm/v2" "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" @@ -27,7 +25,6 @@ import ( upgradeErrors "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/errors" "github.com/elastic/elastic-agent/internal/pkg/agent/configuration" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" - "github.com/elastic/elastic-agent/internal/pkg/agent/install" "github.com/elastic/elastic-agent/internal/pkg/config" "github.com/elastic/elastic-agent/internal/pkg/fleetapi" "github.com/elastic/elastic-agent/internal/pkg/fleetapi/acker" @@ -120,6 +117,11 @@ type watcher interface { markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error } +type agentDirectoryCopier interface { + copyActionStore(log *logger.Logger, newHome string) error + copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error +} + // Upgrader performs an upgrade type Upgrader struct { log *logger.Logger @@ -550,59 +552,6 @@ func isSameVersion(log *logger.Logger, current agentVersion, newVersion agentVer return current == newVersion } -type agentDirectoryCopier interface { - copyActionStore(log *logger.Logger, newHome string) error - copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error -} - -type directoryCopier struct { -} - -func (u *directoryCopier) copyActionStore(log *logger.Logger, newHome string) error { - // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists - storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} - log.Infow("Copying action store", "new_home_path", newHome) - - for _, currentActionStorePath := range storePaths { - newActionStorePath := filepath.Join(newHome, filepath.Base(currentActionStorePath)) - log.Infow("Copying action store path", "from", currentActionStorePath, "to", newActionStorePath) - currentActionStore, err := os.ReadFile(currentActionStorePath) - if os.IsNotExist(err) { - // nothing to copy - continue - } - if err != nil { - return err - } - - if err := os.WriteFile(newActionStorePath, currentActionStore, 0o600); err != nil { - return err - } - } - - return nil -} - -func (u *directoryCopier) copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { - log.Infow("Copying run directory", "new_run_path", newRunPath, "old_run_path", oldRunPath) - - if err := os.MkdirAll(newRunPath, runDirMod); err != nil { - return errors.New(err, "failed to create run directory") - } - - err := copyDir(log, oldRunPath, newRunPath, true) - if os.IsNotExist(err) { - // nothing to copy, operation ok - log.Infow("Run directory not present", "old_run_path", oldRunPath) - return nil - } - if err != nil { - return errors.New(err, "failed to copy %q to %q", oldRunPath, newRunPath) - } - - return nil -} - // shutdownCallback returns a callback function to be executing during shutdown once all processes are closed. // this goes through runtime directory of agent and copies all the state files created by processes to new versioned // home directory with updated process name to match new version. 
@@ -636,80 +585,6 @@ func shutdownCallback(l *logger.Logger, homePath, prevVersion, newVersion, newHo } } -func readProcessDirs(runtimeDir string) ([]string, error) { - pipelines, err := readDirs(runtimeDir) - if err != nil { - return nil, err - } - - processDirs := make([]string, 0) - for _, p := range pipelines { - dirs, err := readDirs(p) - if err != nil { - return nil, err - } - - processDirs = append(processDirs, dirs...) - } - - return processDirs, nil -} - -// readDirs returns list of absolute paths to directories inside specified path. -func readDirs(dir string) ([]string, error) { - dirEntries, err := os.ReadDir(dir) - if err != nil && !os.IsNotExist(err) { - return nil, err - } - - dirs := make([]string, 0, len(dirEntries)) - for _, de := range dirEntries { - if !de.IsDir() { - continue - } - - dirs = append(dirs, filepath.Join(dir, de.Name())) - } - - return dirs, nil -} - -func copyDir(l *logger.Logger, from, to string, ignoreErrs bool) error { - var onErr func(src, dst string, err error) error - - if ignoreErrs { - onErr = func(src, dst string, err error) error { - if err == nil { - return nil - } - - // ignore all errors, just log them - l.Infof("ignoring error: failed to copy %q to %q: %s", src, dst, err.Error()) - return nil - } - } - - // Try to detect if we are running with SSDs. If we are increase the copy concurrency, - // otherwise fall back to the default. - copyConcurrency := 1 - hasSSDs, detectHWErr := install.HasAllSSDs() - if detectHWErr != nil { - l.Infow("Could not determine block storage type, disabling copy concurrency", "error.message", detectHWErr) - } - if hasSSDs { - copyConcurrency = runtime.NumCPU() * 4 - } - - return copy.Copy(from, to, copy.Options{ - OnSymlink: func(_ string) copy.SymlinkAction { - return copy.Shallow - }, - Sync: true, - OnError: onErr, - NumOfWorkers: int64(copyConcurrency), - }) -} - // IsInProgress checks if an Elastic Agent upgrade is already in progress. It // returns true if so and false if not. // `c client.Client` is expected to be a connected client. 
diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 0660c99ba8a..b6465856f6b 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -14,12 +14,10 @@ import ( "os" "path/filepath" "reflect" - "runtime" "strings" "testing" "time" - "github.com/gofrs/flock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" @@ -51,84 +49,6 @@ import ( mocks "github.com/elastic/elastic-agent/testing/mocks/pkg/control/v2/client" ) -func Test_CopyFile(t *testing.T) { - l, _ := logger.New("test", false) - tt := []struct { - Name string - From string - To string - IgnoreErr bool - KeepOpen bool - ExpectedErr bool - }{ - { - "Existing, no onerr", - filepath.Join(".", "test", "case1", "README.md"), - filepath.Join(".", "test", "case1", "copy", "README.md"), - false, - false, - false, - }, - { - "Existing but open", - filepath.Join(".", "test", "case2", "README.md"), - filepath.Join(".", "test", "case2", "copy", "README.md"), - false, - true, - runtime.GOOS == "windows", // this fails only on, - }, - { - "Existing but open, ignore errors", - filepath.Join(".", "test", "case3", "README.md"), - filepath.Join(".", "test", "case3", "copy", "README.md"), - true, - true, - false, - }, - { - "Not existing, accept errors", - filepath.Join(".", "test", "case4", "README.md"), - filepath.Join(".", "test", "case4", "copy", "README.md"), - false, - false, - true, - }, - { - "Not existing, ignore errors", - filepath.Join(".", "test", "case4", "README.md"), - filepath.Join(".", "test", "case4", "copy", "README.md"), - true, - false, - false, - }, - } - - for _, tc := range tt { - t.Run(tc.Name, func(t *testing.T) { - defer func() { - // cleanup - _ = os.RemoveAll(filepath.Dir(tc.To)) - }() - - var fl *flock.Flock - if tc.KeepOpen { - // this uses syscalls to create inter-process lock - fl = flock.New(tc.From) - _, err := fl.TryLock() - require.NoError(t, err) - - defer func() { - require.NoError(t, fl.Unlock()) - }() - - } - - err := copyDir(l, tc.From, tc.To, tc.IgnoreErr) - require.Equal(t, tc.ExpectedErr, err != nil, err) - }) - } -} - func TestShutdownCallback(t *testing.T) { type testcase struct { name string From 99335837b91d980c52e48cc40a18883f5c78b0c1 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Wed, 6 Aug 2025 23:51:16 +0300 Subject: [PATCH 057/127] enhancement(5235): added invokeWatcher to upgradeWatcher to implement the watcher interface --- internal/pkg/agent/application/upgrade/step_mark.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index 448d09d6e09..12f13cbd722 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -10,6 +10,7 @@ import ( goerrors "errors" "fmt" "os" + "os/exec" "path/filepath" "time" @@ -370,3 +371,7 @@ func saveMarkerToPath(marker *UpdateMarker, markerFile string, shouldFsync bool) func markerFilePath(dataDirPath string) string { return filepath.Join(dataDirPath, markerFilename) } + +func (u *upgradeWatcher) invokeWatcher(log *logger.Logger, agentExecutable string) (*exec.Cmd, error) { + return InvokeWatcher(log, agentExecutable) +} From 20eba22176249bb42df9a3d70fbe25933ccf7211 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 00:04:53 +0300 Subject: [PATCH 
058/127] enhancement(5235): added upgrade executor --- .../pkg/agent/application/upgrade/upgrade.go | 149 ++++++++++++++- .../application/upgrade/upgrade_executor.go | 178 ++++++++++++++++++ 2 files changed, 318 insertions(+), 9 deletions(-) create mode 100644 internal/pkg/agent/application/upgrade/upgrade_executor.go diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index ea68bdacdc8..9a9c03f9ebb 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -9,6 +9,7 @@ import ( goerrors "errors" "fmt" "os" + "os/exec" "path/filepath" "strings" "time" @@ -115,6 +116,7 @@ type watcher interface { waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error + invokeWatcher(log *logger.Logger, agentExecutable string) (*exec.Cmd, error) } type agentDirectoryCopier interface { @@ -137,6 +139,7 @@ type Upgrader struct { relinker relinker watcher watcher directoryCopier agentDirectoryCopier + upgradeExecutor upgradeExecutor } // IsUpgradeable when agent is installed and running as a service or flag was provided. @@ -159,22 +162,34 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A downloaderFactories: downloaderFactories, } + upgradeCleaner := &upgradeCleanup{ + log: log, + cleanupFuncs: []func() error{}, + } + return &Upgrader{ - log: log, - settings: settings, - agentInfo: agentInfo, - upgradeable: IsUpgradeable(), - markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), - upgradeCleaner: &upgradeCleanup{ - log: log, - cleanupFuncs: []func() error{}, - }, + log: log, + settings: settings, + agentInfo: agentInfo, + upgradeable: IsUpgradeable(), + markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), + upgradeCleaner: upgradeCleaner, diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), unpacker: &upgradeUnpacker{log: log}, relinker: &upgradeRelinker{}, watcher: &upgradeWatcher{}, directoryCopier: &directoryCopier{}, + upgradeExecutor: &ExecuteUpgrade{ + log: log, + upgradeCleaner: upgradeCleaner, + artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), + unpacker: &upgradeUnpacker{log: log}, + relinker: &upgradeRelinker{}, + watcher: &upgradeWatcher{}, + directoryCopier: &directoryCopier{}, + diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), + }, }, nil } @@ -277,6 +292,10 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { + return u.newUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
+} + +func (u *Upgrader) oldUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { if err != nil { cleanupErr := u.upgradeCleaner.cleanup(err) @@ -490,6 +509,118 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return cb, nil } +func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { + defer func() { + if err != nil { + cleanupErr := u.upgradeCleaner.cleanup(err) + if cleanupErr != nil { + u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) + err = goerrors.Join(err, cleanupErr) + } + } + }() + + u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) + + currentVersion := agentVersion{ + version: release.Version(), + snapshot: release.Snapshot(), + hash: release.Commit(), + fips: release.FIPSDistribution(), + } + + // Compare versions and exit before downloading anything if the upgrade + // is for the same release version that is currently running + if isSameReleaseVersion(u.log, currentVersion, version) { + u.log.Warnf("Upgrade action skipped because agent is already at version %s", currentVersion) + return nil, ErrUpgradeSameVersion + } + + // Inform the Upgrade Marker Watcher that we've started upgrading. Note that this + // is only possible to do in-memory since, today, the process that's initiating + // the upgrade is the same as the Agent process in which the Upgrade Marker Watcher is + // running. If/when, in the future, the process initiating the upgrade is separated + // from the Agent process in which the Upgrade Marker Watcher is running, such in-memory + // communication will need to be replaced with inter-process communication (e.g. via + // a file, e.g. the Upgrade Marker file or something else). + u.markerWatcher.SetUpgradeStarted() + + span, ctx := apm.StartSpan(ctx, "upgrade", "app.internal") + defer span.End() + + sourceURI = u.sourceURI(sourceURI) + + parsedTargetVersion, err := agtversion.ParseVersion(version) + if err != nil { + return nil, fmt.Errorf("error parsing version %q: %w", version, err) + } + + downloadResult, err := u.upgradeExecutor.downloadArtifact(ctx, parsedTargetVersion, u.agentInfo, sourceURI, u.fleetServerURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + if err != nil { + return nil, err + } + + unpackRes, err := u.upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, paths.Top(), "", paths.Data(), paths.Home(), det, currentVersion) + if err != nil { + return nil, err + } + + newRunPath := filepath.Join(unpackRes.newHome, "run") + oldRunPath := filepath.Join(paths.Run()) + + symlinkPath := filepath.Join(paths.Top(), agentName) + u.log.Infof("symlinkPath: %s", symlinkPath) + + // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
+ newPath := paths.BinaryPath(filepath.Join(paths.Top(), unpackRes.VersionedHome), agentName) + u.log.Infof("newPath: %s", newPath) + + currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) + if err != nil { + return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) + } + + err = u.upgradeExecutor.replaceOldWithNew(u.log, unpackRes, currentVersionedHome, paths.Top(), agentName, paths.Home(), oldRunPath, newRunPath, symlinkPath, newPath, det) + if err != nil { + return nil, err + } + + // We rotated the symlink successfully: prepare the current and previous agent installation details for the update marker + // In update marker the `current` agent install is the one where the symlink is pointing (the new one we didn't start yet) + // while the `previous` install is the currently executing elastic-agent that is no longer reachable via the symlink. + // After the restart at the end of the function, everything lines up correctly. + current := agentInstall{ + parsedVersion: parsedTargetVersion, + version: version, + hash: unpackRes.Hash, + versionedHome: unpackRes.VersionedHome, + } + + previousParsedVersion := currentagtversion.GetParsedAgentPackageVersion() + previous := agentInstall{ + parsedVersion: previousParsedVersion, + version: release.VersionWithSnapshot(), + hash: release.Commit(), + versionedHome: currentVersionedHome, + } + + err = u.upgradeExecutor.watchNewAgent(ctx, u.log, markerFilePath(paths.Data()), paths.Top(), paths.Data(), watcherMaxWaitTime, context.WithTimeout, current, previous, action, det, OUTCOME_UPGRADE) + if err != nil { + return nil, err + } + + cb := shutdownCallback(u.log, paths.Home(), release.Version(), version, filepath.Join(paths.Top(), unpackRes.VersionedHome)) + + // Clean everything from the downloads dir + u.log.Infow("Removing downloads directory", "file.path", paths.Downloads()) + err = os.RemoveAll(paths.Downloads()) + if err != nil { + u.log.Errorw("Unable to clean downloads after update", "error.message", err, "file.path", paths.Downloads()) + } + + return cb, nil +} + // Ack acks last upgrade action func (u *Upgrader) Ack(ctx context.Context, acker acker.Acker) error { // get upgrade action diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go new file mode 100644 index 00000000000..ef86baf2212 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -0,0 +1,178 @@ +package upgrade + +import ( + "context" + goerrors "errors" + "fmt" + "path/filepath" + "time" + + "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + "github.com/elastic/elastic-agent/internal/pkg/agent/errors" + "github.com/elastic/elastic-agent/internal/pkg/fleetapi" + "github.com/elastic/elastic-agent/pkg/core/logger" + agtversion "github.com/elastic/elastic-agent/pkg/version" +) + +type upgradeExecutor interface { + downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + 
unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) + replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error + watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error +} + +type ExecuteUpgrade struct { + log *logger.Logger + upgradeCleaner upgradeCleaner + artifactDownloader artifactDownloader + unpacker unpacker + relinker relinker + watcher watcher + directoryCopier agentDirectoryCopier + diskSpaceErrorFunc func(error) error +} + +type unpackStepResult struct { + newHome string + newHash string + unpackResult +} + +func (u *ExecuteUpgrade) downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + err := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, agentInfo.Version()) + if err != nil { + u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) + } + + upgradeDetails.SetState(details.StateDownloading) + + downloadResult, err := u.artifactDownloader.downloadArtifact(ctx, parsedTargetVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + if err != nil { + // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files + // This may have an issue if users are upgrading to the same version number. 
+ if dErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, agentInfo.Version()); dErr != nil { + u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) + } + + return downloadResult, err + } + + return downloadResult, u.upgradeCleaner.setupArchiveCleanup(downloadResult) +} + +func (u *ExecuteUpgrade) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) { + upgradeDetails.SetState(details.StateExtracting) + + metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) + if err != nil { + return unpackStepResult{}, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) + } + + newVersion := u.unpacker.extractAgentVersion(metadata, version) + + if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { + return unpackStepResult{}, fmt.Errorf("cannot upgrade the agent: %w", err) + } + + u.log.Infow("Unpacking agent package", "version", newVersion) + + // Nice to have: add check that no archive files end up in the current versioned home + // default to no flavor to avoid breaking behavior + + // no default flavor, keep everything in case flavor is not specified + // in case of error fallback to keep-all + detectedFlavor, detectFlavorErr := u.unpacker.detectFlavor(topPath, "") + if detectFlavorErr != nil { + u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", topPath, detectFlavorErr) + } + u.log.Debugf("detected used flavor: %q", detectedFlavor) + + unpackRes, unpackErr := u.unpacker.unpack(version, downloadResult.ArtifactPath, dataPath, detectedFlavor) + unpackErr = u.diskSpaceErrorFunc(unpackErr) + + if unpackRes.VersionedHome == "" { + return unpackStepResult{}, goerrors.Join(unpackErr, errors.New("unknown versioned home")) + } + + newHash := unpackRes.Hash + if newHash == "" { + return unpackStepResult{}, goerrors.Join(unpackErr, errors.New("unknown hash")) + } + + newHome := filepath.Join(topPath, unpackRes.VersionedHome) + + unpackStepResult := unpackStepResult{ + newHome: newHome, + newHash: newHash, + unpackResult: unpackRes, + } + + if err := u.upgradeCleaner.setupUnpackCleanup(newHome, currentHome); err != nil { + return unpackStepResult, goerrors.Join(unpackErr, err) + } + + return unpackStepResult, unpackErr +} + +func (u *ExecuteUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { + if err := u.directoryCopier.copyActionStore(u.log, unpackStepResult.newHome); err != nil { + return fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) + } + + if err := u.directoryCopier.copyRunDirectory(u.log, oldRunPath, newRunPath); err != nil { + return fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) + } + + upgradeDetails.SetState(details.StateReplacing) + + // create symlink to the /elastic-agent + // hashedDir := unpackRes.VersionedHome + // u.log.Infof("hashedDir: %s", hashedDir) + + // symlinkPath := filepath.Join(topPath, agentName) + // u.log.Infof("symlinkPath: %s", symlinkPath) + + // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
+ // newPath := paths.BinaryPath(filepath.Join(topPath, unpackStepResult.VersionedHome), agentName) + // u.log.Infof("newPath: %s", newPath) + + // currentVersionedHome, err := filepath.Rel(topPath, currentHome) + // if err != nil { + // return fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", currentHome, topPath, err) + // } + + if err := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, topPath, currentVersionedHome, agentName); err != nil { + return fmt.Errorf("error setting up symlink cleanup: %w", err) + } + + u.log.Infof("currentVersionedHome: %s", currentVersionedHome) + + return u.relinker.changeSymlink(u.log, topPath, symlinkPath, newBinPath) +} + +func (u *ExecuteUpgrade) watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { + if err := u.watcher.markUpgrade(u.log, + dataPath, // data dir to place the marker in + newAgentInstall, // new agent version data + previousAgentInstall, // old agent version data + action, upgradeDetails, upgradeOutcome); err != nil { + + return err + } + + watcherExecutable := u.watcher.selectWatcherExecutable(topPath, previousAgentInstall, newAgentInstall) + + watcherCmd, err := u.watcher.invokeWatcher(u.log, watcherExecutable) + if err != nil { + return err + } + + if err := u.watcher.waitForWatcher(ctx, u.log, markerFilePath, waitTime, createTimeoutContext); err != nil { + return goerrors.Join(err, watcherCmd.Process.Kill()) + } + + return nil +} From 876ff2fb13369987a986cb827d0bfcd456201f71 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 00:19:19 +0300 Subject: [PATCH 059/127] enhancement(5235): removed commented code --- .../application/upgrade/upgrade_executor.go | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index ef86baf2212..05442e3d1b5 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -128,28 +128,10 @@ func (u *ExecuteUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult upgradeDetails.SetState(details.StateReplacing) - // create symlink to the /elastic-agent - // hashedDir := unpackRes.VersionedHome - // u.log.Infof("hashedDir: %s", hashedDir) - - // symlinkPath := filepath.Join(topPath, agentName) - // u.log.Infof("symlinkPath: %s", symlinkPath) - - // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
- // newPath := paths.BinaryPath(filepath.Join(topPath, unpackStepResult.VersionedHome), agentName) - // u.log.Infof("newPath: %s", newPath) - - // currentVersionedHome, err := filepath.Rel(topPath, currentHome) - // if err != nil { - // return fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", currentHome, topPath, err) - // } - if err := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, topPath, currentVersionedHome, agentName); err != nil { return fmt.Errorf("error setting up symlink cleanup: %w", err) } - u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - return u.relinker.changeSymlink(u.log, topPath, symlinkPath, newBinPath) } From 185e5ec0b36a4d200be3febcc8d530222b2e8bd6 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 02:10:49 +0300 Subject: [PATCH 060/127] enhancement(5235): updated download error tests --- internal/pkg/agent/application/upgrade/upgrade.go | 3 ++- .../pkg/agent/application/upgrade/upgrade_executor.go | 10 +++++----- internal/pkg/agent/application/upgrade/upgrade_test.go | 7 ++++++- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 9a9c03f9ebb..82e2294b786 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -180,7 +180,7 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A relinker: &upgradeRelinker{}, watcher: &upgradeWatcher{}, directoryCopier: &directoryCopier{}, - upgradeExecutor: &ExecuteUpgrade{ + upgradeExecutor: &executeUpgrade{ log: log, upgradeCleaner: upgradeCleaner, artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), @@ -293,6 +293,7 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { return u.newUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + // return u.oldUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
} func (u *Upgrader) oldUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 05442e3d1b5..a71a0cce6d2 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -24,7 +24,7 @@ type upgradeExecutor interface { watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } -type ExecuteUpgrade struct { +type executeUpgrade struct { log *logger.Logger upgradeCleaner upgradeCleaner artifactDownloader artifactDownloader @@ -41,7 +41,7 @@ type unpackStepResult struct { unpackResult } -func (u *ExecuteUpgrade) downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { +func (u *executeUpgrade) downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { err := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, agentInfo.Version()) if err != nil { u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) @@ -63,7 +63,7 @@ func (u *ExecuteUpgrade) downloadArtifact(ctx context.Context, parsedTargetVersi return downloadResult, u.upgradeCleaner.setupArchiveCleanup(downloadResult) } -func (u *ExecuteUpgrade) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) { +func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) { upgradeDetails.SetState(details.StateExtracting) metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) @@ -117,7 +117,7 @@ func (u *ExecuteUpgrade) unpackArtifact(downloadResult download.DownloadResult, return unpackStepResult, unpackErr } -func (u *ExecuteUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { +func (u *executeUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { if err := u.directoryCopier.copyActionStore(u.log, unpackStepResult.newHome); err != nil { return fmt.Errorf("failed to copy action store: %w", 
u.diskSpaceErrorFunc(err)) } @@ -135,7 +135,7 @@ func (u *ExecuteUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult return u.relinker.changeSymlink(u.log, topPath, symlinkPath, newBinPath) } -func (u *ExecuteUpgrade) watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { +func (u *executeUpgrade) watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { if err := u.watcher.markUpgrade(u.log, dataPath, // data dir to place the marker in newAgentInstall, // new agent version data diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index b6465856f6b..0ec93b5c95d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1147,13 +1147,18 @@ func TestUpgradeDownloadErrors(t *testing.T) { downloaderFactoryProvider := tc.factoryProviderFunc(&config, copyFunc) artifactDownloader := newUpgradeArtifactDownloader(log, &config, downloaderFactoryProvider) + executeUpgrade := &executeUpgrade{ + log: log, + artifactDownloader: artifactDownloader, + } mockAgentInfo := mockinfo.NewAgent(t) mockAgentInfo.On("Version").Return(version.String()) upgrader, err := NewUpgrader(log, &config, mockAgentInfo) require.NoError(t, err) - upgrader.artifactDownloader = artifactDownloader + // upgrader.artifactDownloader = artifactDownloader + upgrader.upgradeExecutor = executeUpgrade _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) require.Error(t, err, "expected error got none") From fb97058c456c7409b5972d45a61dd05d3508d6b5 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 03:01:17 +0300 Subject: [PATCH 061/127] enhancement(5235): refactored interfaces and commented unused code --- .../pkg/agent/application/upgrade/upgrade.go | 512 +++++++++--------- .../application/upgrade/upgrade_executor.go | 34 +- 2 files changed, 273 insertions(+), 273 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 82e2294b786..65747eb687d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -9,7 +9,6 @@ import ( goerrors "errors" "fmt" "os" - "os/exec" "path/filepath" "strings" "time" @@ -94,52 +93,29 @@ type upgradeCleaner interface { cleanup(err error) error } -type artifactDownloader interface { - downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) - cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error -} - -type unpacker interface { - getPackageMetadata(archivePath string) (packageMetadata, error) - extractAgentVersion(metadata packageMetadata, version string) agentVersion - 
unpack(version, archivePath, topPath, flavor string) (unpackResult, error) - detectFlavor(topPath, flavor string) (string, error) -} - -type relinker interface { - changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error -} - -type createContextWithTimeout func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) - -type watcher interface { - waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error - selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string - markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error - invokeWatcher(log *logger.Logger, agentExecutable string) (*exec.Cmd, error) -} - -type agentDirectoryCopier interface { - copyActionStore(log *logger.Logger, newHome string) error - copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error +type upgradeExecutor interface { + downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) + replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error + watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } // Upgrader performs an upgrade type Upgrader struct { - log *logger.Logger - settings *artifact.Config - agentInfo info.Agent - upgradeable bool - fleetServerURI string - markerWatcher MarkerWatcher - upgradeCleaner upgradeCleaner - diskSpaceErrorFunc func(error) error - artifactDownloader artifactDownloader - unpacker unpacker - relinker relinker - watcher watcher - directoryCopier agentDirectoryCopier - upgradeExecutor upgradeExecutor + log *logger.Logger + settings *artifact.Config + agentInfo info.Agent + upgradeable bool + fleetServerURI string + markerWatcher MarkerWatcher + upgradeCleaner upgradeCleaner + // diskSpaceErrorFunc func(error) error + // artifactDownloader artifactDownloader + // unpacker unpacker + // relinker relinker + // watcher watcher + // directoryCopier agentDirectoryCopier + upgradeExecutor upgradeExecutor } // IsUpgradeable when agent is installed and running as a service or flag was provided. 
@@ -168,18 +144,18 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A } return &Upgrader{ - log: log, - settings: settings, - agentInfo: agentInfo, - upgradeable: IsUpgradeable(), - markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), - upgradeCleaner: upgradeCleaner, - diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), - artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), - unpacker: &upgradeUnpacker{log: log}, - relinker: &upgradeRelinker{}, - watcher: &upgradeWatcher{}, - directoryCopier: &directoryCopier{}, + log: log, + settings: settings, + agentInfo: agentInfo, + upgradeable: IsUpgradeable(), + markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), + upgradeCleaner: upgradeCleaner, + // diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), + // artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), + // unpacker: &upgradeUnpacker{log: log}, + // relinker: &upgradeRelinker{}, + // watcher: &upgradeWatcher{}, + // directoryCopier: &directoryCopier{}, upgradeExecutor: &executeUpgrade{ log: log, upgradeCleaner: upgradeCleaner, @@ -296,219 +272,219 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string // return u.oldUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) } -func (u *Upgrader) oldUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { - defer func() { - if err != nil { - cleanupErr := u.upgradeCleaner.cleanup(err) - if cleanupErr != nil { - u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) - err = goerrors.Join(err, cleanupErr) - } - } - }() - - u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) - - currentVersion := agentVersion{ - version: release.Version(), - snapshot: release.Snapshot(), - hash: release.Commit(), - fips: release.FIPSDistribution(), - } - - // Compare versions and exit before downloading anything if the upgrade - // is for the same release version that is currently running - if isSameReleaseVersion(u.log, currentVersion, version) { - u.log.Warnf("Upgrade action skipped because agent is already at version %s", currentVersion) - return nil, ErrUpgradeSameVersion - } - - // Inform the Upgrade Marker Watcher that we've started upgrading. Note that this - // is only possible to do in-memory since, today, the process that's initiating - // the upgrade is the same as the Agent process in which the Upgrade Marker Watcher is - // running. If/when, in the future, the process initiating the upgrade is separated - // from the Agent process in which the Upgrade Marker Watcher is running, such in-memory - // communication will need to be replaced with inter-process communication (e.g. via - // a file, e.g. the Upgrade Marker file or something else). 
- u.markerWatcher.SetUpgradeStarted() - - span, ctx := apm.StartSpan(ctx, "upgrade", "app.internal") - defer span.End() - - err = u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()) - if err != nil { - u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) - } - - det.SetState(details.StateDownloading) - - sourceURI = u.sourceURI(sourceURI) - - parsedVersion, err := agtversion.ParseVersion(version) - if err != nil { - return nil, fmt.Errorf("error parsing version %q: %w", version, err) - } - - downloadResult, err := u.artifactDownloader.downloadArtifact(ctx, parsedVersion, sourceURI, u.fleetServerURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) - if err != nil { - // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files - // This may have an issue if users are upgrading to the same version number. - if dErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()); dErr != nil { - u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) - } - - return nil, err - } - - if err := u.upgradeCleaner.setupArchiveCleanup(downloadResult); err != nil { - return nil, err - } - - det.SetState(details.StateExtracting) - - metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) - if err != nil { - return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) - } - - newVersion := u.unpacker.extractAgentVersion(metadata, version) - - if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { // pass this as param to unpack step in upgrade executor - return nil, fmt.Errorf("cannot upgrade the agent: %w", err) - } - - u.log.Infow("Unpacking agent package", "version", newVersion) - - // Nice to have: add check that no archive files end up in the current versioned home - // default to no flavor to avoid breaking behavior - - // no default flavor, keep everything in case flavor is not specified - // in case of error fallback to keep-all - detectedFlavor, err := u.unpacker.detectFlavor(paths.Top(), "") - if err != nil { - u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", paths.Top(), err) - } - u.log.Debugf("detected used flavor: %q", detectedFlavor) - - unpackRes, unpackErr := u.unpacker.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) - err = goerrors.Join(err, u.diskSpaceErrorFunc(unpackErr)) - - if unpackRes.VersionedHome == "" { - err = goerrors.Join(err, fmt.Errorf("unknown versioned home")) - return nil, err - } - - newHash := unpackRes.Hash - if newHash == "" { - err = goerrors.Join(err, fmt.Errorf("unknown hash")) - return nil, err - } - - newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) - - unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()) - err = goerrors.Join(err, unpackCleanupSetupErr) - - if err != nil { - return nil, err - } - - err = u.directoryCopier.copyActionStore(u.log, newHome) - if err != nil { - err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) - return nil, err - } - - newRunPath := filepath.Join(newHome, "run") - oldRunPath := filepath.Join(paths.Run()) - - err = u.directoryCopier.copyRunDirectory(u.log, oldRunPath, newRunPath) - if err != nil { - err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) - return nil, err - } - - 
det.SetState(details.StateReplacing) - - // create symlink to the /elastic-agent - hashedDir := unpackRes.VersionedHome - u.log.Infof("hashedDir: %s", hashedDir) - - symlinkPath := filepath.Join(paths.Top(), agentName) - u.log.Infof("symlinkPath: %s", symlinkPath) - - // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. - newPath := paths.BinaryPath(filepath.Join(paths.Top(), hashedDir), agentName) - u.log.Infof("newPath: %s", newPath) - - currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) - if err != nil { - return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) - } - - if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, paths.Top(), currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { - err = goerrors.Join(err, symlinkCleanupSetupErr) - } - - u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - - err = u.relinker.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) - if err != nil { - return nil, err - } - - // We rotated the symlink successfully: prepare the current and previous agent installation details for the update marker - // In update marker the `current` agent install is the one where the symlink is pointing (the new one we didn't start yet) - // while the `previous` install is the currently executing elastic-agent that is no longer reachable via the symlink. - // After the restart at the end of the function, everything lines up correctly. - current := agentInstall{ - parsedVersion: parsedVersion, - version: version, - hash: unpackRes.Hash, - versionedHome: unpackRes.VersionedHome, - } - - previousParsedVersion := currentagtversion.GetParsedAgentPackageVersion() - previous := agentInstall{ - parsedVersion: previousParsedVersion, - version: release.VersionWithSnapshot(), - hash: release.Commit(), - versionedHome: currentVersionedHome, - } - - err = u.watcher.markUpgrade(u.log, - paths.Data(), // data dir to place the marker in - current, // new agent version data - previous, // old agent version data - action, det, OUTCOME_UPGRADE) - if err != nil { - return nil, err - } - - watcherExecutable := u.watcher.selectWatcherExecutable(paths.Top(), previous, current) - - watcherCmd, err := InvokeWatcher(u.log, watcherExecutable) - if err != nil { - return nil, err - } - - err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime, context.WithTimeout) - if err != nil { - err = goerrors.Join(err, watcherCmd.Process.Kill()) - return nil, err - } - - cb := shutdownCallback(u.log, paths.Home(), release.Version(), version, filepath.Join(paths.Top(), unpackRes.VersionedHome)) - - // Clean everything from the downloads dir - u.log.Infow("Removing downloads directory", "file.path", paths.Downloads()) - err = os.RemoveAll(paths.Downloads()) - if err != nil { - u.log.Errorw("Unable to clean downloads after update", "error.message", err, "file.path", paths.Downloads()) - } - - return cb, nil -} +// func (u *Upgrader) oldUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { +// defer func() { +// if err != nil { +// cleanupErr := u.upgradeCleaner.cleanup(err) +// if cleanupErr != nil { +// u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) +// 
err = goerrors.Join(err, cleanupErr) +// } +// } +// }() + +// u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) + +// currentVersion := agentVersion{ +// version: release.Version(), +// snapshot: release.Snapshot(), +// hash: release.Commit(), +// fips: release.FIPSDistribution(), +// } + +// // Compare versions and exit before downloading anything if the upgrade +// // is for the same release version that is currently running +// if isSameReleaseVersion(u.log, currentVersion, version) { +// u.log.Warnf("Upgrade action skipped because agent is already at version %s", currentVersion) +// return nil, ErrUpgradeSameVersion +// } + +// // Inform the Upgrade Marker Watcher that we've started upgrading. Note that this +// // is only possible to do in-memory since, today, the process that's initiating +// // the upgrade is the same as the Agent process in which the Upgrade Marker Watcher is +// // running. If/when, in the future, the process initiating the upgrade is separated +// // from the Agent process in which the Upgrade Marker Watcher is running, such in-memory +// // communication will need to be replaced with inter-process communication (e.g. via +// // a file, e.g. the Upgrade Marker file or something else). +// u.markerWatcher.SetUpgradeStarted() + +// span, ctx := apm.StartSpan(ctx, "upgrade", "app.internal") +// defer span.End() + +// err = u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()) +// if err != nil { +// u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) +// } + +// det.SetState(details.StateDownloading) + +// sourceURI = u.sourceURI(sourceURI) + +// parsedVersion, err := agtversion.ParseVersion(version) +// if err != nil { +// return nil, fmt.Errorf("error parsing version %q: %w", version, err) +// } + +// downloadResult, err := u.artifactDownloader.downloadArtifact(ctx, parsedVersion, sourceURI, u.fleetServerURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) +// if err != nil { +// // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files +// // This may have an issue if users are upgrading to the same version number. 
+// if dErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()); dErr != nil { +// u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) +// } + +// return nil, err +// } + +// if err := u.upgradeCleaner.setupArchiveCleanup(downloadResult); err != nil { +// return nil, err +// } + +// det.SetState(details.StateExtracting) + +// metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) +// if err != nil { +// return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) +// } + +// newVersion := u.unpacker.extractAgentVersion(metadata, version) + +// if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { // pass this as param to unpack step in upgrade executor +// return nil, fmt.Errorf("cannot upgrade the agent: %w", err) +// } + +// u.log.Infow("Unpacking agent package", "version", newVersion) + +// // Nice to have: add check that no archive files end up in the current versioned home +// // default to no flavor to avoid breaking behavior + +// // no default flavor, keep everything in case flavor is not specified +// // in case of error fallback to keep-all +// detectedFlavor, err := u.unpacker.detectFlavor(paths.Top(), "") +// if err != nil { +// u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", paths.Top(), err) +// } +// u.log.Debugf("detected used flavor: %q", detectedFlavor) + +// unpackRes, unpackErr := u.unpacker.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) +// err = goerrors.Join(err, u.diskSpaceErrorFunc(unpackErr)) + +// if unpackRes.VersionedHome == "" { +// err = goerrors.Join(err, fmt.Errorf("unknown versioned home")) +// return nil, err +// } + +// newHash := unpackRes.Hash +// if newHash == "" { +// err = goerrors.Join(err, fmt.Errorf("unknown hash")) +// return nil, err +// } + +// newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) + +// unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()) +// err = goerrors.Join(err, unpackCleanupSetupErr) + +// if err != nil { +// return nil, err +// } + +// err = u.directoryCopier.copyActionStore(u.log, newHome) +// if err != nil { +// err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) +// return nil, err +// } + +// newRunPath := filepath.Join(newHome, "run") +// oldRunPath := filepath.Join(paths.Run()) + +// err = u.directoryCopier.copyRunDirectory(u.log, oldRunPath, newRunPath) +// if err != nil { +// err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) +// return nil, err +// } + +// det.SetState(details.StateReplacing) + +// // create symlink to the /elastic-agent +// hashedDir := unpackRes.VersionedHome +// u.log.Infof("hashedDir: %s", hashedDir) + +// symlinkPath := filepath.Join(paths.Top(), agentName) +// u.log.Infof("symlinkPath: %s", symlinkPath) + +// // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
+// newPath := paths.BinaryPath(filepath.Join(paths.Top(), hashedDir), agentName) +// u.log.Infof("newPath: %s", newPath) + +// currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) +// if err != nil { +// return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) +// } + +// if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, paths.Top(), currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { +// err = goerrors.Join(err, symlinkCleanupSetupErr) +// } + +// u.log.Infof("currentVersionedHome: %s", currentVersionedHome) + +// err = u.relinker.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) +// if err != nil { +// return nil, err +// } + +// // We rotated the symlink successfully: prepare the current and previous agent installation details for the update marker +// // In update marker the `current` agent install is the one where the symlink is pointing (the new one we didn't start yet) +// // while the `previous` install is the currently executing elastic-agent that is no longer reachable via the symlink. +// // After the restart at the end of the function, everything lines up correctly. +// current := agentInstall{ +// parsedVersion: parsedVersion, +// version: version, +// hash: unpackRes.Hash, +// versionedHome: unpackRes.VersionedHome, +// } + +// previousParsedVersion := currentagtversion.GetParsedAgentPackageVersion() +// previous := agentInstall{ +// parsedVersion: previousParsedVersion, +// version: release.VersionWithSnapshot(), +// hash: release.Commit(), +// versionedHome: currentVersionedHome, +// } + +// err = u.watcher.markUpgrade(u.log, +// paths.Data(), // data dir to place the marker in +// current, // new agent version data +// previous, // old agent version data +// action, det, OUTCOME_UPGRADE) +// if err != nil { +// return nil, err +// } + +// watcherExecutable := u.watcher.selectWatcherExecutable(paths.Top(), previous, current) + +// watcherCmd, err := InvokeWatcher(u.log, watcherExecutable) +// if err != nil { +// return nil, err +// } + +// err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime, context.WithTimeout) +// if err != nil { +// err = goerrors.Join(err, watcherCmd.Process.Kill()) +// return nil, err +// } + +// cb := shutdownCallback(u.log, paths.Home(), release.Version(), version, filepath.Join(paths.Top(), unpackRes.VersionedHome)) + +// // Clean everything from the downloads dir +// u.log.Infow("Removing downloads directory", "file.path", paths.Downloads()) +// err = os.RemoveAll(paths.Downloads()) +// if err != nil { +// u.log.Errorw("Unable to clean downloads after update", "error.message", err, "file.path", paths.Downloads()) +// } + +// return cb, nil +// } func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index a71a0cce6d2..54daa37f34a 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -4,6 +4,7 @@ import ( "context" goerrors "errors" "fmt" + "os/exec" "path/filepath" "time" @@ -17,11 +18,34 @@ import ( agtversion 
"github.com/elastic/elastic-agent/pkg/version" ) -type upgradeExecutor interface { - downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) - unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) - replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error - watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error +type artifactDownloader interface { + downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error +} + +type unpacker interface { + getPackageMetadata(archivePath string) (packageMetadata, error) + extractAgentVersion(metadata packageMetadata, version string) agentVersion + unpack(version, archivePath, topPath, flavor string) (unpackResult, error) + detectFlavor(topPath, flavor string) (string, error) +} + +type relinker interface { + changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error +} + +type createContextWithTimeout func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) + +type watcher interface { + waitForWatcher(ctx context.Context, log *logger.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error + selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string + markUpgrade(log *logger.Logger, dataDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error + invokeWatcher(log *logger.Logger, agentExecutable string) (*exec.Cmd, error) +} + +type agentDirectoryCopier interface { + copyActionStore(log *logger.Logger, newHome string) error + copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error } type executeUpgrade struct { From acd485275629f71255b9e6510760792ce98dcf82 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 04:17:07 +0300 Subject: [PATCH 062/127] enhancement(5235): added download artifact step test case --- .../upgrade/upgrade_executor_test.go | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 internal/pkg/agent/application/upgrade/upgrade_executor_test.go diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go new file mode 100644 index 00000000000..5c99e02ffe6 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -0,0 +1,123 @@ +package upgrade + +import ( + "context" + "testing" + + 
"github.com/elastic/elastic-agent/internal/pkg/agent/application/info" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + "github.com/elastic/elastic-agent/pkg/core/logger" + "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" + agtversion "github.com/elastic/elastic-agent/pkg/version" + "github.com/stretchr/testify/require" +) + +type mockArtifactDownloader struct { + dowloadArtifactTestFunc func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) + cleanNonMatchingVersionsFromDownloadsTestFunc func(log *logger.Logger, version string) error +} + +func (m *mockArtifactDownloader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + return m.dowloadArtifactTestFunc(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) +} + +func (m *mockArtifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { + return m.cleanNonMatchingVersionsFromDownloadsTestFunc(log, version) +} + +type mockUpgradeCleaner struct { + setupArchiveCleanupTestFunc func(downloadResult download.DownloadResult) error +} + +func (m *mockUpgradeCleaner) setupArchiveCleanup(downloadResult download.DownloadResult) error { + return m.setupArchiveCleanupTestFunc(downloadResult) +} + +func (m *mockUpgradeCleaner) setupUnpackCleanup(newHomeDir, oldHomeDir string) error { + return nil +} + +func (m *mockUpgradeCleaner) setupSymlinkCleanup(symlinkFunc changeSymlinkFunc, topDirPath, oldVersionedHome, agentName string) error { + return nil +} + +func (m *mockUpgradeCleaner) cleanup(err error) error { + return nil +} + +func TestDownloadArtifactStep(t *testing.T) { + ctx := t.Context() + log, _ := loggertest.New("test") + parsedVersion, err := agtversion.ParseVersion("9.1.0") + require.NoError(t, err) + + // agentInfo := &info.AgentInfo{} + // sourceURI := "mockURI" + // fleetServerURI := "mockFleetServerURI" + // upgradeDetails := &details.Details{} + + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + // skipVerifyOverride := false + // skipDefaultPgp := false + // pgpBytes := []string{"mockPGPBytes"} + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, 
testValues.agentInfo.Version(), version) + return nil + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, nil + } + + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + require.Equal(t, mockDownloadResult, downloadResult) + return nil + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) + require.NoError(t, err) + require.Equal(t, mockDownloadResult, downloadResult) + require.Equal(t, 1, nonMatchingCallCount) +} From a3a40ac43907983bac999f95d97bd56d56011ddf Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 04:20:27 +0300 Subject: [PATCH 063/127] enhancement(5235): remove commented code --- .../pkg/agent/application/upgrade/upgrade_executor_test.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 5c99e02ffe6..2f199e15f4d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -77,10 +77,6 @@ func TestDownloadArtifactStep(t *testing.T) { pgpBytes: []string{"mockPGPBytes"}, } - // skipVerifyOverride := false - // skipDefaultPgp := false - // pgpBytes := []string{"mockPGPBytes"} - mockArtifactDownloader := &mockArtifactDownloader{} mockUpgradeCleaner := &mockUpgradeCleaner{} upgradeExecutor := &executeUpgrade{ From d7fde0e95a93054e5606fe4191ad6da0d6f8b906 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 04:51:05 +0300 Subject: [PATCH 064/127] enhancement(5235): added tests for executor download step, will need to refactor the test cases --- .../upgrade/upgrade_executor_test.go | 403 +++++++++++++++--- 1 file changed, 340 insertions(+), 63 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 2f199e15f4d..99c612b1719 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -7,6 +7,7 @@ import ( "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + "github.com/elastic/elastic-agent/internal/pkg/agent/errors" 
"github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -48,72 +49,348 @@ func (m *mockUpgradeCleaner) cleanup(err error) error { func TestDownloadArtifactStep(t *testing.T) { ctx := t.Context() - log, _ := loggertest.New("test") parsedVersion, err := agtversion.ParseVersion("9.1.0") require.NoError(t, err) - // agentInfo := &info.AgentInfo{} - // sourceURI := "mockURI" - // fleetServerURI := "mockFleetServerURI" - // upgradeDetails := &details.Details{} - - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } - - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } - - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - return nil - } - - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, nil - } - - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + t.Run("should download artifact and setup archive cleanup", func(t *testing.T) { + log, _ := loggertest.New("test") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log 
*logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, testValues.agentInfo.Version(), version) + return nil + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, nil + } + + cleanerCallCount := 0 + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + cleanerCallCount++ + require.Equal(t, mockDownloadResult, downloadResult) + return nil + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) + require.NoError(t, err) require.Equal(t, mockDownloadResult, downloadResult) - return nil - } + require.Equal(t, 1, nonMatchingCallCount) + require.Equal(t, 1, cleanerCallCount) + }) - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
- require.NoError(t, err) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 1, nonMatchingCallCount) + t.Run("when initial cleanup of non-matching versions fails, should log error", func(t *testing.T) { + log, obs := loggertest.New("test") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, testValues.agentInfo.Version(), version) + return errors.New("test error") + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, nil + } + + cleanerCallCount := 0 + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + cleanerCallCount++ + require.Equal(t, mockDownloadResult, downloadResult) + return nil + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
+ require.NoError(t, err) + require.Equal(t, mockDownloadResult, downloadResult) + require.Equal(t, 1, nonMatchingCallCount) + require.Equal(t, 1, obs.Len()) + require.Equal(t, "Unable to clean downloads before update", obs.All()[0].Message) + require.Equal(t, 1, cleanerCallCount) + }) + + t.Run("when download fails, and cleanup of non-matching versions succeeds, should return error", func(t *testing.T) { + log, _ := loggertest.New("test") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, testValues.agentInfo.Version(), version) + return nil + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, errors.New("test error") + } + + cleanerCallCount := 0 + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + cleanerCallCount++ + require.Equal(t, mockDownloadResult, downloadResult) + return nil + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
+ require.Error(t, err) + require.ErrorIs(t, err, errors.New("test error")) + require.Equal(t, mockDownloadResult, downloadResult) + require.Equal(t, 2, nonMatchingCallCount) + require.Equal(t, 0, cleanerCallCount) + }) + + t.Run("when download fails, and cleanup of non-matching versions fails, should log error and return error", func(t *testing.T) { + log, obs := loggertest.New("test") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, testValues.agentInfo.Version(), version) + if nonMatchingCallCount == 2 { + return errors.New("test non-matching error") + } + return nil + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, errors.New("test download error") + } + + cleanerCallCount := 0 + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + cleanerCallCount++ + require.Equal(t, mockDownloadResult, downloadResult) + return nil + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
+ require.Error(t, err) + require.ErrorIs(t, err, errors.New("test download error")) + require.Equal(t, mockDownloadResult, downloadResult) + require.Equal(t, 2, nonMatchingCallCount) + require.Equal(t, 0, cleanerCallCount) + require.Equal(t, 1, obs.Len()) + require.Equal(t, "Unable to remove file after verification failure", obs.All()[0].Message) + }) + + t.Run("when download succeeds, but setting up archive cleanup fails, should return error", func(t *testing.T) { + log, _ := loggertest.New("test") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + mockArtifactDownloader := &mockArtifactDownloader{} + mockUpgradeCleaner := &mockUpgradeCleaner{} + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, + } + + nonMatchingCallCount := 0 + mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { + nonMatchingCallCount++ + require.Equal(t, testValues.agentInfo.Version(), version) + return nil + } + + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + require.Equal(t, testValues.parsedVersion, parsedVersion) + require.Equal(t, testValues.sourceURI, sourceURI) + require.Equal(t, testValues.fleetServerURI, fleetServerURI) + require.Equal(t, testValues.upgradeDetails, upgradeDetails) + require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) + require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) + require.Equal(t, testValues.pgpBytes, pgpBytes) + return mockDownloadResult, nil + } + + cleanerCallCount := 0 + mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { + cleanerCallCount++ + require.Equal(t, mockDownloadResult, downloadResult) + return errors.New("test cleanup error") + } + + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
+ require.Error(t, err) + require.ErrorIs(t, err, errors.New("test cleanup error")) + require.Equal(t, mockDownloadResult, downloadResult) + require.Equal(t, 1, nonMatchingCallCount) + require.Equal(t, 1, cleanerCallCount) + }) } From 7ea282bce1f0618df7ca575254695fb2a1ebf61f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 16:48:51 +0300 Subject: [PATCH 065/127] enhancement(5235): updated mockery config and added mocks for the interfaces that the executor uses --- .mockery.yaml | 13 + .../upgrade/agent_directory_copier_mock.go | 130 +++++++++ .../upgrade/artifact_downloader_mock.go | 167 ++++++++++++ .../application/upgrade/relinker_mock.go | 84 ++++++ .../application/upgrade/unpacker_mock.go | 251 ++++++++++++++++++ .../upgrade/upgrade_cleaner_mock.go | 223 ++++++++++++++++ 6 files changed, 868 insertions(+) create mode 100644 internal/pkg/agent/application/upgrade/agent_directory_copier_mock.go create mode 100644 internal/pkg/agent/application/upgrade/artifact_downloader_mock.go create mode 100644 internal/pkg/agent/application/upgrade/relinker_mock.go create mode 100644 internal/pkg/agent/application/upgrade/unpacker_mock.go create mode 100644 internal/pkg/agent/application/upgrade/upgrade_cleaner_mock.go diff --git a/.mockery.yaml b/.mockery.yaml index ea5df1eebbe..10ed750a069 100644 --- a/.mockery.yaml +++ b/.mockery.yaml @@ -37,3 +37,16 @@ packages: installationModifier: config: mockname: "InstallationModifier" + github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade: + config: + inpackage: true + dir: internal/pkg/agent/application/upgrade + mockname: "mock_{{.InterfaceName}}" + interfaces: + artifactDownloader: + unpacker: + relinker: + watcher: + agentDirectoryCopier: + upgradeCleaner: + diff --git a/internal/pkg/agent/application/upgrade/agent_directory_copier_mock.go b/internal/pkg/agent/application/upgrade/agent_directory_copier_mock.go new file mode 100644 index 00000000000..984f37f5388 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/agent_directory_copier_mock.go @@ -0,0 +1,130 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. 
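The five subtests added in PATCH 064 above pin down the behaviour expected from the executor's download step: clean non-matching versions first (log and continue on failure), download, clean again and return the error if the download fails, and otherwise register the downloaded archive with the cleaner. A rough sketch consistent with those assertions follows; it is inferred from the test expectations, not copied from the real implementation:

    func (u *executeUpgrade) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) {
            // best-effort cleanup before downloading; failures are only logged
            if err := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, agentInfo.Version()); err != nil {
                    u.log.Errorw("Unable to clean downloads before update", "error.message", err)
            }
            res, err := u.artifactDownloader.downloadArtifact(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...)
            if err != nil {
                    // a failed download triggers a second cleanup pass before returning the error
                    if cleanErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, agentInfo.Version()); cleanErr != nil {
                            u.log.Errorw("Unable to remove file after verification failure", "error.message", cleanErr)
                    }
                    return res, err
            }
            // on success, hand the archive to the cleaner so it can be removed on rollback
            if setupErr := u.upgradeCleaner.setupArchiveCleanup(res); setupErr != nil {
                    return res, setupErr
            }
            return res, nil
    }
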
+ +package upgrade + +import ( + logp "github.com/elastic/elastic-agent-libs/logp" + mock "github.com/stretchr/testify/mock" +) + +// mock_agentDirectoryCopier is an autogenerated mock type for the agentDirectoryCopier type +type mock_agentDirectoryCopier struct { + mock.Mock +} + +type mock_agentDirectoryCopier_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_agentDirectoryCopier) EXPECT() *mock_agentDirectoryCopier_Expecter { + return &mock_agentDirectoryCopier_Expecter{mock: &_m.Mock} +} + +// copyActionStore provides a mock function with given fields: log, newHome +func (_m *mock_agentDirectoryCopier) copyActionStore(log *logp.Logger, newHome string) error { + ret := _m.Called(log, newHome) + + if len(ret) == 0 { + panic("no return value specified for copyActionStore") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string) error); ok { + r0 = rf(log, newHome) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_agentDirectoryCopier_copyActionStore_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'copyActionStore' +type mock_agentDirectoryCopier_copyActionStore_Call struct { + *mock.Call +} + +// copyActionStore is a helper method to define mock.On call +// - log *logp.Logger +// - newHome string +func (_e *mock_agentDirectoryCopier_Expecter) copyActionStore(log interface{}, newHome interface{}) *mock_agentDirectoryCopier_copyActionStore_Call { + return &mock_agentDirectoryCopier_copyActionStore_Call{Call: _e.mock.On("copyActionStore", log, newHome)} +} + +func (_c *mock_agentDirectoryCopier_copyActionStore_Call) Run(run func(log *logp.Logger, newHome string)) *mock_agentDirectoryCopier_copyActionStore_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string)) + }) + return _c +} + +func (_c *mock_agentDirectoryCopier_copyActionStore_Call) Return(_a0 error) *mock_agentDirectoryCopier_copyActionStore_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_agentDirectoryCopier_copyActionStore_Call) RunAndReturn(run func(*logp.Logger, string) error) *mock_agentDirectoryCopier_copyActionStore_Call { + _c.Call.Return(run) + return _c +} + +// copyRunDirectory provides a mock function with given fields: log, oldRunPath, newRunPath +func (_m *mock_agentDirectoryCopier) copyRunDirectory(log *logp.Logger, oldRunPath string, newRunPath string) error { + ret := _m.Called(log, oldRunPath, newRunPath) + + if len(ret) == 0 { + panic("no return value specified for copyRunDirectory") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string, string) error); ok { + r0 = rf(log, oldRunPath, newRunPath) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_agentDirectoryCopier_copyRunDirectory_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'copyRunDirectory' +type mock_agentDirectoryCopier_copyRunDirectory_Call struct { + *mock.Call +} + +// copyRunDirectory is a helper method to define mock.On call +// - log *logp.Logger +// - oldRunPath string +// - newRunPath string +func (_e *mock_agentDirectoryCopier_Expecter) copyRunDirectory(log interface{}, oldRunPath interface{}, newRunPath interface{}) *mock_agentDirectoryCopier_copyRunDirectory_Call { + return &mock_agentDirectoryCopier_copyRunDirectory_Call{Call: _e.mock.On("copyRunDirectory", log, oldRunPath, newRunPath)} +} + +func (_c *mock_agentDirectoryCopier_copyRunDirectory_Call) Run(run func(log *logp.Logger, oldRunPath string, newRunPath string)) 
*mock_agentDirectoryCopier_copyRunDirectory_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *mock_agentDirectoryCopier_copyRunDirectory_Call) Return(_a0 error) *mock_agentDirectoryCopier_copyRunDirectory_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_agentDirectoryCopier_copyRunDirectory_Call) RunAndReturn(run func(*logp.Logger, string, string) error) *mock_agentDirectoryCopier_copyRunDirectory_Call { + _c.Call.Return(run) + return _c +} + +// newMock_agentDirectoryCopier creates a new instance of mock_agentDirectoryCopier. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_agentDirectoryCopier(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_agentDirectoryCopier { + mock := &mock_agentDirectoryCopier{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/internal/pkg/agent/application/upgrade/artifact_downloader_mock.go b/internal/pkg/agent/application/upgrade/artifact_downloader_mock.go new file mode 100644 index 00000000000..64203fdb1c7 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/artifact_downloader_mock.go @@ -0,0 +1,167 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +package upgrade + +import ( + context "context" + + download "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + details "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + + logp "github.com/elastic/elastic-agent-libs/logp" + + mock "github.com/stretchr/testify/mock" + + version "github.com/elastic/elastic-agent/pkg/version" +) + +// mock_artifactDownloader is an autogenerated mock type for the artifactDownloader type +type mock_artifactDownloader struct { + mock.Mock +} + +type mock_artifactDownloader_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_artifactDownloader) EXPECT() *mock_artifactDownloader_Expecter { + return &mock_artifactDownloader_Expecter{mock: &_m.Mock} +} + +// cleanNonMatchingVersionsFromDownloads provides a mock function with given fields: log, _a1 +func (_m *mock_artifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logp.Logger, _a1 string) error { + ret := _m.Called(log, _a1) + + if len(ret) == 0 { + panic("no return value specified for cleanNonMatchingVersionsFromDownloads") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string) error); ok { + r0 = rf(log, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'cleanNonMatchingVersionsFromDownloads' +type mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call struct { + *mock.Call +} + +// cleanNonMatchingVersionsFromDownloads is a helper method to define mock.On call +// - log *logp.Logger +// - _a1 string +func (_e *mock_artifactDownloader_Expecter) cleanNonMatchingVersionsFromDownloads(log interface{}, _a1 interface{}) *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call { + return &mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call{Call: _e.mock.On("cleanNonMatchingVersionsFromDownloads", log, _a1)} +} + +func (_c *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call) Run(run 
func(log *logp.Logger, _a1 string)) *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string)) + }) + return _c +} + +func (_c *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call) Return(_a0 error) *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call) RunAndReturn(run func(*logp.Logger, string) error) *mock_artifactDownloader_cleanNonMatchingVersionsFromDownloads_Call { + _c.Call.Return(run) + return _c +} + +// downloadArtifact provides a mock function with given fields: ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes +func (_m *mock_artifactDownloader) downloadArtifact(ctx context.Context, parsedVersion *version.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + _va := make([]interface{}, len(pgpBytes)) + for _i := range pgpBytes { + _va[_i] = pgpBytes[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for downloadArtifact") + } + + var r0 download.DownloadResult + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *version.ParsedSemVer, string, string, *details.Details, bool, bool, ...string) (download.DownloadResult, error)); ok { + return rf(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + } + if rf, ok := ret.Get(0).(func(context.Context, *version.ParsedSemVer, string, string, *details.Details, bool, bool, ...string) download.DownloadResult); ok { + r0 = rf(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + } else { + r0 = ret.Get(0).(download.DownloadResult) + } + + if rf, ok := ret.Get(1).(func(context.Context, *version.ParsedSemVer, string, string, *details.Details, bool, bool, ...string) error); ok { + r1 = rf(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_artifactDownloader_downloadArtifact_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'downloadArtifact' +type mock_artifactDownloader_downloadArtifact_Call struct { + *mock.Call +} + +// downloadArtifact is a helper method to define mock.On call +// - ctx context.Context +// - parsedVersion *version.ParsedSemVer +// - sourceURI string +// - fleetServerURI string +// - upgradeDetails *details.Details +// - skipVerifyOverride bool +// - skipDefaultPgp bool +// - pgpBytes ...string +func (_e *mock_artifactDownloader_Expecter) downloadArtifact(ctx interface{}, parsedVersion interface{}, sourceURI interface{}, fleetServerURI interface{}, upgradeDetails interface{}, skipVerifyOverride interface{}, skipDefaultPgp interface{}, pgpBytes ...interface{}) *mock_artifactDownloader_downloadArtifact_Call { + return &mock_artifactDownloader_downloadArtifact_Call{Call: _e.mock.On("downloadArtifact", + append([]interface{}{ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp}, pgpBytes...)...)} +} + +func (_c *mock_artifactDownloader_downloadArtifact_Call) Run(run func(ctx context.Context, parsedVersion *version.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string)) *mock_artifactDownloader_downloadArtifact_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-7) + for i, a := range args[7:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(args[0].(context.Context), args[1].(*version.ParsedSemVer), args[2].(string), args[3].(string), args[4].(*details.Details), args[5].(bool), args[6].(bool), variadicArgs...) + }) + return _c +} + +func (_c *mock_artifactDownloader_downloadArtifact_Call) Return(_a0 download.DownloadResult, _a1 error) *mock_artifactDownloader_downloadArtifact_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_artifactDownloader_downloadArtifact_Call) RunAndReturn(run func(context.Context, *version.ParsedSemVer, string, string, *details.Details, bool, bool, ...string) (download.DownloadResult, error)) *mock_artifactDownloader_downloadArtifact_Call { + _c.Call.Return(run) + return _c +} + +// newMock_artifactDownloader creates a new instance of mock_artifactDownloader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_artifactDownloader(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_artifactDownloader { + mock := &mock_artifactDownloader{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/internal/pkg/agent/application/upgrade/relinker_mock.go b/internal/pkg/agent/application/upgrade/relinker_mock.go new file mode 100644 index 00000000000..b7eef55c151 --- /dev/null +++ b/internal/pkg/agent/application/upgrade/relinker_mock.go @@ -0,0 +1,84 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. 
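Because these mocks are generated in-package with mockery's Expecter API, later refactors can swap the hand-rolled mockArtifactDownloader in upgrade_executor_test.go for expectation-style setup. A minimal sketch of driving the generated mock, assuming "github.com/stretchr/testify/mock" is imported as mock and using placeholder values:

    func TestGeneratedArtifactDownloaderMock(t *testing.T) {
            m := newMock_artifactDownloader(t) // expectations are asserted automatically via t.Cleanup
            m.EXPECT().
                    downloadArtifact(mock.Anything, mock.Anything, "mockURI", "mockFleetServerURI", mock.Anything, false, false, "mockPGPBytes").
                    Return(download.DownloadResult{ArtifactPath: "mockArtifactPath"}, nil)

            res, err := m.downloadArtifact(t.Context(), nil, "mockURI", "mockFleetServerURI", nil, false, false, "mockPGPBytes")
            require.NoError(t, err)
            require.Equal(t, "mockArtifactPath", res.ArtifactPath)
    }
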
+ +package upgrade + +import ( + logp "github.com/elastic/elastic-agent-libs/logp" + mock "github.com/stretchr/testify/mock" +) + +// mock_relinker is an autogenerated mock type for the relinker type +type mock_relinker struct { + mock.Mock +} + +type mock_relinker_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_relinker) EXPECT() *mock_relinker_Expecter { + return &mock_relinker_Expecter{mock: &_m.Mock} +} + +// changeSymlink provides a mock function with given fields: log, topDirPath, symlinkPath, newTarget +func (_m *mock_relinker) changeSymlink(log *logp.Logger, topDirPath string, symlinkPath string, newTarget string) error { + ret := _m.Called(log, topDirPath, symlinkPath, newTarget) + + if len(ret) == 0 { + panic("no return value specified for changeSymlink") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string, string, string) error); ok { + r0 = rf(log, topDirPath, symlinkPath, newTarget) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_relinker_changeSymlink_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'changeSymlink' +type mock_relinker_changeSymlink_Call struct { + *mock.Call +} + +// changeSymlink is a helper method to define mock.On call +// - log *logp.Logger +// - topDirPath string +// - symlinkPath string +// - newTarget string +func (_e *mock_relinker_Expecter) changeSymlink(log interface{}, topDirPath interface{}, symlinkPath interface{}, newTarget interface{}) *mock_relinker_changeSymlink_Call { + return &mock_relinker_changeSymlink_Call{Call: _e.mock.On("changeSymlink", log, topDirPath, symlinkPath, newTarget)} +} + +func (_c *mock_relinker_changeSymlink_Call) Run(run func(log *logp.Logger, topDirPath string, symlinkPath string, newTarget string)) *mock_relinker_changeSymlink_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string), args[2].(string), args[3].(string)) + }) + return _c +} + +func (_c *mock_relinker_changeSymlink_Call) Return(_a0 error) *mock_relinker_changeSymlink_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_relinker_changeSymlink_Call) RunAndReturn(run func(*logp.Logger, string, string, string) error) *mock_relinker_changeSymlink_Call { + _c.Call.Return(run) + return _c +} + +// newMock_relinker creates a new instance of mock_relinker. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_relinker(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_relinker { + mock := &mock_relinker{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/internal/pkg/agent/application/upgrade/unpacker_mock.go b/internal/pkg/agent/application/upgrade/unpacker_mock.go new file mode 100644 index 00000000000..5c9cf01ae1f --- /dev/null +++ b/internal/pkg/agent/application/upgrade/unpacker_mock.go @@ -0,0 +1,251 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. 
+ +package upgrade + +import mock "github.com/stretchr/testify/mock" + +// mock_unpacker is an autogenerated mock type for the unpacker type +type mock_unpacker struct { + mock.Mock +} + +type mock_unpacker_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_unpacker) EXPECT() *mock_unpacker_Expecter { + return &mock_unpacker_Expecter{mock: &_m.Mock} +} + +// detectFlavor provides a mock function with given fields: topPath, flavor +func (_m *mock_unpacker) detectFlavor(topPath string, flavor string) (string, error) { + ret := _m.Called(topPath, flavor) + + if len(ret) == 0 { + panic("no return value specified for detectFlavor") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (string, error)); ok { + return rf(topPath, flavor) + } + if rf, ok := ret.Get(0).(func(string, string) string); ok { + r0 = rf(topPath, flavor) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(topPath, flavor) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_unpacker_detectFlavor_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'detectFlavor' +type mock_unpacker_detectFlavor_Call struct { + *mock.Call +} + +// detectFlavor is a helper method to define mock.On call +// - topPath string +// - flavor string +func (_e *mock_unpacker_Expecter) detectFlavor(topPath interface{}, flavor interface{}) *mock_unpacker_detectFlavor_Call { + return &mock_unpacker_detectFlavor_Call{Call: _e.mock.On("detectFlavor", topPath, flavor)} +} + +func (_c *mock_unpacker_detectFlavor_Call) Run(run func(topPath string, flavor string)) *mock_unpacker_detectFlavor_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *mock_unpacker_detectFlavor_Call) Return(_a0 string, _a1 error) *mock_unpacker_detectFlavor_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_unpacker_detectFlavor_Call) RunAndReturn(run func(string, string) (string, error)) *mock_unpacker_detectFlavor_Call { + _c.Call.Return(run) + return _c +} + +// extractAgentVersion provides a mock function with given fields: metadata, version +func (_m *mock_unpacker) extractAgentVersion(metadata packageMetadata, version string) agentVersion { + ret := _m.Called(metadata, version) + + if len(ret) == 0 { + panic("no return value specified for extractAgentVersion") + } + + var r0 agentVersion + if rf, ok := ret.Get(0).(func(packageMetadata, string) agentVersion); ok { + r0 = rf(metadata, version) + } else { + r0 = ret.Get(0).(agentVersion) + } + + return r0 +} + +// mock_unpacker_extractAgentVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'extractAgentVersion' +type mock_unpacker_extractAgentVersion_Call struct { + *mock.Call +} + +// extractAgentVersion is a helper method to define mock.On call +// - metadata packageMetadata +// - version string +func (_e *mock_unpacker_Expecter) extractAgentVersion(metadata interface{}, version interface{}) *mock_unpacker_extractAgentVersion_Call { + return &mock_unpacker_extractAgentVersion_Call{Call: _e.mock.On("extractAgentVersion", metadata, version)} +} + +func (_c *mock_unpacker_extractAgentVersion_Call) Run(run func(metadata packageMetadata, version string)) *mock_unpacker_extractAgentVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(packageMetadata), args[1].(string)) + }) + return _c +} + +func (_c 
*mock_unpacker_extractAgentVersion_Call) Return(_a0 agentVersion) *mock_unpacker_extractAgentVersion_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_unpacker_extractAgentVersion_Call) RunAndReturn(run func(packageMetadata, string) agentVersion) *mock_unpacker_extractAgentVersion_Call { + _c.Call.Return(run) + return _c +} + +// getPackageMetadata provides a mock function with given fields: archivePath +func (_m *mock_unpacker) getPackageMetadata(archivePath string) (packageMetadata, error) { + ret := _m.Called(archivePath) + + if len(ret) == 0 { + panic("no return value specified for getPackageMetadata") + } + + var r0 packageMetadata + var r1 error + if rf, ok := ret.Get(0).(func(string) (packageMetadata, error)); ok { + return rf(archivePath) + } + if rf, ok := ret.Get(0).(func(string) packageMetadata); ok { + r0 = rf(archivePath) + } else { + r0 = ret.Get(0).(packageMetadata) + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(archivePath) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_unpacker_getPackageMetadata_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'getPackageMetadata' +type mock_unpacker_getPackageMetadata_Call struct { + *mock.Call +} + +// getPackageMetadata is a helper method to define mock.On call +// - archivePath string +func (_e *mock_unpacker_Expecter) getPackageMetadata(archivePath interface{}) *mock_unpacker_getPackageMetadata_Call { + return &mock_unpacker_getPackageMetadata_Call{Call: _e.mock.On("getPackageMetadata", archivePath)} +} + +func (_c *mock_unpacker_getPackageMetadata_Call) Run(run func(archivePath string)) *mock_unpacker_getPackageMetadata_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *mock_unpacker_getPackageMetadata_Call) Return(_a0 packageMetadata, _a1 error) *mock_unpacker_getPackageMetadata_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_unpacker_getPackageMetadata_Call) RunAndReturn(run func(string) (packageMetadata, error)) *mock_unpacker_getPackageMetadata_Call { + _c.Call.Return(run) + return _c +} + +// unpack provides a mock function with given fields: version, archivePath, topPath, flavor +func (_m *mock_unpacker) unpack(version string, archivePath string, topPath string, flavor string) (unpackResult, error) { + ret := _m.Called(version, archivePath, topPath, flavor) + + if len(ret) == 0 { + panic("no return value specified for unpack") + } + + var r0 unpackResult + var r1 error + if rf, ok := ret.Get(0).(func(string, string, string, string) (unpackResult, error)); ok { + return rf(version, archivePath, topPath, flavor) + } + if rf, ok := ret.Get(0).(func(string, string, string, string) unpackResult); ok { + r0 = rf(version, archivePath, topPath, flavor) + } else { + r0 = ret.Get(0).(unpackResult) + } + + if rf, ok := ret.Get(1).(func(string, string, string, string) error); ok { + r1 = rf(version, archivePath, topPath, flavor) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_unpacker_unpack_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'unpack' +type mock_unpacker_unpack_Call struct { + *mock.Call +} + +// unpack is a helper method to define mock.On call +// - version string +// - archivePath string +// - topPath string +// - flavor string +func (_e *mock_unpacker_Expecter) unpack(version interface{}, archivePath interface{}, topPath interface{}, flavor interface{}) *mock_unpacker_unpack_Call { + return 
&mock_unpacker_unpack_Call{Call: _e.mock.On("unpack", version, archivePath, topPath, flavor)} +} + +func (_c *mock_unpacker_unpack_Call) Run(run func(version string, archivePath string, topPath string, flavor string)) *mock_unpacker_unpack_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string), args[2].(string), args[3].(string)) + }) + return _c +} + +func (_c *mock_unpacker_unpack_Call) Return(_a0 unpackResult, _a1 error) *mock_unpacker_unpack_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_unpacker_unpack_Call) RunAndReturn(run func(string, string, string, string) (unpackResult, error)) *mock_unpacker_unpack_Call { + _c.Call.Return(run) + return _c +} + +// newMock_unpacker creates a new instance of mock_unpacker. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_unpacker(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_unpacker { + mock := &mock_unpacker{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/internal/pkg/agent/application/upgrade/upgrade_cleaner_mock.go b/internal/pkg/agent/application/upgrade/upgrade_cleaner_mock.go new file mode 100644 index 00000000000..cd91a8ce38a --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_cleaner_mock.go @@ -0,0 +1,223 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +package upgrade + +import ( + download "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + mock "github.com/stretchr/testify/mock" +) + +// mock_upgradeCleaner is an autogenerated mock type for the upgradeCleaner type +type mock_upgradeCleaner struct { + mock.Mock +} + +type mock_upgradeCleaner_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_upgradeCleaner) EXPECT() *mock_upgradeCleaner_Expecter { + return &mock_upgradeCleaner_Expecter{mock: &_m.Mock} +} + +// cleanup provides a mock function with given fields: err +func (_m *mock_upgradeCleaner) cleanup(err error) error { + ret := _m.Called(err) + + if len(ret) == 0 { + panic("no return value specified for cleanup") + } + + var r0 error + if rf, ok := ret.Get(0).(func(error) error); ok { + r0 = rf(err) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeCleaner_cleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'cleanup' +type mock_upgradeCleaner_cleanup_Call struct { + *mock.Call +} + +// cleanup is a helper method to define mock.On call +// - err error +func (_e *mock_upgradeCleaner_Expecter) cleanup(err interface{}) *mock_upgradeCleaner_cleanup_Call { + return &mock_upgradeCleaner_cleanup_Call{Call: _e.mock.On("cleanup", err)} +} + +func (_c *mock_upgradeCleaner_cleanup_Call) Run(run func(err error)) *mock_upgradeCleaner_cleanup_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(error)) + }) + return _c +} + +func (_c *mock_upgradeCleaner_cleanup_Call) Return(_a0 error) *mock_upgradeCleaner_cleanup_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeCleaner_cleanup_Call) RunAndReturn(run func(error) error) *mock_upgradeCleaner_cleanup_Call { + _c.Call.Return(run) + return _c +} + +// setupArchiveCleanup provides a mock function with given fields: downloadResult +func (_m *mock_upgradeCleaner) setupArchiveCleanup(downloadResult download.DownloadResult) error { + ret := _m.Called(downloadResult) + + if 
len(ret) == 0 { + panic("no return value specified for setupArchiveCleanup") + } + + var r0 error + if rf, ok := ret.Get(0).(func(download.DownloadResult) error); ok { + r0 = rf(downloadResult) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeCleaner_setupArchiveCleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'setupArchiveCleanup' +type mock_upgradeCleaner_setupArchiveCleanup_Call struct { + *mock.Call +} + +// setupArchiveCleanup is a helper method to define mock.On call +// - downloadResult download.DownloadResult +func (_e *mock_upgradeCleaner_Expecter) setupArchiveCleanup(downloadResult interface{}) *mock_upgradeCleaner_setupArchiveCleanup_Call { + return &mock_upgradeCleaner_setupArchiveCleanup_Call{Call: _e.mock.On("setupArchiveCleanup", downloadResult)} +} + +func (_c *mock_upgradeCleaner_setupArchiveCleanup_Call) Run(run func(downloadResult download.DownloadResult)) *mock_upgradeCleaner_setupArchiveCleanup_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(download.DownloadResult)) + }) + return _c +} + +func (_c *mock_upgradeCleaner_setupArchiveCleanup_Call) Return(_a0 error) *mock_upgradeCleaner_setupArchiveCleanup_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeCleaner_setupArchiveCleanup_Call) RunAndReturn(run func(download.DownloadResult) error) *mock_upgradeCleaner_setupArchiveCleanup_Call { + _c.Call.Return(run) + return _c +} + +// setupSymlinkCleanup provides a mock function with given fields: symlinkFunc, topDirPath, oldVersionedHome, agentName +func (_m *mock_upgradeCleaner) setupSymlinkCleanup(symlinkFunc changeSymlinkFunc, topDirPath string, oldVersionedHome string, agentName string) error { + ret := _m.Called(symlinkFunc, topDirPath, oldVersionedHome, agentName) + + if len(ret) == 0 { + panic("no return value specified for setupSymlinkCleanup") + } + + var r0 error + if rf, ok := ret.Get(0).(func(changeSymlinkFunc, string, string, string) error); ok { + r0 = rf(symlinkFunc, topDirPath, oldVersionedHome, agentName) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeCleaner_setupSymlinkCleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'setupSymlinkCleanup' +type mock_upgradeCleaner_setupSymlinkCleanup_Call struct { + *mock.Call +} + +// setupSymlinkCleanup is a helper method to define mock.On call +// - symlinkFunc changeSymlinkFunc +// - topDirPath string +// - oldVersionedHome string +// - agentName string +func (_e *mock_upgradeCleaner_Expecter) setupSymlinkCleanup(symlinkFunc interface{}, topDirPath interface{}, oldVersionedHome interface{}, agentName interface{}) *mock_upgradeCleaner_setupSymlinkCleanup_Call { + return &mock_upgradeCleaner_setupSymlinkCleanup_Call{Call: _e.mock.On("setupSymlinkCleanup", symlinkFunc, topDirPath, oldVersionedHome, agentName)} +} + +func (_c *mock_upgradeCleaner_setupSymlinkCleanup_Call) Run(run func(symlinkFunc changeSymlinkFunc, topDirPath string, oldVersionedHome string, agentName string)) *mock_upgradeCleaner_setupSymlinkCleanup_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(changeSymlinkFunc), args[1].(string), args[2].(string), args[3].(string)) + }) + return _c +} + +func (_c *mock_upgradeCleaner_setupSymlinkCleanup_Call) Return(_a0 error) *mock_upgradeCleaner_setupSymlinkCleanup_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeCleaner_setupSymlinkCleanup_Call) RunAndReturn(run func(changeSymlinkFunc, string, string, 
string) error) *mock_upgradeCleaner_setupSymlinkCleanup_Call { + _c.Call.Return(run) + return _c +} + +// setupUnpackCleanup provides a mock function with given fields: newHomeDir, oldHomeDir +func (_m *mock_upgradeCleaner) setupUnpackCleanup(newHomeDir string, oldHomeDir string) error { + ret := _m.Called(newHomeDir, oldHomeDir) + + if len(ret) == 0 { + panic("no return value specified for setupUnpackCleanup") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(newHomeDir, oldHomeDir) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeCleaner_setupUnpackCleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'setupUnpackCleanup' +type mock_upgradeCleaner_setupUnpackCleanup_Call struct { + *mock.Call +} + +// setupUnpackCleanup is a helper method to define mock.On call +// - newHomeDir string +// - oldHomeDir string +func (_e *mock_upgradeCleaner_Expecter) setupUnpackCleanup(newHomeDir interface{}, oldHomeDir interface{}) *mock_upgradeCleaner_setupUnpackCleanup_Call { + return &mock_upgradeCleaner_setupUnpackCleanup_Call{Call: _e.mock.On("setupUnpackCleanup", newHomeDir, oldHomeDir)} +} + +func (_c *mock_upgradeCleaner_setupUnpackCleanup_Call) Run(run func(newHomeDir string, oldHomeDir string)) *mock_upgradeCleaner_setupUnpackCleanup_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *mock_upgradeCleaner_setupUnpackCleanup_Call) Return(_a0 error) *mock_upgradeCleaner_setupUnpackCleanup_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeCleaner_setupUnpackCleanup_Call) RunAndReturn(run func(string, string) error) *mock_upgradeCleaner_setupUnpackCleanup_Call { + _c.Call.Return(run) + return _c +} + +// newMock_upgradeCleaner creates a new instance of mock_upgradeCleaner. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_upgradeCleaner(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_upgradeCleaner { + mock := &mock_upgradeCleaner{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} From b4fe2a574fa962714d097b01e9bdd8f6a896bd79 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 16:49:09 +0300 Subject: [PATCH 066/127] enhancemen(5235): added wathcer mock --- .../agent/application/upgrade/watcher_mock.go | 254 ++++++++++++++++++ 1 file changed, 254 insertions(+) create mode 100644 internal/pkg/agent/application/upgrade/watcher_mock.go diff --git a/internal/pkg/agent/application/upgrade/watcher_mock.go b/internal/pkg/agent/application/upgrade/watcher_mock.go new file mode 100644 index 00000000000..8d5e2cb262c --- /dev/null +++ b/internal/pkg/agent/application/upgrade/watcher_mock.go @@ -0,0 +1,254 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. 
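For orientation, the watcher interface mocked here corresponds to the tail of the upgrade path shown commented out earlier in the series: mark the upgrade, select the watcher binary, invoke it, then wait for it and kill the process if the wait fails. A condensed sketch of that sequence against the interface (illustration only; error wrapping and rollback details are omitted):

    func runWatcher(ctx context.Context, log *logger.Logger, w watcher, dataDir, topDir string, current, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details) error {
            if err := w.markUpgrade(log, dataDir, current, previous, action, det, OUTCOME_UPGRADE); err != nil {
                    return err
            }
            watcherExecutable := w.selectWatcherExecutable(topDir, previous, current)
            watcherCmd, err := w.invokeWatcher(log, watcherExecutable)
            if err != nil {
                    return err
            }
            if err := w.waitForWatcher(ctx, log, markerFilePath(dataDir), watcherMaxWaitTime, context.WithTimeout); err != nil {
                    // the watcher never became healthy; kill it before returning the error
                    return goerrors.Join(err, watcherCmd.Process.Kill())
            }
            return nil
    }
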
+ +package upgrade + +import ( + context "context" + exec "os/exec" + + details "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + + fleetapi "github.com/elastic/elastic-agent/internal/pkg/fleetapi" + + logp "github.com/elastic/elastic-agent-libs/logp" + + mock "github.com/stretchr/testify/mock" + + time "time" +) + +// mock_watcher is an autogenerated mock type for the watcher type +type mock_watcher struct { + mock.Mock +} + +type mock_watcher_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_watcher) EXPECT() *mock_watcher_Expecter { + return &mock_watcher_Expecter{mock: &_m.Mock} +} + +// invokeWatcher provides a mock function with given fields: log, agentExecutable +func (_m *mock_watcher) invokeWatcher(log *logp.Logger, agentExecutable string) (*exec.Cmd, error) { + ret := _m.Called(log, agentExecutable) + + if len(ret) == 0 { + panic("no return value specified for invokeWatcher") + } + + var r0 *exec.Cmd + var r1 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string) (*exec.Cmd, error)); ok { + return rf(log, agentExecutable) + } + if rf, ok := ret.Get(0).(func(*logp.Logger, string) *exec.Cmd); ok { + r0 = rf(log, agentExecutable) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*exec.Cmd) + } + } + + if rf, ok := ret.Get(1).(func(*logp.Logger, string) error); ok { + r1 = rf(log, agentExecutable) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_watcher_invokeWatcher_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'invokeWatcher' +type mock_watcher_invokeWatcher_Call struct { + *mock.Call +} + +// invokeWatcher is a helper method to define mock.On call +// - log *logp.Logger +// - agentExecutable string +func (_e *mock_watcher_Expecter) invokeWatcher(log interface{}, agentExecutable interface{}) *mock_watcher_invokeWatcher_Call { + return &mock_watcher_invokeWatcher_Call{Call: _e.mock.On("invokeWatcher", log, agentExecutable)} +} + +func (_c *mock_watcher_invokeWatcher_Call) Run(run func(log *logp.Logger, agentExecutable string)) *mock_watcher_invokeWatcher_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string)) + }) + return _c +} + +func (_c *mock_watcher_invokeWatcher_Call) Return(_a0 *exec.Cmd, _a1 error) *mock_watcher_invokeWatcher_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_watcher_invokeWatcher_Call) RunAndReturn(run func(*logp.Logger, string) (*exec.Cmd, error)) *mock_watcher_invokeWatcher_Call { + _c.Call.Return(run) + return _c +} + +// markUpgrade provides a mock function with given fields: log, dataDir, current, previous, action, det, outcome +func (_m *mock_watcher) markUpgrade(log *logp.Logger, dataDir string, current agentInstall, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome) error { + ret := _m.Called(log, dataDir, current, previous, action, det, outcome) + + if len(ret) == 0 { + panic("no return value specified for markUpgrade") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*logp.Logger, string, agentInstall, agentInstall, *fleetapi.ActionUpgrade, *details.Details, UpgradeOutcome) error); ok { + r0 = rf(log, dataDir, current, previous, action, det, outcome) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_watcher_markUpgrade_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'markUpgrade' +type mock_watcher_markUpgrade_Call struct { + *mock.Call +} + +// 
markUpgrade is a helper method to define mock.On call +// - log *logp.Logger +// - dataDir string +// - current agentInstall +// - previous agentInstall +// - action *fleetapi.ActionUpgrade +// - det *details.Details +// - outcome UpgradeOutcome +func (_e *mock_watcher_Expecter) markUpgrade(log interface{}, dataDir interface{}, current interface{}, previous interface{}, action interface{}, det interface{}, outcome interface{}) *mock_watcher_markUpgrade_Call { + return &mock_watcher_markUpgrade_Call{Call: _e.mock.On("markUpgrade", log, dataDir, current, previous, action, det, outcome)} +} + +func (_c *mock_watcher_markUpgrade_Call) Run(run func(log *logp.Logger, dataDir string, current agentInstall, previous agentInstall, action *fleetapi.ActionUpgrade, det *details.Details, outcome UpgradeOutcome)) *mock_watcher_markUpgrade_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(*logp.Logger), args[1].(string), args[2].(agentInstall), args[3].(agentInstall), args[4].(*fleetapi.ActionUpgrade), args[5].(*details.Details), args[6].(UpgradeOutcome)) + }) + return _c +} + +func (_c *mock_watcher_markUpgrade_Call) Return(_a0 error) *mock_watcher_markUpgrade_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_watcher_markUpgrade_Call) RunAndReturn(run func(*logp.Logger, string, agentInstall, agentInstall, *fleetapi.ActionUpgrade, *details.Details, UpgradeOutcome) error) *mock_watcher_markUpgrade_Call { + _c.Call.Return(run) + return _c +} + +// selectWatcherExecutable provides a mock function with given fields: topDir, previous, current +func (_m *mock_watcher) selectWatcherExecutable(topDir string, previous agentInstall, current agentInstall) string { + ret := _m.Called(topDir, previous, current) + + if len(ret) == 0 { + panic("no return value specified for selectWatcherExecutable") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string, agentInstall, agentInstall) string); ok { + r0 = rf(topDir, previous, current) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// mock_watcher_selectWatcherExecutable_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'selectWatcherExecutable' +type mock_watcher_selectWatcherExecutable_Call struct { + *mock.Call +} + +// selectWatcherExecutable is a helper method to define mock.On call +// - topDir string +// - previous agentInstall +// - current agentInstall +func (_e *mock_watcher_Expecter) selectWatcherExecutable(topDir interface{}, previous interface{}, current interface{}) *mock_watcher_selectWatcherExecutable_Call { + return &mock_watcher_selectWatcherExecutable_Call{Call: _e.mock.On("selectWatcherExecutable", topDir, previous, current)} +} + +func (_c *mock_watcher_selectWatcherExecutable_Call) Run(run func(topDir string, previous agentInstall, current agentInstall)) *mock_watcher_selectWatcherExecutable_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(agentInstall), args[2].(agentInstall)) + }) + return _c +} + +func (_c *mock_watcher_selectWatcherExecutable_Call) Return(_a0 string) *mock_watcher_selectWatcherExecutable_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_watcher_selectWatcherExecutable_Call) RunAndReturn(run func(string, agentInstall, agentInstall) string) *mock_watcher_selectWatcherExecutable_Call { + _c.Call.Return(run) + return _c +} + +// waitForWatcher provides a mock function with given fields: ctx, log, markerFilePath, waitTime, createTimeoutContext +func (_m *mock_watcher) waitForWatcher(ctx context.Context, 
log *logp.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout) error { + ret := _m.Called(ctx, log, markerFilePath, waitTime, createTimeoutContext) + + if len(ret) == 0 { + panic("no return value specified for waitForWatcher") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *logp.Logger, string, time.Duration, createContextWithTimeout) error); ok { + r0 = rf(ctx, log, markerFilePath, waitTime, createTimeoutContext) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_watcher_waitForWatcher_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'waitForWatcher' +type mock_watcher_waitForWatcher_Call struct { + *mock.Call +} + +// waitForWatcher is a helper method to define mock.On call +// - ctx context.Context +// - log *logp.Logger +// - markerFilePath string +// - waitTime time.Duration +// - createTimeoutContext createContextWithTimeout +func (_e *mock_watcher_Expecter) waitForWatcher(ctx interface{}, log interface{}, markerFilePath interface{}, waitTime interface{}, createTimeoutContext interface{}) *mock_watcher_waitForWatcher_Call { + return &mock_watcher_waitForWatcher_Call{Call: _e.mock.On("waitForWatcher", ctx, log, markerFilePath, waitTime, createTimeoutContext)} +} + +func (_c *mock_watcher_waitForWatcher_Call) Run(run func(ctx context.Context, log *logp.Logger, markerFilePath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout)) *mock_watcher_waitForWatcher_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*logp.Logger), args[2].(string), args[3].(time.Duration), args[4].(createContextWithTimeout)) + }) + return _c +} + +func (_c *mock_watcher_waitForWatcher_Call) Return(_a0 error) *mock_watcher_waitForWatcher_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_watcher_waitForWatcher_Call) RunAndReturn(run func(context.Context, *logp.Logger, string, time.Duration, createContextWithTimeout) error) *mock_watcher_waitForWatcher_Call { + _c.Call.Return(run) + return _c +} + +// newMock_watcher creates a new instance of mock_watcher. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func newMock_watcher(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_watcher { + mock := &mock_watcher{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} From 794ae57f7d52ac59e1a4d6b33f01116c299dbd48 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Thu, 7 Aug 2025 16:49:31 +0300 Subject: [PATCH 067/127] enhancement(5235): added download step tests --- .../upgrade/upgrade_executor_test.go | 480 +++++------------- 1 file changed, 116 insertions(+), 364 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 99c612b1719..bfbc24813cd 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -1,396 +1,148 @@ package upgrade import ( - "context" "testing" "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" - "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" "github.com/stretchr/testify/require" ) -type mockArtifactDownloader struct { - dowloadArtifactTestFunc func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) - cleanNonMatchingVersionsFromDownloadsTestFunc func(log *logger.Logger, version string) error -} - -func (m *mockArtifactDownloader) downloadArtifact(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - return m.dowloadArtifactTestFunc(ctx, parsedVersion, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
-} - -func (m *mockArtifactDownloader) cleanNonMatchingVersionsFromDownloads(log *logger.Logger, version string) error { - return m.cleanNonMatchingVersionsFromDownloadsTestFunc(log, version) -} - -type mockUpgradeCleaner struct { - setupArchiveCleanupTestFunc func(downloadResult download.DownloadResult) error -} - -func (m *mockUpgradeCleaner) setupArchiveCleanup(downloadResult download.DownloadResult) error { - return m.setupArchiveCleanupTestFunc(downloadResult) -} - -func (m *mockUpgradeCleaner) setupUnpackCleanup(newHomeDir, oldHomeDir string) error { - return nil -} - -func (m *mockUpgradeCleaner) setupSymlinkCleanup(symlinkFunc changeSymlinkFunc, topDirPath, oldVersionedHome, agentName string) error { - return nil -} - -func (m *mockUpgradeCleaner) cleanup(err error) error { - return nil +type downloadStepTestCase struct { + cleanNonMatchingVersionsFromDownloadsErrors []error + expectedLogMessage string + downloadArtifactError error + setupArchiveCleanupError error + setupArchiveCleanupCalled bool } func TestDownloadArtifactStep(t *testing.T) { ctx := t.Context() parsedVersion, err := agtversion.ParseVersion("9.1.0") require.NoError(t, err) - - t.Run("should download artifact and setup archive cleanup", func(t *testing.T) { - log, _ := loggertest.New("test") - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } - - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } - - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - return nil - } - - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, nil - } - - cleanerCallCount := 0 - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { - cleanerCallCount++ - require.Equal(t, mockDownloadResult, downloadResult) - return nil - } - - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, 
testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) - require.NoError(t, err) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 1, nonMatchingCallCount) - require.Equal(t, 1, cleanerCallCount) - }) - - t.Run("when initial cleanup of non-matching versions fails, should log error", func(t *testing.T) { - log, obs := loggertest.New("test") - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } - - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } - - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - return errors.New("test error") - } - - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, nil - } - - cleanerCallCount := 0 - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { - cleanerCallCount++ - require.Equal(t, mockDownloadResult, downloadResult) - return nil - } - - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
- require.NoError(t, err) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 1, nonMatchingCallCount) - require.Equal(t, 1, obs.Len()) - require.Equal(t, "Unable to clean downloads before update", obs.All()[0].Message) - require.Equal(t, 1, cleanerCallCount) - }) - - t.Run("when download fails, and cleanup of non-matching versions succeeds, should return error", func(t *testing.T) { - log, _ := loggertest.New("test") - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } - - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } - - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - return nil - } - - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } - - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, errors.New("test error") - } - - cleanerCallCount := 0 - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { - cleanerCallCount++ - require.Equal(t, mockDownloadResult, downloadResult) - return nil - } - - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
- require.Error(t, err) - require.ErrorIs(t, err, errors.New("test error")) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 2, nonMatchingCallCount) - require.Equal(t, 0, cleanerCallCount) - }) - - t.Run("when download fails, and cleanup of non-matching versions fails, should log error and return error", func(t *testing.T) { - log, obs := loggertest.New("test") - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } - - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } - - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - if nonMatchingCallCount == 2 { - return errors.New("test non-matching error") + testValues := struct { + parsedVersion *agtversion.ParsedSemVer + agentInfo *info.AgentInfo + sourceURI string + fleetServerURI string + upgradeDetails *details.Details + skipVerifyOverride bool + skipDefaultPgp bool + pgpBytes []string + }{ + parsedVersion: parsedVersion, + agentInfo: &info.AgentInfo{}, + sourceURI: "mockURI", + fleetServerURI: "mockFleetServerURI", + upgradeDetails: &details.Details{}, + skipVerifyOverride: false, + skipDefaultPgp: false, + pgpBytes: []string{"mockPGPBytes"}, + } + + testCases := map[string]downloadStepTestCase{ + "should download artifact and setup archive cleanup": { + cleanNonMatchingVersionsFromDownloadsErrors: []error{nil}, + expectedLogMessage: "", + downloadArtifactError: nil, + setupArchiveCleanupError: nil, + setupArchiveCleanupCalled: true, + }, + "when initial cleanup of non-matching versions fails, should log error": { + cleanNonMatchingVersionsFromDownloadsErrors: []error{errors.New("test error")}, + expectedLogMessage: "Unable to clean downloads before update", + downloadArtifactError: nil, + setupArchiveCleanupError: nil, + setupArchiveCleanupCalled: true, + }, + "when download fails, and cleanup of non-matching versions succeeds, should return error": { + cleanNonMatchingVersionsFromDownloadsErrors: []error{nil, nil}, + expectedLogMessage: "", + downloadArtifactError: errors.New("test error"), + setupArchiveCleanupError: nil, + setupArchiveCleanupCalled: false, + }, + "when download fails, and cleanup of non-matching versions fails, should log error and return error": { + cleanNonMatchingVersionsFromDownloadsErrors: []error{nil, errors.New("test non-matching error")}, + expectedLogMessage: "Unable to remove file after verification failure", + downloadArtifactError: errors.New("test download error"), + setupArchiveCleanupError: nil, + setupArchiveCleanupCalled: false, + }, + "when download succeeds, but setting up archive cleanup fails, should return error": { + cleanNonMatchingVersionsFromDownloadsErrors: []error{nil}, + expectedLogMessage: "", + downloadArtifactError: nil, + setupArchiveCleanupError: 
errors.New("test cleanup error"), + setupArchiveCleanupCalled: true, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + log, obs := loggertest.New("test") + + mockArtifactDownloader := &mock_artifactDownloader{} + mockUpgradeCleaner := &mock_upgradeCleaner{} + + upgradeExecutor := &executeUpgrade{ + log: log, + artifactDownloader: mockArtifactDownloader, + upgradeCleaner: mockUpgradeCleaner, } - return nil - } - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } + mockDownloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, errors.New("test download error") - } + pgpBytesConverted := make([]interface{}, len(testValues.pgpBytes)) + for i, v := range testValues.pgpBytes { + pgpBytesConverted[i] = v + } - cleanerCallCount := 0 - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { - cleanerCallCount++ - require.Equal(t, mockDownloadResult, downloadResult) - return nil - } + for _, err := range tc.cleanNonMatchingVersionsFromDownloadsErrors { + mockArtifactDownloader.EXPECT().cleanNonMatchingVersionsFromDownloads(log, testValues.agentInfo.Version()).Return(err).Once() + } + mockArtifactDownloader.EXPECT().downloadArtifact( + ctx, + testValues.parsedVersion, + testValues.sourceURI, + testValues.fleetServerURI, + testValues.upgradeDetails, + testValues.skipVerifyOverride, + testValues.skipDefaultPgp, + pgpBytesConverted..., + ).Return(mockDownloadResult, tc.downloadArtifactError) + + if tc.setupArchiveCleanupCalled { + mockUpgradeCleaner.EXPECT().setupArchiveCleanup(mockDownloadResult).Return(tc.setupArchiveCleanupError) + } - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) - require.Error(t, err) - require.ErrorIs(t, err, errors.New("test download error")) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 2, nonMatchingCallCount) - require.Equal(t, 0, cleanerCallCount) - require.Equal(t, 1, obs.Len()) - require.Equal(t, "Unable to remove file after verification failure", obs.All()[0].Message) - }) + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
- t.Run("when download succeeds, but setting up archive cleanup fails, should return error", func(t *testing.T) { - log, _ := loggertest.New("test") - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } + mockArtifactDownloader.AssertExpectations(t) + mockUpgradeCleaner.AssertExpectations(t) - mockArtifactDownloader := &mockArtifactDownloader{} - mockUpgradeCleaner := &mockUpgradeCleaner{} - upgradeExecutor := &executeUpgrade{ - log: log, - artifactDownloader: mockArtifactDownloader, - upgradeCleaner: mockUpgradeCleaner, - } + if !tc.setupArchiveCleanupCalled { + mockUpgradeCleaner.AssertNotCalled(t, "setupArchiveCleanup") + } - nonMatchingCallCount := 0 - mockArtifactDownloader.cleanNonMatchingVersionsFromDownloadsTestFunc = func(log *logger.Logger, version string) error { - nonMatchingCallCount++ - require.Equal(t, testValues.agentInfo.Version(), version) - return nil - } + if tc.expectedLogMessage != "" { + require.Equal(t, tc.expectedLogMessage, obs.All()[0].Message) + } - mockDownloadResult := download.DownloadResult{ - ArtifactPath: "mockArtifactPath", - ArtifactHashPath: "mockArtifactHashPath", - } - mockArtifactDownloader.dowloadArtifactTestFunc = func(ctx context.Context, parsedVersion *agtversion.ParsedSemVer, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { - require.Equal(t, testValues.parsedVersion, parsedVersion) - require.Equal(t, testValues.sourceURI, sourceURI) - require.Equal(t, testValues.fleetServerURI, fleetServerURI) - require.Equal(t, testValues.upgradeDetails, upgradeDetails) - require.Equal(t, testValues.skipVerifyOverride, skipVerifyOverride) - require.Equal(t, testValues.skipDefaultPgp, skipDefaultPgp) - require.Equal(t, testValues.pgpBytes, pgpBytes) - return mockDownloadResult, nil - } + if tc.downloadArtifactError != nil || tc.setupArchiveCleanupError != nil { + require.Error(t, err) + } else { + require.NoError(t, err) + } - cleanerCallCount := 0 - mockUpgradeCleaner.setupArchiveCleanupTestFunc = func(downloadResult download.DownloadResult) error { - cleanerCallCount++ require.Equal(t, mockDownloadResult, downloadResult) - return errors.New("test cleanup error") - } - - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) 
- require.Error(t, err) - require.ErrorIs(t, err, errors.New("test cleanup error")) - require.Equal(t, mockDownloadResult, downloadResult) - require.Equal(t, 1, nonMatchingCallCount) - require.Equal(t, 1, cleanerCallCount) - }) + }) + } } From 225a951dfacbcc404a107c9177dc74029d18204f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:00:24 +0300 Subject: [PATCH 068/127] enhancement(5235): removed newHash from unpack step result --- internal/pkg/agent/application/upgrade/upgrade_executor.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 54daa37f34a..aab35686b8f 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -61,7 +61,6 @@ type executeUpgrade struct { type unpackStepResult struct { newHome string - newHash string unpackResult } @@ -130,7 +129,6 @@ func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, unpackStepResult := unpackStepResult{ newHome: newHome, - newHash: newHash, unpackResult: unpackRes, } From 812a6fe813f8326fc5308894a4661cad37b6dc12 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:01:00 +0300 Subject: [PATCH 069/127] enhancement(5235): injecting check upgrade fn into unpack step --- internal/pkg/agent/application/upgrade/upgrade_executor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index aab35686b8f..316ec87f942 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -86,7 +86,7 @@ func (u *executeUpgrade) downloadArtifact(ctx context.Context, parsedTargetVersi return downloadResult, u.upgradeCleaner.setupArchiveCleanup(downloadResult) } -func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) { +func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn checkUpgradeFn) (unpackStepResult, error) { upgradeDetails.SetState(details.StateExtracting) metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) @@ -96,7 +96,7 @@ func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, newVersion := u.unpacker.extractAgentVersion(metadata, version) - if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { + if err := checkUpgradeFn(u.log, currentVersion, newVersion, metadata); err != nil { return unpackStepResult{}, fmt.Errorf("cannot upgrade the agent: %w", err) } From bf42cb5d13e110c95f8a2a85dfc310c02dcce426 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:01:52 +0300 Subject: [PATCH 070/127] enhancement(5235): removed testValues from download step test --- .../upgrade/upgrade_executor_test.go | 49 ++++++++----------- 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index bfbc24813cd..286c7a9d4e0 100644 --- 
a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -24,25 +24,14 @@ func TestDownloadArtifactStep(t *testing.T) { ctx := t.Context() parsedVersion, err := agtversion.ParseVersion("9.1.0") require.NoError(t, err) - testValues := struct { - parsedVersion *agtversion.ParsedSemVer - agentInfo *info.AgentInfo - sourceURI string - fleetServerURI string - upgradeDetails *details.Details - skipVerifyOverride bool - skipDefaultPgp bool - pgpBytes []string - }{ - parsedVersion: parsedVersion, - agentInfo: &info.AgentInfo{}, - sourceURI: "mockURI", - fleetServerURI: "mockFleetServerURI", - upgradeDetails: &details.Details{}, - skipVerifyOverride: false, - skipDefaultPgp: false, - pgpBytes: []string{"mockPGPBytes"}, - } + + agentInfo := &info.AgentInfo{} + sourceURI := "mockURI" + fleetServerURI := "mockFleetServerURI" + upgradeDetails := &details.Details{} + skipVerifyOverride := false + skipDefaultPgp := false + pgpBytes := []string{"mockPGPBytes"} testCases := map[string]downloadStepTestCase{ "should download artifact and setup archive cleanup": { @@ -100,22 +89,22 @@ func TestDownloadArtifactStep(t *testing.T) { ArtifactHashPath: "mockArtifactHashPath", } - pgpBytesConverted := make([]interface{}, len(testValues.pgpBytes)) - for i, v := range testValues.pgpBytes { + pgpBytesConverted := make([]interface{}, len(pgpBytes)) + for i, v := range pgpBytes { pgpBytesConverted[i] = v } for _, err := range tc.cleanNonMatchingVersionsFromDownloadsErrors { - mockArtifactDownloader.EXPECT().cleanNonMatchingVersionsFromDownloads(log, testValues.agentInfo.Version()).Return(err).Once() + mockArtifactDownloader.EXPECT().cleanNonMatchingVersionsFromDownloads(log, agentInfo.Version()).Return(err).Once() } mockArtifactDownloader.EXPECT().downloadArtifact( ctx, - testValues.parsedVersion, - testValues.sourceURI, - testValues.fleetServerURI, - testValues.upgradeDetails, - testValues.skipVerifyOverride, - testValues.skipDefaultPgp, + parsedVersion, + sourceURI, + fleetServerURI, + upgradeDetails, + skipVerifyOverride, + skipDefaultPgp, pgpBytesConverted..., ).Return(mockDownloadResult, tc.downloadArtifactError) @@ -123,11 +112,13 @@ func TestDownloadArtifactStep(t *testing.T) { mockUpgradeCleaner.EXPECT().setupArchiveCleanup(mockDownloadResult).Return(tc.setupArchiveCleanupError) } - downloadResult, err := upgradeExecutor.downloadArtifact(ctx, testValues.parsedVersion, testValues.agentInfo, testValues.sourceURI, testValues.fleetServerURI, testValues.upgradeDetails, testValues.skipVerifyOverride, testValues.skipDefaultPgp, testValues.pgpBytes...) + downloadResult, err := upgradeExecutor.downloadArtifact(ctx, parsedVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
mockArtifactDownloader.AssertExpectations(t) mockUpgradeCleaner.AssertExpectations(t) + require.Equal(t, details.StateDownloading, upgradeDetails.State) + if !tc.setupArchiveCleanupCalled { mockUpgradeCleaner.AssertNotCalled(t, "setupArchiveCleanup") } From 342a6f6179a8bc37d421c2fe67da4a2b465c9904 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:02:36 +0300 Subject: [PATCH 071/127] enhancement(5235): added unpack step tests --- .../upgrade/upgrade_executor_test.go | 368 +++++++++++++++++- 1 file changed, 367 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 286c7a9d4e0..90f493f8649 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -1,12 +1,16 @@ package upgrade import ( + "errors" + goerrors "errors" + "fmt" + "path/filepath" "testing" "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" - "github.com/elastic/elastic-agent/internal/pkg/agent/errors" + "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" "github.com/stretchr/testify/require" @@ -137,3 +141,365 @@ func TestDownloadArtifactStep(t *testing.T) { }) } } + +type unpackStepTestCase struct { + unpackResult unpackResult + unpackStepResult unpackStepResult + getPackageMetadataError error + detectFlavorError error + unpackError error + setupUnpackCleanupError error + diskSpaceError error + checkUpgradeError error + unpackStepError error + calledUnpackerFuncs []string + uncalledUnpackerFuncs []string + cleanerCalled bool +} + +func TestUnpackArtifactStep(t *testing.T) { + currentVersion := agentVersion{ + version: "8.10.0", + } + + downloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + version := "9.1.0" + topPath := "mockTopPath" + flavor := "mockFlavor" + dataPath := "mockDataPath" + currentHome := "mockCurrentHome" + upgradeDetails := &details.Details{} + + testCases := map[string]unpackStepTestCase{ + "should unpack artifact": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: filepath.Join(topPath, "mockVersionedHome"), + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: nil, + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: nil, + cleanerCalled: true, + }, + "when getting package metadata fails, should return error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: errors.New("test error"), + detectFlavorError: nil, + unpackError: nil, + setupUnpackCleanupError: nil, + 
checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata"}, + uncalledUnpackerFuncs: []string{"extractAgentVersion", "detectFlavor", "unpack"}, + unpackStepError: fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, errors.New("test error")), + cleanerCalled: false, + }, + "when checking upgrade fails, should return error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: nil, + setupUnpackCleanupError: nil, + checkUpgradeError: errors.New("test error"), + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion"}, + uncalledUnpackerFuncs: []string{"detectFlavor", "unpack"}, + unpackStepError: fmt.Errorf("cannot upgrade the agent: %w", errors.New("test error")), + cleanerCalled: false, + }, + "when detecting flavor fails, should unpack artifact with default flavor": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: filepath.Join(topPath, "mockVersionedHome"), + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: errors.New("test error"), + unpackError: nil, + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: nil, + cleanerCalled: true, + }, + "when unpacking fails, should return error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: filepath.Join(topPath, "mockVersionedHome"), + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: errors.New("test error"), + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: errors.New("test error"), + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: errors.New("test error"), + cleanerCalled: true, + }, + "if versioned home is unknown, should return error": { + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: nil, + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: errors.New("unknown versioned home"), + cleanerCalled: false, + }, + "if hash is unknown, should return error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: nil, + 
setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: errors.New("unknown hash"), + cleanerCalled: false, + }, + "if setup unpack cleanup fails, should return error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: filepath.Join(topPath, "mockVersionedHome"), + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: nil, + setupUnpackCleanupError: errors.New("test error"), + checkUpgradeError: nil, + diskSpaceError: nil, + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: errors.New("test error"), + cleanerCalled: true, + }, + "if unpack fails, and versioned home is unknown, should return combined error": { + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: errors.New("test error"), + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: errors.New("test error"), + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: goerrors.Join(errors.New("test error"), errors.New("unknown versioned home")), + cleanerCalled: false, + }, + "if unpack fails, and hash is unknown, should return combined error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "", + }, + unpackStepResult: unpackStepResult{ + newHome: "", + unpackResult: unpackResult{ + VersionedHome: "", + Hash: "", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: errors.New("test error"), + setupUnpackCleanupError: nil, + checkUpgradeError: nil, + diskSpaceError: errors.New("test error"), + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: goerrors.Join(errors.New("test error"), errors.New("unknown hash")), + cleanerCalled: false, + }, + "if unpack fails, and setup unpack cleanup fails, should return combined error": { + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + unpackStepResult: unpackStepResult{ + newHome: filepath.Join(topPath, "mockVersionedHome"), + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + }, + getPackageMetadataError: nil, + detectFlavorError: nil, + unpackError: errors.New("test unpack error"), + setupUnpackCleanupError: errors.New("test setup unpack cleanup error"), + checkUpgradeError: nil, + diskSpaceError: errors.New("test unpack error"), + calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, + uncalledUnpackerFuncs: []string{}, + unpackStepError: goerrors.Join(errors.New("test unpack error"), errors.New("test setup unpack cleanup error")), + cleanerCalled: true, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + log, _ := loggertest.New("test") 
+ + mockUnpacker := &mock_unpacker{} + mockUpgradeCleaner := &mock_upgradeCleaner{} + + mockMetadata := packageMetadata{} + newVersion := agentVersion{} + detectedFlavor := "mockDetectedFlavor" + + newHome := filepath.Join(topPath, tc.unpackResult.VersionedHome) + + checkUpgradeFn := func(log *logger.Logger, currentVersion, newVersion agentVersion, metadata packageMetadata) error { + return tc.checkUpgradeError + } + + func() { + for _, calledFunc := range tc.calledUnpackerFuncs { + switch calledFunc { + case "getPackageMetadata": + mockUnpacker.EXPECT().getPackageMetadata(downloadResult.ArtifactPath).Return(mockMetadata, tc.getPackageMetadataError) + case "extractAgentVersion": + mockUnpacker.EXPECT().extractAgentVersion(mockMetadata, version).Return(newVersion) + case "detectFlavor": + mockUnpacker.EXPECT().detectFlavor(topPath, "").Return(detectedFlavor, tc.detectFlavorError) + case "unpack": + mockUnpacker.EXPECT().unpack(version, downloadResult.ArtifactPath, dataPath, detectedFlavor).Return(tc.unpackResult, tc.unpackError) + } + } + }() + if tc.cleanerCalled { + mockUpgradeCleaner.EXPECT().setupUnpackCleanup(newHome, currentHome).Return(tc.setupUnpackCleanupError) + } + + var diskSpaceErrorCalledWith error + upgradeExecutor := &executeUpgrade{ + log: log, + unpacker: mockUnpacker, + upgradeCleaner: mockUpgradeCleaner, + diskSpaceErrorFunc: func(err error) error { + diskSpaceErrorCalledWith = err + return err + }, + } + + unpackStepRes, err := upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn) + + mockUnpacker.AssertExpectations(t) + + if tc.cleanerCalled { + mockUpgradeCleaner.AssertExpectations(t) + } else { + mockUpgradeCleaner.AssertNotCalled(t, "setupUnpackCleanup", "expected %v to not be called", tc.cleanerCalled) + } + + for _, uncalledFunc := range tc.uncalledUnpackerFuncs { + mockUnpacker.AssertNotCalled(t, uncalledFunc, "expected %v to not be called", uncalledFunc) + } + + require.Equal(t, details.StateExtracting, upgradeDetails.State, "expected state to be %v, got %v", details.StateExtracting, upgradeDetails.State) + + if tc.unpackStepError != nil { + require.Equal(t, tc.unpackStepError.Error(), err.Error(), "expected unpack step error to be %v, got %v", tc.unpackStepError, err) + } else { + require.NoError(t, err) + } + + require.Equal(t, tc.unpackStepResult, unpackStepRes, "expected unpack step result to be %v, got %v", tc.unpackStepResult, unpackStepRes) + + require.Equal(t, tc.diskSpaceError, diskSpaceErrorCalledWith, "expected disk space error to be %v, got %v", tc.diskSpaceError, diskSpaceErrorCalledWith) + }) + } +} From 117d691bb7721e374d74bacbaea8a7f031ba00a5 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:05:20 +0300 Subject: [PATCH 072/127] enhancement(5235): removed import type alias --- .../pkg/agent/application/upgrade/upgrade_executor_test.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 90f493f8649..f6afede37b6 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -2,7 +2,6 @@ package upgrade import ( "errors" - goerrors "errors" "fmt" "path/filepath" "testing" @@ -378,7 +377,7 @@ func TestUnpackArtifactStep(t *testing.T) { diskSpaceError: errors.New("test error"), 
calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, uncalledUnpackerFuncs: []string{}, - unpackStepError: goerrors.Join(errors.New("test error"), errors.New("unknown versioned home")), + unpackStepError: errors.Join(errors.New("test error"), errors.New("unknown versioned home")), cleanerCalled: false, }, "if unpack fails, and hash is unknown, should return combined error": { @@ -401,7 +400,7 @@ func TestUnpackArtifactStep(t *testing.T) { diskSpaceError: errors.New("test error"), calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, uncalledUnpackerFuncs: []string{}, - unpackStepError: goerrors.Join(errors.New("test error"), errors.New("unknown hash")), + unpackStepError: errors.Join(errors.New("test error"), errors.New("unknown hash")), cleanerCalled: false, }, "if unpack fails, and setup unpack cleanup fails, should return combined error": { @@ -424,7 +423,7 @@ func TestUnpackArtifactStep(t *testing.T) { diskSpaceError: errors.New("test unpack error"), calledUnpackerFuncs: []string{"getPackageMetadata", "extractAgentVersion", "detectFlavor", "unpack"}, uncalledUnpackerFuncs: []string{}, - unpackStepError: goerrors.Join(errors.New("test unpack error"), errors.New("test setup unpack cleanup error")), + unpackStepError: errors.Join(errors.New("test unpack error"), errors.New("test setup unpack cleanup error")), cleanerCalled: true, }, } From 2634a3dc27fd4d5a50b23b2fc936bcb7a82c593a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:07:47 +0300 Subject: [PATCH 073/127] enhancement(5235): added check upgrade fn --- internal/pkg/agent/application/upgrade/upgrade.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 65747eb687d..1358e92fbd1 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -93,9 +93,11 @@ type upgradeCleaner interface { cleanup(err error) error } +type checkUpgradeFn func(log *logger.Logger, currentVersion, newVersion agentVersion, metadata packageMetadata) error + type upgradeExecutor interface { downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) - unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion) (unpackStepResult, error) + unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn checkUpgradeFn) (unpackStepResult, error) replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } From 
9e9ecc4d24fe963f184b2913a36ca320206ef201 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:14:14 +0300 Subject: [PATCH 074/127] enhancement(5235): remove commented struct fields --- .../pkg/agent/application/upgrade/upgrade.go | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 1358e92fbd1..2bb28db995d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -104,19 +104,13 @@ type upgradeExecutor interface { // Upgrader performs an upgrade type Upgrader struct { - log *logger.Logger - settings *artifact.Config - agentInfo info.Agent - upgradeable bool - fleetServerURI string - markerWatcher MarkerWatcher - upgradeCleaner upgradeCleaner - // diskSpaceErrorFunc func(error) error - // artifactDownloader artifactDownloader - // unpacker unpacker - // relinker relinker - // watcher watcher - // directoryCopier agentDirectoryCopier + log *logger.Logger + settings *artifact.Config + agentInfo info.Agent + upgradeable bool + fleetServerURI string + markerWatcher MarkerWatcher + upgradeCleaner upgradeCleaner upgradeExecutor upgradeExecutor } @@ -152,12 +146,6 @@ func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.A upgradeable: IsUpgradeable(), markerWatcher: newMarkerFileWatcher(markerFilePath(paths.Data()), log), upgradeCleaner: upgradeCleaner, - // diskSpaceErrorFunc: upgradeErrors.ToDiskSpaceErrorFunc(log), - // artifactDownloader: newUpgradeArtifactDownloader(log, settings, downloaderFactoryProvider), - // unpacker: &upgradeUnpacker{log: log}, - // relinker: &upgradeRelinker{}, - // watcher: &upgradeWatcher{}, - // directoryCopier: &directoryCopier{}, upgradeExecutor: &executeUpgrade{ log: log, upgradeCleaner: upgradeCleaner, From d04f850ebed2fc14faa4ba6dfcc43f111a130e28 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:15:01 +0300 Subject: [PATCH 075/127] enhancement(5235): removed commented code --- .../pkg/agent/application/upgrade/upgrade.go | 215 ------------------ 1 file changed, 215 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 2bb28db995d..4ea5510a7d4 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -259,223 +259,8 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { return u.newUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) - // return u.oldUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
} -// func (u *Upgrader) oldUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { -// defer func() { -// if err != nil { -// cleanupErr := u.upgradeCleaner.cleanup(err) -// if cleanupErr != nil { -// u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) -// err = goerrors.Join(err, cleanupErr) -// } -// } -// }() - -// u.log.Infow("Upgrading agent", "version", version, "source_uri", sourceURI) - -// currentVersion := agentVersion{ -// version: release.Version(), -// snapshot: release.Snapshot(), -// hash: release.Commit(), -// fips: release.FIPSDistribution(), -// } - -// // Compare versions and exit before downloading anything if the upgrade -// // is for the same release version that is currently running -// if isSameReleaseVersion(u.log, currentVersion, version) { -// u.log.Warnf("Upgrade action skipped because agent is already at version %s", currentVersion) -// return nil, ErrUpgradeSameVersion -// } - -// // Inform the Upgrade Marker Watcher that we've started upgrading. Note that this -// // is only possible to do in-memory since, today, the process that's initiating -// // the upgrade is the same as the Agent process in which the Upgrade Marker Watcher is -// // running. If/when, in the future, the process initiating the upgrade is separated -// // from the Agent process in which the Upgrade Marker Watcher is running, such in-memory -// // communication will need to be replaced with inter-process communication (e.g. via -// // a file, e.g. the Upgrade Marker file or something else). -// u.markerWatcher.SetUpgradeStarted() - -// span, ctx := apm.StartSpan(ctx, "upgrade", "app.internal") -// defer span.End() - -// err = u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()) -// if err != nil { -// u.log.Errorw("Unable to clean downloads before update", "error.message", err, "downloads.path", paths.Downloads()) -// } - -// det.SetState(details.StateDownloading) - -// sourceURI = u.sourceURI(sourceURI) - -// parsedVersion, err := agtversion.ParseVersion(version) -// if err != nil { -// return nil, fmt.Errorf("error parsing version %q: %w", version, err) -// } - -// downloadResult, err := u.artifactDownloader.downloadArtifact(ctx, parsedVersion, sourceURI, u.fleetServerURI, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) -// if err != nil { -// // Run the same pre-upgrade cleanup task to get rid of any newly downloaded files -// // This may have an issue if users are upgrading to the same version number. 
-// if dErr := u.artifactDownloader.cleanNonMatchingVersionsFromDownloads(u.log, u.agentInfo.Version()); dErr != nil { -// u.log.Errorw("Unable to remove file after verification failure", "error.message", dErr) -// } - -// return nil, err -// } - -// if err := u.upgradeCleaner.setupArchiveCleanup(downloadResult); err != nil { -// return nil, err -// } - -// det.SetState(details.StateExtracting) - -// metadata, err := u.unpacker.getPackageMetadata(downloadResult.ArtifactPath) -// if err != nil { -// return nil, fmt.Errorf("reading metadata for elastic agent version %s package %q: %w", version, downloadResult.ArtifactPath, err) -// } - -// newVersion := u.unpacker.extractAgentVersion(metadata, version) - -// if err := checkUpgrade(u.log, currentVersion, newVersion, metadata); err != nil { // pass this as param to unpack step in upgrade executor -// return nil, fmt.Errorf("cannot upgrade the agent: %w", err) -// } - -// u.log.Infow("Unpacking agent package", "version", newVersion) - -// // Nice to have: add check that no archive files end up in the current versioned home -// // default to no flavor to avoid breaking behavior - -// // no default flavor, keep everything in case flavor is not specified -// // in case of error fallback to keep-all -// detectedFlavor, err := u.unpacker.detectFlavor(paths.Top(), "") -// if err != nil { -// u.log.Warnf("error encountered when detecting used flavor with top path %q: %w", paths.Top(), err) -// } -// u.log.Debugf("detected used flavor: %q", detectedFlavor) - -// unpackRes, unpackErr := u.unpacker.unpack(version, downloadResult.ArtifactPath, paths.Data(), detectedFlavor) -// err = goerrors.Join(err, u.diskSpaceErrorFunc(unpackErr)) - -// if unpackRes.VersionedHome == "" { -// err = goerrors.Join(err, fmt.Errorf("unknown versioned home")) -// return nil, err -// } - -// newHash := unpackRes.Hash -// if newHash == "" { -// err = goerrors.Join(err, fmt.Errorf("unknown hash")) -// return nil, err -// } - -// newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) - -// unpackCleanupSetupErr := u.upgradeCleaner.setupUnpackCleanup(newHome, paths.Home()) -// err = goerrors.Join(err, unpackCleanupSetupErr) - -// if err != nil { -// return nil, err -// } - -// err = u.directoryCopier.copyActionStore(u.log, newHome) -// if err != nil { -// err = fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) -// return nil, err -// } - -// newRunPath := filepath.Join(newHome, "run") -// oldRunPath := filepath.Join(paths.Run()) - -// err = u.directoryCopier.copyRunDirectory(u.log, oldRunPath, newRunPath) -// if err != nil { -// err = fmt.Errorf("failed to copy run directory: %w", u.diskSpaceErrorFunc(err)) -// return nil, err -// } - -// det.SetState(details.StateReplacing) - -// // create symlink to the /elastic-agent -// hashedDir := unpackRes.VersionedHome -// u.log.Infof("hashedDir: %s", hashedDir) - -// symlinkPath := filepath.Join(paths.Top(), agentName) -// u.log.Infof("symlinkPath: %s", symlinkPath) - -// // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. 
-// newPath := paths.BinaryPath(filepath.Join(paths.Top(), hashedDir), agentName) -// u.log.Infof("newPath: %s", newPath) - -// currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) -// if err != nil { -// return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) -// } - -// if symlinkCleanupSetupErr := u.upgradeCleaner.setupSymlinkCleanup(u.relinker.changeSymlink, paths.Top(), currentVersionedHome, agentName); symlinkCleanupSetupErr != nil { -// err = goerrors.Join(err, symlinkCleanupSetupErr) -// } - -// u.log.Infof("currentVersionedHome: %s", currentVersionedHome) - -// err = u.relinker.changeSymlink(u.log, paths.Top(), symlinkPath, newPath) -// if err != nil { -// return nil, err -// } - -// // We rotated the symlink successfully: prepare the current and previous agent installation details for the update marker -// // In update marker the `current` agent install is the one where the symlink is pointing (the new one we didn't start yet) -// // while the `previous` install is the currently executing elastic-agent that is no longer reachable via the symlink. -// // After the restart at the end of the function, everything lines up correctly. -// current := agentInstall{ -// parsedVersion: parsedVersion, -// version: version, -// hash: unpackRes.Hash, -// versionedHome: unpackRes.VersionedHome, -// } - -// previousParsedVersion := currentagtversion.GetParsedAgentPackageVersion() -// previous := agentInstall{ -// parsedVersion: previousParsedVersion, -// version: release.VersionWithSnapshot(), -// hash: release.Commit(), -// versionedHome: currentVersionedHome, -// } - -// err = u.watcher.markUpgrade(u.log, -// paths.Data(), // data dir to place the marker in -// current, // new agent version data -// previous, // old agent version data -// action, det, OUTCOME_UPGRADE) -// if err != nil { -// return nil, err -// } - -// watcherExecutable := u.watcher.selectWatcherExecutable(paths.Top(), previous, current) - -// watcherCmd, err := InvokeWatcher(u.log, watcherExecutable) -// if err != nil { -// return nil, err -// } - -// err = u.watcher.waitForWatcher(ctx, u.log, markerFilePath(paths.Data()), watcherMaxWaitTime, context.WithTimeout) -// if err != nil { -// err = goerrors.Join(err, watcherCmd.Process.Kill()) -// return nil, err -// } - -// cb := shutdownCallback(u.log, paths.Home(), release.Version(), version, filepath.Join(paths.Top(), unpackRes.VersionedHome)) - -// // Clean everything from the downloads dir -// u.log.Infow("Removing downloads directory", "file.path", paths.Downloads()) -// err = os.RemoveAll(paths.Downloads()) -// if err != nil { -// u.log.Errorw("Unable to clean downloads after update", "error.message", err, "file.path", paths.Downloads()) -// } - -// return cb, nil -// } - func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { if err != nil { From fe339e55ac6ccfe06100251ceab5d3f3b4c8b55d Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 00:15:30 +0300 Subject: [PATCH 076/127] enhancement(5235): injecting check upgrade fn in upgrade function --- internal/pkg/agent/application/upgrade/upgrade.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 
4ea5510a7d4..3493d24592a 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -312,7 +312,7 @@ func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI str return nil, err } - unpackRes, err := u.upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, paths.Top(), "", paths.Data(), paths.Home(), det, currentVersion) + unpackRes, err := u.upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, paths.Top(), "", paths.Data(), paths.Home(), det, currentVersion, checkUpgrade) if err != nil { return nil, err } From 37c48a1affcb1b6a6f08b98ff8bf72398714901d Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:29:39 +0300 Subject: [PATCH 077/127] enhancement(5235): removed logger from replace old with new function signature --- internal/pkg/agent/application/upgrade/upgrade.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 3493d24592a..fcdadcbd032 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -98,7 +98,7 @@ type checkUpgradeFn func(log *logger.Logger, currentVersion, newVersion agentVer type upgradeExecutor interface { downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn checkUpgradeFn) (unpackStepResult, error) - replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error + replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } @@ -332,7 +332,7 @@ func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI str return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) } - err = u.upgradeExecutor.replaceOldWithNew(u.log, unpackRes, currentVersionedHome, paths.Top(), agentName, paths.Home(), oldRunPath, newRunPath, symlinkPath, newPath, det) + err = u.upgradeExecutor.replaceOldWithNew(unpackRes, currentVersionedHome, paths.Top(), agentName, paths.Home(), oldRunPath, newRunPath, symlinkPath, newPath, det) if err != nil { return nil, err } From 7add803cac9b3e6f7a1bfee62c33b1dd5cfe6555 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:30:16 +0300 Subject: [PATCH 078/127] enhancement(5235): removed logger from replace old with new func definition --- 
internal/pkg/agent/application/upgrade/upgrade_executor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 316ec87f942..935ad66deba 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -139,7 +139,7 @@ func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, return unpackStepResult, unpackErr } -func (u *executeUpgrade) replaceOldWithNew(log *logger.Logger, unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { +func (u *executeUpgrade) replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { if err := u.directoryCopier.copyActionStore(u.log, unpackStepResult.newHome); err != nil { return fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) } From 07a3ecefefb2bcf093f4eb2ba613df38d1a685cd Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:30:48 +0300 Subject: [PATCH 079/127] enhancement(5235): added replace old with new unit tests --- .../upgrade/upgrade_executor_test.go | 181 ++++++++++++++++++ 1 file changed, 181 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index f6afede37b6..534bacfc435 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -502,3 +502,184 @@ func TestUnpackArtifactStep(t *testing.T) { }) } } + +type replaceOldWithNewTestCase struct { + copyActionStoreError error + copyRunDirectoryError error + setupSymlinkCleanupError error + setupSymlinkCleanupCalled bool + changeSymlinkError error + changeSymlinkCalled bool + upgradeDetailsStateSet bool + calledDirectoryCopierFuncs []string + uncalledDirectoryCopierFuncs []string + diskSpaceErrorFuncCalled bool + diskSpaceError error + expectedError error +} + +func TestReplaceOldWithNewStep(t *testing.T) { + log, _ := loggertest.New("test") + + unpackStepResult := unpackStepResult{ + newHome: "mockNewHome", + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + } + currentVersionedHome := "mockCurrentVersionedHome" + topPath := "mockTopPath" + agentName := "mockAgentName" + currentHome := "mockCurrentHome" + oldRunPath := "mockOldRunPath" + newRunPath := "mockNewRunPath" + symlinkPath := "mockSymlinkPath" + newBinPath := "mockNewBinPath" + + testCases := map[string]replaceOldWithNewTestCase{ + "should migrate action store and run directory and change symlink": { + copyActionStoreError: nil, + copyRunDirectoryError: nil, + setupSymlinkCleanupError: nil, + setupSymlinkCleanupCalled: true, + changeSymlinkError: nil, + changeSymlinkCalled: true, + upgradeDetailsStateSet: true, + calledDirectoryCopierFuncs: []string{"copyActionStore", "copyRunDirectory"}, + uncalledDirectoryCopierFuncs: []string{}, + diskSpaceErrorFuncCalled: false, + diskSpaceError: nil, + expectedError: nil, + }, + "if copying action store fails, should return error": { + copyActionStoreError: errors.New("test error"), + copyRunDirectoryError: nil, + 
setupSymlinkCleanupError: nil, + setupSymlinkCleanupCalled: false, + changeSymlinkError: nil, + changeSymlinkCalled: false, + upgradeDetailsStateSet: false, + calledDirectoryCopierFuncs: []string{"copyActionStore"}, + uncalledDirectoryCopierFuncs: []string{"copyRunDirectory"}, + diskSpaceErrorFuncCalled: true, + diskSpaceError: errors.New("test error"), + expectedError: fmt.Errorf("failed to copy action store: %w", errors.New("test error")), + }, + "if copying run directory fails, should return error": { + copyActionStoreError: nil, + copyRunDirectoryError: errors.New("test error"), + setupSymlinkCleanupError: nil, + setupSymlinkCleanupCalled: false, + changeSymlinkError: nil, + changeSymlinkCalled: false, + upgradeDetailsStateSet: false, + calledDirectoryCopierFuncs: []string{"copyActionStore", "copyRunDirectory"}, + uncalledDirectoryCopierFuncs: []string{}, + diskSpaceErrorFuncCalled: true, + diskSpaceError: errors.New("test error"), + expectedError: fmt.Errorf("failed to copy run directory: %w", errors.New("test error")), + }, + "if setting up symlink cleanup fails, should return error": { + copyActionStoreError: nil, + copyRunDirectoryError: nil, + setupSymlinkCleanupError: errors.New("test error"), + setupSymlinkCleanupCalled: true, + changeSymlinkError: nil, + changeSymlinkCalled: false, + upgradeDetailsStateSet: true, + calledDirectoryCopierFuncs: []string{"copyActionStore", "copyRunDirectory"}, + uncalledDirectoryCopierFuncs: []string{}, + diskSpaceErrorFuncCalled: false, + diskSpaceError: nil, + expectedError: fmt.Errorf("error setting up symlink cleanup: %w", errors.New("test error")), + }, + "if changing symlink fails, should return error": { + copyActionStoreError: nil, + copyRunDirectoryError: nil, + setupSymlinkCleanupError: nil, + setupSymlinkCleanupCalled: true, + changeSymlinkError: errors.New("test error"), + changeSymlinkCalled: true, + upgradeDetailsStateSet: true, + calledDirectoryCopierFuncs: []string{"copyActionStore", "copyRunDirectory"}, + uncalledDirectoryCopierFuncs: []string{}, + diskSpaceErrorFuncCalled: false, + diskSpaceError: nil, + expectedError: errors.New("test error"), + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + upgradeDetails := &details.Details{} + + mockDirectoryCopier := &mock_agentDirectoryCopier{} + mockUpgradeCleaner := &mock_upgradeCleaner{} + mockRelinker := &mock_relinker{} + + for _, calledFunc := range tc.calledDirectoryCopierFuncs { + switch calledFunc { + case "copyActionStore": + mockDirectoryCopier.EXPECT().copyActionStore(log, unpackStepResult.newHome).Return(tc.copyActionStoreError) + case "copyRunDirectory": + mockDirectoryCopier.EXPECT().copyRunDirectory(log, oldRunPath, newRunPath).Return(tc.copyRunDirectoryError) + } + } + + if tc.setupSymlinkCleanupCalled { + mockUpgradeCleaner.EXPECT().setupSymlinkCleanup(mock.AnythingOfType("upgrade.changeSymlinkFunc"), topPath, currentVersionedHome, agentName).Return(tc.setupSymlinkCleanupError) + } + + if tc.changeSymlinkCalled { + mockRelinker.EXPECT().changeSymlink(log, topPath, symlinkPath, newBinPath).Return(tc.changeSymlinkError) + } + + var diskSpaceErrorCalledWith error + diskSpaceErrorFuncCalled := false + upgradeExecutor := &executeUpgrade{ + log: log, + upgradeCleaner: mockUpgradeCleaner, + directoryCopier: mockDirectoryCopier, + relinker: mockRelinker, + diskSpaceErrorFunc: func(err error) error { + diskSpaceErrorFuncCalled = true + diskSpaceErrorCalledWith = err + return err + }, + } + + err := 
upgradeExecutor.replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) + + mockDirectoryCopier.AssertExpectations(t) + + if tc.setupSymlinkCleanupCalled { + mockUpgradeCleaner.AssertExpectations(t) + } else { + mockUpgradeCleaner.AssertNotCalled(t, "setupSymlinkCleanup", "expected setupSymlinkCleanup to not be called") + } + + if tc.changeSymlinkCalled { + mockRelinker.AssertExpectations(t) + } else { + mockRelinker.AssertNotCalled(t, "changeSymlink", "expected changeSymlink to not be called") + } + + if tc.upgradeDetailsStateSet { + require.Equal(t, details.StateReplacing, upgradeDetails.State, "expected state to be %v, got %v", details.StateReplacing, upgradeDetails.State) + } else { + require.Empty(t, upgradeDetails.State, "expected state to be empty, got %v", upgradeDetails.State) + } + + require.Equal(t, tc.diskSpaceError, diskSpaceErrorCalledWith, "expected disk space error to be %v, got %v", tc.diskSpaceError, diskSpaceErrorCalledWith) + require.Equal(t, tc.diskSpaceErrorFuncCalled, diskSpaceErrorFuncCalled, "expected disk space error func to be called") + + if tc.expectedError != nil { + require.Equal(t, tc.expectedError.Error(), err.Error(), "expected error to be %s, got %v", tc.expectedError, err) + return + } + + require.NoError(t, err, "expected no error, got %v", err) + }) + } +} From 33e0114dd257a7e9ec6d559df6aeb6d98088becc Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:31:44 +0300 Subject: [PATCH 080/127] enhancement(5235): remove unnecessary func encapsulation --- .../pkg/agent/application/upgrade/upgrade_executor_test.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 534bacfc435..cf69f4511c4 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -445,7 +445,6 @@ func TestUnpackArtifactStep(t *testing.T) { return tc.checkUpgradeError } - func() { for _, calledFunc := range tc.calledUnpackerFuncs { switch calledFunc { case "getPackageMetadata": @@ -458,7 +457,7 @@ func TestUnpackArtifactStep(t *testing.T) { mockUnpacker.EXPECT().unpack(version, downloadResult.ArtifactPath, dataPath, detectedFlavor).Return(tc.unpackResult, tc.unpackError) } } - }() + if tc.cleanerCalled { mockUpgradeCleaner.EXPECT().setupUnpackCleanup(newHome, currentHome).Return(tc.setupUnpackCleanupError) } From c3893e77e5d3e958e71cda7e48e2bc9a790d386c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:33:07 +0300 Subject: [PATCH 081/127] enhancement(5235): removed unnecessary func encapsulation --- .../upgrade/upgrade_executor_test.go | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index cf69f4511c4..17629178bb9 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -445,18 +445,18 @@ func TestUnpackArtifactStep(t *testing.T) { return tc.checkUpgradeError } - for _, calledFunc := range tc.calledUnpackerFuncs { - switch calledFunc { - case "getPackageMetadata": - mockUnpacker.EXPECT().getPackageMetadata(downloadResult.ArtifactPath).Return(mockMetadata, 
tc.getPackageMetadataError) - case "extractAgentVersion": - mockUnpacker.EXPECT().extractAgentVersion(mockMetadata, version).Return(newVersion) - case "detectFlavor": - mockUnpacker.EXPECT().detectFlavor(topPath, "").Return(detectedFlavor, tc.detectFlavorError) - case "unpack": - mockUnpacker.EXPECT().unpack(version, downloadResult.ArtifactPath, dataPath, detectedFlavor).Return(tc.unpackResult, tc.unpackError) - } + for _, calledFunc := range tc.calledUnpackerFuncs { + switch calledFunc { + case "getPackageMetadata": + mockUnpacker.EXPECT().getPackageMetadata(downloadResult.ArtifactPath).Return(mockMetadata, tc.getPackageMetadataError) + case "extractAgentVersion": + mockUnpacker.EXPECT().extractAgentVersion(mockMetadata, version).Return(newVersion) + case "detectFlavor": + mockUnpacker.EXPECT().detectFlavor(topPath, "").Return(detectedFlavor, tc.detectFlavorError) + case "unpack": + mockUnpacker.EXPECT().unpack(version, downloadResult.ArtifactPath, dataPath, detectedFlavor).Return(tc.unpackResult, tc.unpackError) } + } if tc.cleanerCalled { mockUpgradeCleaner.EXPECT().setupUnpackCleanup(newHome, currentHome).Return(tc.setupUnpackCleanupError) From e7499364dd05805785b0526c376fd423bad1d1b1 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:34:45 +0300 Subject: [PATCH 082/127] enhancement(5235): added import --- internal/pkg/agent/application/upgrade/upgrade_executor_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 17629178bb9..7b2cfcc2a4b 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -12,6 +12,7 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" ) From e495f8ef93eec9ea2aebad3ef40f619a82d5b077 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 13:37:13 +0300 Subject: [PATCH 083/127] enhancement(5235): removed logger from watchNewAgent function signature --- internal/pkg/agent/application/upgrade/upgrade.go | 2 +- internal/pkg/agent/application/upgrade/upgrade_executor.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index fcdadcbd032..f079d320056 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -99,7 +99,7 @@ type upgradeExecutor interface { downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn checkUpgradeFn) (unpackStepResult, error) replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error - watchNewAgent(ctx context.Context, log 
*logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error + watchNewAgent(ctx context.Context, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } // Upgrader performs an upgrade diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 316ec87f942..935ad66deba 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -157,7 +157,7 @@ func (u *executeUpgrade) replaceOldWithNew(unpackStepResult unpackStepResult, cu return u.relinker.changeSymlink(u.log, topPath, symlinkPath, newBinPath) } -func (u *executeUpgrade) watchNewAgent(ctx context.Context, log *logger.Logger, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { +func (u *executeUpgrade) watchNewAgent(ctx context.Context, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { if err := u.watcher.markUpgrade(u.log, dataPath, // data dir to place the marker in newAgentInstall, // new agent version data From adb70f55c283b8901d6a6e2f0f83f123dc771107 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 14:11:44 +0300 Subject: [PATCH 084/127] enhancement(5235): removed log from watch new agent function call --- internal/pkg/agent/application/upgrade/upgrade.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index f079d320056..cd6b23b63e0 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -356,7 +356,7 @@ func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI str return nil, err } - err = u.upgradeExecutor.watchNewAgent(ctx, u.log, markerFilePath(paths.Data()), paths.Top(), paths.Data(), watcherMaxWaitTime, context.WithTimeout, current, previous, action, det, OUTCOME_UPGRADE) + err = u.upgradeExecutor.watchNewAgent(ctx, markerFilePath(paths.Data()), paths.Top(), paths.Data(), watcherMaxWaitTime, context.WithTimeout, current, previous, action, det, OUTCOME_UPGRADE) if err != nil { return nil, err } From 00dc68ad10a5a98ac30298072754254f0cb0e321 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 14:12:51 +0300 Subject: [PATCH 085/127] enhancement(5235): added tests for watch new agent --- .../upgrade/upgrade_executor_test.go | 114 ++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go
index 7b2cfcc2a4b..202e0b32836 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -3,12 +3,16 @@ package upgrade import ( "errors" "fmt" + "os" + "os/exec" "path/filepath" "testing" + "time" "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + "github.com/elastic/elastic-agent/internal/pkg/fleetapi" "github.com/elastic/elastic-agent/pkg/core/logger" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -683,3 +687,113 @@ func TestReplaceOldWithNewStep(t *testing.T) { }) } } + +type watchNewAgentTestCase struct { + markUpgradeError error + invokeWatcherError error + waitForWatcherError error + calledFuncs []string + uncalledFuncs []string + expectedError error +} + +func TestWatchNewAgentStep(t *testing.T) { + log, _ := loggertest.New("test") + ctx := t.Context() + + markerFilePath := "mockMarkerFilePath" + topPath := "mockTopPath" + dataPath := "mockDataPath" + watcherExecutable := "mockWatcherExecutable" + waitTime := time.Second * 10 + + var createTimeoutContext createContextWithTimeout + + newAgentInstall := agentInstall{ + versionedHome: "mockNewVersionedHome", + hash: "mockNewHash", + } + previousAgentInstall := agentInstall{ + versionedHome: "mockPreviousVersionedHome", + hash: "mockPreviousHash", + } + action := &fleetapi.ActionUpgrade{} + upgradeDetails := &details.Details{} + upgradeOutcome := OUTCOME_UPGRADE + watcherCmd := &exec.Cmd{} + watcherCmd.Process = &os.Process{} + + testCases := map[string]watchNewAgentTestCase{ + "should mark upgrade and invoke watcher": { + markUpgradeError: nil, + invokeWatcherError: nil, + waitForWatcherError: nil, + calledFuncs: []string{"markUpgrade", "selectWatcherExecutable", "invokeWatcher", "waitForWatcher"}, + uncalledFuncs: []string{}, + expectedError: nil, + }, + "should return error if marking upgrade fails": { + markUpgradeError: errors.New("test error"), + invokeWatcherError: nil, + waitForWatcherError: nil, + calledFuncs: []string{"markUpgrade"}, + uncalledFuncs: []string{"selectWatcherExecutable", "invokeWatcher", "waitForWatcher"}, + expectedError: errors.New("test error"), + }, + "should return error if invoking watcher fails": { + markUpgradeError: nil, + invokeWatcherError: errors.New("test error"), + waitForWatcherError: nil, + calledFuncs: []string{"markUpgrade", "selectWatcherExecutable", "invokeWatcher"}, + uncalledFuncs: []string{"waitForWatcher"}, + expectedError: errors.New("test error"), + }, + "if waiting for watcher fails, should kill watcher process and return combined error": { + markUpgradeError: nil, + invokeWatcherError: nil, + waitForWatcherError: errors.New("test error"), + calledFuncs: []string{"markUpgrade", "selectWatcherExecutable", "invokeWatcher", "waitForWatcher"}, + uncalledFuncs: []string{}, + expectedError: errors.Join(errors.New("test error"), errors.New("os: process not initialized")), + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + mockWatcher := &mock_watcher{} + + upgradeExecutor := &executeUpgrade{ + log: log, + watcher: mockWatcher, + } + + for _, calledFunc := range tc.calledFuncs { + switch calledFunc { + case "markUpgrade": + mockWatcher.EXPECT().markUpgrade(log, 
dataPath, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome).Return(tc.markUpgradeError) + case "selectWatcherExecutable": + mockWatcher.EXPECT().selectWatcherExecutable(topPath, previousAgentInstall, newAgentInstall).Return(watcherExecutable) + case "invokeWatcher": + mockWatcher.EXPECT().invokeWatcher(log, watcherExecutable).Return(watcherCmd, tc.invokeWatcherError) + case "waitForWatcher": + mockWatcher.EXPECT().waitForWatcher(ctx, log, markerFilePath, waitTime, mock.AnythingOfType("upgrade.createContextWithTimeout")).Return(tc.waitForWatcherError) + } + } + + err := upgradeExecutor.watchNewAgent(ctx, markerFilePath, topPath, dataPath, waitTime, createTimeoutContext, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome) + + mockWatcher.AssertExpectations(t) + for _, uncalledFunc := range tc.uncalledFuncs { + mockWatcher.AssertNotCalled(t, uncalledFunc, "expected %v to not be called", uncalledFunc) + } + + if tc.expectedError != nil { + require.Equal(t, tc.expectedError.Error(), err.Error(), "expected error to be %v, got %v", tc.expectedError, err) + return + } + + require.NoError(t, err) + + }) + } +} From 53add36f93b593cc4f624f9820ac1dcdc2dc89d3 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Fri, 8 Aug 2025 16:59:25 +0300 Subject: [PATCH 086/127] enhancement(5235): removed upgrade abstraction, using the new upgrade function --- internal/pkg/agent/application/upgrade/upgrade.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index cd6b23b63e0..aa8b6264b92 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -258,10 +258,6 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { - return u.newUpgrade(ctx, version, sourceURI, action, det, skipVerifyOverride, skipDefaultPgp, pgpBytes...) 
-} - -func (u *Upgrader) newUpgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { if err != nil { cleanupErr := u.upgradeCleaner.cleanup(err) From 81366425c67e185ab8543be296f3e71d4cdf3e51 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 00:40:56 +0300 Subject: [PATCH 087/127] enhancement(5235): added upgrade executor mock, added tests --- .mockery.yaml | 1 + .../upgrade/upgrade_executor_mock.go | 301 ++++++++++++++++++ .../agent/application/upgrade/upgrade_test.go | 244 +++++++++++++- 3 files changed, 545 insertions(+), 1 deletion(-) create mode 100644 internal/pkg/agent/application/upgrade/upgrade_executor_mock.go diff --git a/.mockery.yaml b/.mockery.yaml index 10ed750a069..be31886318a 100644 --- a/.mockery.yaml +++ b/.mockery.yaml @@ -49,4 +49,5 @@ packages: watcher: agentDirectoryCopier: upgradeCleaner: + upgradeExecutor: diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go b/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go new file mode 100644 index 00000000000..e4d84b94acf --- /dev/null +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go @@ -0,0 +1,301 @@ +// Code generated by mockery v2.53.4. DO NOT EDIT. + +package upgrade + +import ( + context "context" + + download "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" + details "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" + + fleetapi "github.com/elastic/elastic-agent/internal/pkg/fleetapi" + + info "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" + + mock "github.com/stretchr/testify/mock" + + time "time" + + version "github.com/elastic/elastic-agent/pkg/version" +) + +// mock_upgradeExecutor is an autogenerated mock type for the upgradeExecutor type +type mock_upgradeExecutor struct { + mock.Mock +} + +type mock_upgradeExecutor_Expecter struct { + mock *mock.Mock +} + +func (_m *mock_upgradeExecutor) EXPECT() *mock_upgradeExecutor_Expecter { + return &mock_upgradeExecutor_Expecter{mock: &_m.Mock} +} + +// downloadArtifact provides a mock function with given fields: ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes +func (_m *mock_upgradeExecutor) downloadArtifact(ctx context.Context, parsedTargetVersion *version.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) { + _va := make([]interface{}, len(pgpBytes)) + for _i := range pgpBytes { + _va[_i] = pgpBytes[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for downloadArtifact") + } + + var r0 download.DownloadResult + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *version.ParsedSemVer, info.Agent, string, string, *details.Details, bool, bool, ...string) (download.DownloadResult, error)); ok { + return rf(ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + } + if rf, ok := ret.Get(0).(func(context.Context, *version.ParsedSemVer, info.Agent, string, string, *details.Details, bool, bool, ...string) download.DownloadResult); ok { + r0 = rf(ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + } else { + r0 = ret.Get(0).(download.DownloadResult) + } + + if rf, ok := ret.Get(1).(func(context.Context, *version.ParsedSemVer, info.Agent, string, string, *details.Details, bool, bool, ...string) error); ok { + r1 = rf(ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp, pgpBytes...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_upgradeExecutor_downloadArtifact_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'downloadArtifact' +type mock_upgradeExecutor_downloadArtifact_Call struct { + *mock.Call +} + +// downloadArtifact is a helper method to define mock.On call +// - ctx context.Context +// - parsedTargetVersion *version.ParsedSemVer +// - agentInfo info.Agent +// - sourceURI string +// - fleetServerURI string +// - upgradeDetails *details.Details +// - skipVerifyOverride bool +// - skipDefaultPgp bool +// - pgpBytes ...string +func (_e *mock_upgradeExecutor_Expecter) downloadArtifact(ctx interface{}, parsedTargetVersion interface{}, agentInfo interface{}, sourceURI interface{}, fleetServerURI interface{}, upgradeDetails interface{}, skipVerifyOverride interface{}, skipDefaultPgp interface{}, pgpBytes ...interface{}) *mock_upgradeExecutor_downloadArtifact_Call { + return &mock_upgradeExecutor_downloadArtifact_Call{Call: _e.mock.On("downloadArtifact", + append([]interface{}{ctx, parsedTargetVersion, agentInfo, sourceURI, fleetServerURI, upgradeDetails, skipVerifyOverride, skipDefaultPgp}, pgpBytes...)...)} +} + +func (_c *mock_upgradeExecutor_downloadArtifact_Call) Run(run func(ctx context.Context, parsedTargetVersion *version.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string)) *mock_upgradeExecutor_downloadArtifact_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-8) + for i, a := range args[8:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(args[0].(context.Context), args[1].(*version.ParsedSemVer), args[2].(info.Agent), args[3].(string), args[4].(string), args[5].(*details.Details), args[6].(bool), args[7].(bool), variadicArgs...) 
+ }) + return _c +} + +func (_c *mock_upgradeExecutor_downloadArtifact_Call) Return(_a0 download.DownloadResult, _a1 error) *mock_upgradeExecutor_downloadArtifact_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_upgradeExecutor_downloadArtifact_Call) RunAndReturn(run func(context.Context, *version.ParsedSemVer, info.Agent, string, string, *details.Details, bool, bool, ...string) (download.DownloadResult, error)) *mock_upgradeExecutor_downloadArtifact_Call { + _c.Call.Return(run) + return _c +} + +// replaceOldWithNew provides a mock function with given fields: unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails +func (_m *mock_upgradeExecutor) replaceOldWithNew(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, currentHome string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details) error { + ret := _m.Called(unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) + + if len(ret) == 0 { + panic("no return value specified for replaceOldWithNew") + } + + var r0 error + if rf, ok := ret.Get(0).(func(unpackStepResult, string, string, string, string, string, string, string, string, *details.Details) error); ok { + r0 = rf(unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeExecutor_replaceOldWithNew_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'replaceOldWithNew' +type mock_upgradeExecutor_replaceOldWithNew_Call struct { + *mock.Call +} + +// replaceOldWithNew is a helper method to define mock.On call +// - unpackStepResult0 unpackStepResult +// - currentVersionedHome string +// - topPath string +// - agentName string +// - currentHome string +// - oldRunPath string +// - newRunPath string +// - symlinkPath string +// - newBinPath string +// - upgradeDetails *details.Details +func (_e *mock_upgradeExecutor_Expecter) replaceOldWithNew(unpackStepResult0 interface{}, currentVersionedHome interface{}, topPath interface{}, agentName interface{}, currentHome interface{}, oldRunPath interface{}, newRunPath interface{}, symlinkPath interface{}, newBinPath interface{}, upgradeDetails interface{}) *mock_upgradeExecutor_replaceOldWithNew_Call { + return &mock_upgradeExecutor_replaceOldWithNew_Call{Call: _e.mock.On("replaceOldWithNew", unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails)} +} + +func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) Run(run func(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, currentHome string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details)) *mock_upgradeExecutor_replaceOldWithNew_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(unpackStepResult), args[1].(string), args[2].(string), args[3].(string), args[4].(string), args[5].(string), args[6].(string), args[7].(string), args[8].(string), args[9].(*details.Details)) + }) + return _c +} + +func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) Return(_a0 error) *mock_upgradeExecutor_replaceOldWithNew_Call { + 
_c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) RunAndReturn(run func(unpackStepResult, string, string, string, string, string, string, string, string, *details.Details) error) *mock_upgradeExecutor_replaceOldWithNew_Call { + _c.Call.Return(run) + return _c +} + +// unpackArtifact provides a mock function with given fields: downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9 +func (_m *mock_upgradeExecutor) unpackArtifact(downloadResult download.DownloadResult, _a1 string, archivePath string, topPath string, flavor string, dataPath string, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn9 checkUpgradeFn) (unpackStepResult, error) { + ret := _m.Called(downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9) + + if len(ret) == 0 { + panic("no return value specified for unpackArtifact") + } + + var r0 unpackStepResult + var r1 error + if rf, ok := ret.Get(0).(func(download.DownloadResult, string, string, string, string, string, string, *details.Details, agentVersion, checkUpgradeFn) (unpackStepResult, error)); ok { + return rf(downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9) + } + if rf, ok := ret.Get(0).(func(download.DownloadResult, string, string, string, string, string, string, *details.Details, agentVersion, checkUpgradeFn) unpackStepResult); ok { + r0 = rf(downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9) + } else { + r0 = ret.Get(0).(unpackStepResult) + } + + if rf, ok := ret.Get(1).(func(download.DownloadResult, string, string, string, string, string, string, *details.Details, agentVersion, checkUpgradeFn) error); ok { + r1 = rf(downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mock_upgradeExecutor_unpackArtifact_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'unpackArtifact' +type mock_upgradeExecutor_unpackArtifact_Call struct { + *mock.Call +} + +// unpackArtifact is a helper method to define mock.On call +// - downloadResult download.DownloadResult +// - _a1 string +// - archivePath string +// - topPath string +// - flavor string +// - dataPath string +// - currentHome string +// - upgradeDetails *details.Details +// - currentVersion agentVersion +// - checkUpgradeFn9 checkUpgradeFn +func (_e *mock_upgradeExecutor_Expecter) unpackArtifact(downloadResult interface{}, _a1 interface{}, archivePath interface{}, topPath interface{}, flavor interface{}, dataPath interface{}, currentHome interface{}, upgradeDetails interface{}, currentVersion interface{}, checkUpgradeFn9 interface{}) *mock_upgradeExecutor_unpackArtifact_Call { + return &mock_upgradeExecutor_unpackArtifact_Call{Call: _e.mock.On("unpackArtifact", downloadResult, _a1, archivePath, topPath, flavor, dataPath, currentHome, upgradeDetails, currentVersion, checkUpgradeFn9)} +} + +func (_c *mock_upgradeExecutor_unpackArtifact_Call) Run(run func(downloadResult download.DownloadResult, _a1 string, archivePath string, topPath string, flavor string, dataPath string, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn9 checkUpgradeFn)) 
*mock_upgradeExecutor_unpackArtifact_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(download.DownloadResult), args[1].(string), args[2].(string), args[3].(string), args[4].(string), args[5].(string), args[6].(string), args[7].(*details.Details), args[8].(agentVersion), args[9].(checkUpgradeFn)) + }) + return _c +} + +func (_c *mock_upgradeExecutor_unpackArtifact_Call) Return(_a0 unpackStepResult, _a1 error) *mock_upgradeExecutor_unpackArtifact_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *mock_upgradeExecutor_unpackArtifact_Call) RunAndReturn(run func(download.DownloadResult, string, string, string, string, string, string, *details.Details, agentVersion, checkUpgradeFn) (unpackStepResult, error)) *mock_upgradeExecutor_unpackArtifact_Call { + _c.Call.Return(run) + return _c +} + +// watchNewAgent provides a mock function with given fields: ctx, markerFilePath, topPath, dataPath, waitTime, createTimeoutContext, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome +func (_m *mock_upgradeExecutor) watchNewAgent(ctx context.Context, markerFilePath string, topPath string, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error { + ret := _m.Called(ctx, markerFilePath, topPath, dataPath, waitTime, createTimeoutContext, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome) + + if len(ret) == 0 { + panic("no return value specified for watchNewAgent") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, time.Duration, createContextWithTimeout, agentInstall, agentInstall, *fleetapi.ActionUpgrade, *details.Details, UpgradeOutcome) error); ok { + r0 = rf(ctx, markerFilePath, topPath, dataPath, waitTime, createTimeoutContext, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// mock_upgradeExecutor_watchNewAgent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'watchNewAgent' +type mock_upgradeExecutor_watchNewAgent_Call struct { + *mock.Call +} + +// watchNewAgent is a helper method to define mock.On call +// - ctx context.Context +// - markerFilePath string +// - topPath string +// - dataPath string +// - waitTime time.Duration +// - createTimeoutContext createContextWithTimeout +// - newAgentInstall agentInstall +// - previousAgentInstall agentInstall +// - action *fleetapi.ActionUpgrade +// - upgradeDetails *details.Details +// - upgradeOutcome UpgradeOutcome +func (_e *mock_upgradeExecutor_Expecter) watchNewAgent(ctx interface{}, markerFilePath interface{}, topPath interface{}, dataPath interface{}, waitTime interface{}, createTimeoutContext interface{}, newAgentInstall interface{}, previousAgentInstall interface{}, action interface{}, upgradeDetails interface{}, upgradeOutcome interface{}) *mock_upgradeExecutor_watchNewAgent_Call { + return &mock_upgradeExecutor_watchNewAgent_Call{Call: _e.mock.On("watchNewAgent", ctx, markerFilePath, topPath, dataPath, waitTime, createTimeoutContext, newAgentInstall, previousAgentInstall, action, upgradeDetails, upgradeOutcome)} +} + +func (_c *mock_upgradeExecutor_watchNewAgent_Call) Run(run func(ctx context.Context, markerFilePath string, topPath string, dataPath string, waitTime time.Duration, createTimeoutContext 
createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome)) *mock_upgradeExecutor_watchNewAgent_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(string), args[4].(time.Duration), args[5].(createContextWithTimeout), args[6].(agentInstall), args[7].(agentInstall), args[8].(*fleetapi.ActionUpgrade), args[9].(*details.Details), args[10].(UpgradeOutcome)) + }) + return _c +} + +func (_c *mock_upgradeExecutor_watchNewAgent_Call) Return(_a0 error) *mock_upgradeExecutor_watchNewAgent_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *mock_upgradeExecutor_watchNewAgent_Call) RunAndReturn(run func(context.Context, string, string, string, time.Duration, createContextWithTimeout, agentInstall, agentInstall, *fleetapi.ActionUpgrade, *details.Details, UpgradeOutcome) error) *mock_upgradeExecutor_watchNewAgent_Call { + _c.Call.Return(run) + return _c +} + +// newMock_upgradeExecutor creates a new instance of mock_upgradeExecutor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newMock_upgradeExecutor(t interface { + mock.TestingT + Cleanup(func()) +}) *mock_upgradeExecutor { + mock := &mock_upgradeExecutor{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 0ec93b5c95d..71cc421e0e3 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -7,6 +7,7 @@ package upgrade import ( "context" "crypto/tls" + goerrors "errors" "fmt" "io" "net/http" @@ -25,6 +26,7 @@ import ( "github.com/elastic/elastic-agent-libs/transport/httpcommon" "github.com/elastic/elastic-agent-libs/transport/tlscommon" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/info" "github.com/elastic/elastic-agent/internal/pkg/agent/application/paths" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" @@ -42,7 +44,9 @@ import ( "github.com/elastic/elastic-agent/pkg/control/v2/client" "github.com/elastic/elastic-agent/pkg/control/v2/cproto" "github.com/elastic/elastic-agent/pkg/core/logger" + version "github.com/elastic/elastic-agent/pkg/version" mockinfo "github.com/elastic/elastic-agent/testing/mocks/internal_/pkg/agent/application/info" + currentagtversion "github.com/elastic/elastic-agent/version" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" agtversion "github.com/elastic/elastic-agent/pkg/version" @@ -1041,7 +1045,245 @@ type testError struct { expectedError error } -func TestUpgradeDownloadErrors(t *testing.T) { +type upgradeTestCase struct { + targetVersion string + parsedTargetVersion *agtversion.ParsedSemVer + downloadError error + unpackError error + replaceOldWithNewError error + watchNewAgentError error + cleanupError error + cleanupCalledWith error + expectedError error + expectCallback bool + calledFuncs []string + uncalledFuncs []string +} + +func TestUpgrade(t *testing.T) { + ctx := t.Context() + log, _ := loggertest.New("test") + + 
defaultTargetVersion := "1.0.0" + defaultParsedTargetVersion, err := agtversion.ParseVersion(defaultTargetVersion) + require.NoError(t, err) + + currentReleaseVersion := release.VersionWithSnapshot() + parsedCurrentReleaseVersion, err := agtversion.ParseVersion(currentReleaseVersion) + require.NoError(t, err) + + invalidTargetVersion := "invalidTargetVersion" + + sourceURI := "mockUri" + action := &fleetapi.ActionUpgrade{} + details := &details.Details{} + skipVerify := false + skipDefaultPgp := false + pgpBytes := []string{"mockPGPBytes"} + pgpBytesConverted := make([]interface{}, len(pgpBytes)) + for i, v := range pgpBytes { + pgpBytesConverted[i] = v + } + agentInfo := &info.AgentInfo{} + + topPath := t.TempDir() + paths.SetTop(topPath) + + markerFilePath := markerFilePath(paths.Data()) + currentVersionedHome, err := filepath.Rel(topPath, paths.Home()) + require.NoError(t, err) + symlinkPath := filepath.Join(topPath, agentName) + + downloadResult := download.DownloadResult{ + ArtifactPath: "mockArtifactPath", + ArtifactHashPath: "mockArtifactHashPath", + } + + unpackStepResult := unpackStepResult{ + newHome: "mockNewHome", + unpackResult: unpackResult{ + VersionedHome: "mockVersionedHome", + Hash: "mockHash", + }, + } + + newRunPath := filepath.Join(unpackStepResult.newHome, "run") + + newBinaryPath := paths.BinaryPath(filepath.Join(topPath, unpackStepResult.VersionedHome), agentName) + + currentVersion := agentVersion{ + version: release.Version(), + snapshot: release.Snapshot(), + hash: release.Commit(), + fips: release.FIPSDistribution(), + } + + defaultCleanupError := errors.New("test cleanup error") + + previousAgentInstall := agentInstall{ + parsedVersion: currentagtversion.GetParsedAgentPackageVersion(), + version: release.VersionWithSnapshot(), + hash: release.Commit(), + versionedHome: currentVersionedHome, + } + + testCases := map[string]upgradeTestCase{ + "should download artifact, unpack it, replace the old agent and watch the new agent": { + targetVersion: defaultTargetVersion, + parsedTargetVersion: defaultParsedTargetVersion, + downloadError: nil, + unpackError: nil, + replaceOldWithNewError: nil, + watchNewAgentError: nil, + cleanupError: nil, + cleanupCalledWith: nil, + expectCallback: true, + calledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, + uncalledFuncs: []string{}, + }, + "if the target version is the same release version, it should return error": { + targetVersion: currentReleaseVersion, + parsedTargetVersion: parsedCurrentReleaseVersion, + downloadError: nil, + unpackError: nil, + replaceOldWithNewError: nil, + watchNewAgentError: nil, + cleanupError: defaultCleanupError, + cleanupCalledWith: ErrUpgradeSameVersion, + expectCallback: false, + calledFuncs: []string{}, + uncalledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, + expectedError: goerrors.Join(ErrUpgradeSameVersion, defaultCleanupError), + }, + "if the target version cannot be parsed, it should return error": { + targetVersion: invalidTargetVersion, + parsedTargetVersion: nil, + downloadError: nil, + unpackError: nil, + replaceOldWithNewError: nil, + watchNewAgentError: nil, + cleanupError: defaultCleanupError, + cleanupCalledWith: fmt.Errorf("error parsing version %q: %w", "invalidTargetVersion", version.ErrNoMatch), + expectCallback: false, + calledFuncs: []string{}, + uncalledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, + expectedError: 
goerrors.Join(fmt.Errorf("error parsing version %q: %w", "invalidTargetVersion", version.ErrNoMatch), defaultCleanupError), + }, + "if the download fails, it should return error": { + targetVersion: defaultTargetVersion, + parsedTargetVersion: defaultParsedTargetVersion, + downloadError: errors.New("test download error"), + unpackError: nil, + replaceOldWithNewError: nil, + watchNewAgentError: nil, + cleanupError: defaultCleanupError, + cleanupCalledWith: errors.New("test download error"), + expectCallback: false, + calledFuncs: []string{"downloadArtifact"}, + uncalledFuncs: []string{"unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, + expectedError: goerrors.Join(errors.New("test download error"), defaultCleanupError), + }, + "if the unpack fails, it should return error": { + targetVersion: defaultTargetVersion, + parsedTargetVersion: defaultParsedTargetVersion, + downloadError: nil, + unpackError: errors.New("test unpack error"), + replaceOldWithNewError: nil, + watchNewAgentError: nil, + cleanupError: defaultCleanupError, + cleanupCalledWith: errors.New("test unpack error"), + expectCallback: false, + calledFuncs: []string{"downloadArtifact", "unpackArtifact"}, + uncalledFuncs: []string{"replaceOldWithNew", "watchNewAgent"}, + expectedError: goerrors.Join(errors.New("test unpack error"), defaultCleanupError), + }, + "if the replace old with new fails, it should return error": { + targetVersion: defaultTargetVersion, + parsedTargetVersion: defaultParsedTargetVersion, + downloadError: nil, + unpackError: nil, + replaceOldWithNewError: errors.New("test replace old with new error"), + watchNewAgentError: nil, + cleanupError: defaultCleanupError, + cleanupCalledWith: errors.New("test replace old with new error"), + expectCallback: false, + calledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew"}, + uncalledFuncs: []string{"watchNewAgent"}, + expectedError: goerrors.Join(errors.New("test replace old with new error"), defaultCleanupError), + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + newAgentInstall := agentInstall{ + parsedVersion: tc.parsedTargetVersion, + version: tc.targetVersion, + hash: unpackStepResult.Hash, + versionedHome: unpackStepResult.VersionedHome, + } + + downloadsPath := t.TempDir() + paths.SetDownloads(downloadsPath) + + mockCleaner := &mock_upgradeCleaner{} + mockExecutor := &mock_upgradeExecutor{} + + upgrader := &Upgrader{ + log: log, + agentInfo: agentInfo, + markerWatcher: newMarkerFileWatcher(markerFilePath, log), + upgradeCleaner: mockCleaner, + upgradeExecutor: mockExecutor, + } + + for _, funcName := range tc.calledFuncs { + switch funcName { + case "downloadArtifact": + mockExecutor.EXPECT().downloadArtifact(ctx, tc.parsedTargetVersion, agentInfo, sourceURI, "", details, skipVerify, skipDefaultPgp, pgpBytesConverted...).Return(downloadResult, tc.downloadError) + + case "unpackArtifact": + mockExecutor.EXPECT().unpackArtifact(downloadResult, tc.targetVersion, downloadResult.ArtifactPath, topPath, "", paths.Data(), paths.Home(), details, currentVersion, mock.AnythingOfType("checkUpgradeFn")).Return(unpackStepResult, tc.unpackError) + + case "replaceOldWithNew": + mockExecutor.EXPECT().replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, paths.Home(), paths.Run(), newRunPath, symlinkPath, newBinaryPath, details).Return(tc.replaceOldWithNewError) + + case "watchNewAgent": + mockExecutor.EXPECT().watchNewAgent(ctx, markerFilePath, topPath, paths.Data(), watcherMaxWaitTime, 
mock.AnythingOfType("createContextWithTimeout"), newAgentInstall, previousAgentInstall, action, details, OUTCOME_UPGRADE).Return(tc.watchNewAgentError) + } + } + + mockCleaner.EXPECT().cleanup(tc.cleanupCalledWith).Return(tc.cleanupError) + + cb, err := upgrader.Upgrade(ctx, tc.targetVersion, sourceURI, action, details, skipVerify, skipDefaultPgp, pgpBytes...) + + if len(tc.calledFuncs) > 0 { + mockExecutor.AssertExpectations(t) + } + + for _, funcName := range tc.uncalledFuncs { + mockExecutor.AssertNotCalled(t, funcName, "expected %v to not be called", funcName) + } + + mockCleaner.AssertExpectations(t) + + if tc.expectCallback { + require.NotNil(t, cb) + } else { + require.Nil(t, cb) + } + + if tc.expectedError != nil { + require.Equal(t, tc.expectedError.Error(), err.Error(), "expected error to be %v, got %v", tc.expectedError, err) + return + } + + require.NoError(t, err, "expected no error, got %v", err) + + }) + } +} + +func TesE2EtUpgradeDownloadErrors(t *testing.T) { testArtifact := artifact.Artifact{ Name: "Elastic Agent", Cmd: "elastic-agent", From 02e542b52c1b64b18a8441f7ade75f9b8f6287ea Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 00:41:36 +0300 Subject: [PATCH 088/127] enhancement(5235): removed nil error check from deferred cleanup --- internal/pkg/agent/application/upgrade/upgrade.go | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index aa8b6264b92..4eabe0ee07f 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -259,12 +259,10 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. 
func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { - if err != nil { - cleanupErr := u.upgradeCleaner.cleanup(err) - if cleanupErr != nil { - u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) - err = goerrors.Join(err, cleanupErr) - } + cleanupErr := u.upgradeCleaner.cleanup(err) + if cleanupErr != nil { + u.log.Errorf("Error cleaning up after upgrade: %w", cleanupErr) + err = goerrors.Join(err, cleanupErr) } }() From 41e4566c110ae160decdca1b97f1bedbc7c99a4f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 00:44:52 +0300 Subject: [PATCH 089/127] enhancement(5235): removed commented test code from fs downloader tests --- .../artifact/download/fs/downloader_test.go | 110 ------------------ 1 file changed, 110 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index 1e701678304..7e4d800e8aa 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -189,116 +189,6 @@ func createFiles(t *testing.T, dstPath string, files []file) { } } -// func TestDownloader_DownloadAsc(t *testing.T) { -// type fields struct { -// config *artifact.Config -// } -// type args struct { -// a artifact.Artifact -// version agtversion.ParsedSemVer -// } -// tests := []struct { -// name string -// files []file -// fields fields -// args args -// want string -// wantErr assert.ErrorAssertionFunc -// }{ -// { -// name: "happy path released version", -// files: []file{ -// { -// "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", -// []byte("fake signature for elastic-agent package"), -// }, -// }, -// fields: fields{ -// config: &artifact.Config{ -// OperatingSystem: "linux", -// Architecture: "64", -// }, -// }, -// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "")}, -// want: "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", -// wantErr: assert.NoError, -// }, -// { -// name: "happy path snapshot version", -// files: []file{ -// { -// "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", -// []byte("fake signature for elastic-agent package"), -// }, -// }, -// fields: fields{ -// config: &artifact.Config{ -// OperatingSystem: "linux", -// Architecture: "64", -// }, -// }, -// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, -// want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", -// wantErr: assert.NoError, -// }, -// { -// name: "happy path released version with build metadata", -// files: []file{ -// { -// "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", -// []byte("fake signature for elastic-agent package"), -// }, -// }, -// fields: fields{ -// config: &artifact.Config{ -// OperatingSystem: "linux", -// Architecture: "64", -// }, -// }, -// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, -// want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", -// wantErr: assert.NoError, -// }, -// { -// name: "happy path snapshot version with build metadata", -// files: []file{ -// { -// "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", -// []byte("fake signature for elastic-agent 
package"), -// }, -// }, -// fields: fields{ -// config: &artifact.Config{ -// OperatingSystem: "linux", -// Architecture: "64", -// }, -// }, -// args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, -// want: "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", -// wantErr: assert.NoError, -// }, -// } -// for _, tt := range tests { -// t.Run(tt.name, func(t *testing.T) { -// dropPath := t.TempDir() -// targetDirPath := t.TempDir() - -// createFiles(t, dropPath, tt.files) - -// config := tt.fields.config -// config.DropPath = dropPath -// config.TargetDirectory = targetDirPath - -// e := NewDownloader(config) -// got, err := e.DownloadAsc(context.TODO(), tt.args.a, tt.args.version) -// if !tt.wantErr(t, err, fmt.Sprintf("DownloadAsc(%v, %v)", tt.args.a, tt.args.version)) { -// return -// } -// assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "DownloadAsc(%v, %v)", tt.args.a, tt.args.version) -// }) -// } -// } - type testCopyError struct { msg string } From e468776492b52178aa25fbf4d67a561c5a45dfa2 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 01:18:05 +0300 Subject: [PATCH 090/127] enhancement(5235): using download results instead of archive path in fs downloader test --- .../artifact/download/fs/downloader_test.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index 7e4d800e8aa..e9984e5c43d 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/require" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" agtversion "github.com/elastic/elastic-agent/pkg/version" ) @@ -163,12 +164,17 @@ func TestDownloader_Download(t *testing.T) { e := NewDownloader(config) got, err := e.Download(context.TODO(), tt.args.a, tt.args.version) + expectedTargetFile := filepath.Join(targetDirPath, tt.want) + expectedHashFile := expectedTargetFile + ".sha512" + + expectedDownloadResult := download.DownloadResult{ + ArtifactPath: expectedTargetFile, + ArtifactHashPath: expectedHashFile, + } + if tt.wantErr { assert.Error(t, err) - expectedTargetFile := filepath.Join(targetDirPath, tt.want) - expectedHashFile := expectedTargetFile + ".sha512" - assert.NoFileExists(t, expectedTargetFile, "downloader should clean up partial artifact file on error") assert.NoFileExists(t, expectedHashFile, "downloader should clean up partial hash file on error") assert.DirExists(t, targetDirPath, "downloader should not clean up target directory on error") @@ -176,7 +182,7 @@ func TestDownloader_Download(t *testing.T) { } assert.NoError(t, err) - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "Download(%v, %v)", tt.args.a, tt.args.version) + assert.Equalf(t, expectedDownloadResult, got, "Download(%v, %v)", tt.args.a, tt.args.version) }) } } From 93029b8effc0b495200393d5b0ea0e0bb248bc79 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 01:20:51 +0300 Subject: [PATCH 091/127] enhancement(5235): using download result instead of archive path in 
http downloader test --- .../upgrade/artifact/download/http/downloader_test.go | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go index ad862f87468..20264d9ad51 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/http/downloader_test.go @@ -24,6 +24,7 @@ import ( "go.uber.org/zap/zaptest/observer" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact" + "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/artifact/download" "github.com/elastic/elastic-agent/internal/pkg/agent/application/upgrade/details" "github.com/elastic/elastic-agent/internal/pkg/testutils/fipsutils" "github.com/elastic/elastic-agent/pkg/core/logger" @@ -523,7 +524,15 @@ func TestDownloadVersion(t *testing.T) { got, err := downloader.Download(context.TODO(), tt.args.a, tt.args.version) - assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got.ArtifactPath, "Download(%v, %v)", tt.args.a, tt.args.version) + expectedTargetFile := filepath.Join(targetDirPath, tt.want) + expectedTargetHashFile := expectedTargetFile + ".sha512" + + expectedDownloadResult := download.DownloadResult{ + ArtifactPath: expectedTargetFile, + ArtifactHashPath: expectedTargetHashFile, + } + + assert.Equalf(t, expectedDownloadResult, got, "Download(%v, %v)", tt.args.a, tt.args.version) if tt.wantErr { assert.Error(t, err) From 5221a857214a45ecfde2866cc7e80717ae94e804 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 01:23:56 +0300 Subject: [PATCH 092/127] enhancement(5235): removed dev logs --- internal/pkg/agent/application/upgrade/step_unpack.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index e6171c2bcb3..c725cf0f317 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -114,7 +114,6 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa versionedHome = createVersionedHomeFromHash(hash) } - log.Infof("THE VERSIONED HOME IN UNZIP IS %s", versionedHome) result.VersionedHome = versionedHome skipFn, err := skipFnFromZip(log, r, flavor, fileNamePrefix, createVersionedHomeFromHash(hash), registry) @@ -148,7 +147,6 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa dstPath := strings.TrimPrefix(mappedPackagePath, "data/") dstPath = filepath.Join(dataDir, dstPath) // TODO: look into this, this may be the new home to cleanup - log.Infof("THE DESTINATION PATH IN UNZIP IS %s", dstPath) if skipFn(dstPath) { return nil @@ -352,7 +350,6 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa versionedHome = createVersionedHomeFromHash(metadata.hash) } - log.Infof("THE VERSIONED HOME IN UNTAR IS %s", versionedHome) result.VersionedHome = versionedHome skipFn, err := skipFnFromTar(log, archivePath, flavor, registry) @@ -416,7 +413,6 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa rel := filepath.FromSlash(strings.TrimPrefix(fileName, "data/")) abs := filepath.Join(dataDir, rel) // TODO: if anything happens remove abs most likely, check this - log.Infof("THE ABSOLUTE 
PATH IN UNTAR IS %s", abs) // find the root dir if currentDir := filepath.Dir(abs); rootDir == "" || len(filepath.Dir(rootDir)) > len(currentDir) { From a7153fcb2f905ca9d3df5333474481fbef9db6b4 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 02:07:46 +0300 Subject: [PATCH 093/127] enhancement(5235): asserting downloads dir is cleaned up if upgrade is successful --- .../pkg/agent/application/upgrade/upgrade_test.go | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 71cc421e0e3..d95781f31e9 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1058,6 +1058,7 @@ type upgradeTestCase struct { expectCallback bool calledFuncs []string uncalledFuncs []string + downloadsDirCleaned bool } func TestUpgrade(t *testing.T) { @@ -1140,6 +1141,7 @@ func TestUpgrade(t *testing.T) { expectCallback: true, calledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, uncalledFuncs: []string{}, + downloadsDirCleaned: true, }, "if the target version is the same release version, it should return error": { targetVersion: currentReleaseVersion, @@ -1154,6 +1156,7 @@ func TestUpgrade(t *testing.T) { calledFuncs: []string{}, uncalledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, expectedError: goerrors.Join(ErrUpgradeSameVersion, defaultCleanupError), + downloadsDirCleaned: false, }, "if the target version cannot be parsed, it should return error": { targetVersion: invalidTargetVersion, @@ -1168,6 +1171,7 @@ func TestUpgrade(t *testing.T) { calledFuncs: []string{}, uncalledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, expectedError: goerrors.Join(fmt.Errorf("error parsing version %q: %w", "invalidTargetVersion", version.ErrNoMatch), defaultCleanupError), + downloadsDirCleaned: false, }, "if the download fails, it should return error": { targetVersion: defaultTargetVersion, @@ -1182,6 +1186,7 @@ func TestUpgrade(t *testing.T) { calledFuncs: []string{"downloadArtifact"}, uncalledFuncs: []string{"unpackArtifact", "replaceOldWithNew", "watchNewAgent"}, expectedError: goerrors.Join(errors.New("test download error"), defaultCleanupError), + downloadsDirCleaned: false, }, "if the unpack fails, it should return error": { targetVersion: defaultTargetVersion, @@ -1196,6 +1201,7 @@ func TestUpgrade(t *testing.T) { calledFuncs: []string{"downloadArtifact", "unpackArtifact"}, uncalledFuncs: []string{"replaceOldWithNew", "watchNewAgent"}, expectedError: goerrors.Join(errors.New("test unpack error"), defaultCleanupError), + downloadsDirCleaned: false, }, "if the replace old with new fails, it should return error": { targetVersion: defaultTargetVersion, @@ -1210,6 +1216,7 @@ func TestUpgrade(t *testing.T) { calledFuncs: []string{"downloadArtifact", "unpackArtifact", "replaceOldWithNew"}, uncalledFuncs: []string{"watchNewAgent"}, expectedError: goerrors.Join(errors.New("test replace old with new error"), defaultCleanupError), + downloadsDirCleaned: false, }, } @@ -1272,13 +1279,18 @@ func TestUpgrade(t *testing.T) { require.Nil(t, cb) } + if tc.downloadsDirCleaned { + require.NoDirExists(t, downloadsPath, "downloads directory should be cleaned up") + } else { + require.DirExists(t, downloadsPath, "downloads directory should not be cleaned up") + } + if 
tc.expectedError != nil { require.Equal(t, tc.expectedError.Error(), err.Error(), "expected error to be %v, got %v", tc.expectedError, err) return } require.NoError(t, err, "expected no error, got %v", err) - }) } } From 3b1c62da953517583a8675b8197f3492922def1a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 02:08:42 +0300 Subject: [PATCH 094/127] enhancement(5235): added package scoped var for unpack step to abstract io.Copy --- internal/pkg/agent/application/upgrade/step_unpack.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index c725cf0f317..2e463427ed2 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -27,6 +27,8 @@ import ( agtversion "github.com/elastic/elastic-agent/pkg/version" ) +var unpackArchiveCopyFunc = io.Copy + // UnpackResult contains the location and hash of the unpacked agent files type unpackResult struct { // Hash contains the unpacked agent commit hash, limited to a length of 6 for backward compatibility @@ -188,7 +190,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa }() //nolint:gosec // legacy - if _, err = io.Copy(f, rc); err != nil { + if _, err = unpackArchiveCopyFunc(f, rc); err != nil { return err } } @@ -437,7 +439,7 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa } //nolint:gosec // legacy - _, err = io.Copy(wf, tr) + _, err = unpackArchiveCopyFunc(wf, tr) if closeErr := wf.Close(); closeErr != nil && err == nil { err = closeErr } From 41ef242de8350328d1b1c0fc518518ad6c7abb31 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 02:42:36 +0300 Subject: [PATCH 095/127] enhancement(5235): returning unpack result with hash --- internal/pkg/agent/application/upgrade/step_unpack.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 2e463427ed2..c4be925cd48 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -117,6 +117,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa } result.VersionedHome = versionedHome + result.Hash = hash skipFn, err := skipFnFromZip(log, r, flavor, fileNamePrefix, createVersionedHomeFromHash(hash), registry) if err != nil { @@ -211,8 +212,6 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa } } - result.Hash = hash - return result, nil } @@ -353,6 +352,7 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa } result.VersionedHome = versionedHome + result.Hash = hash skipFn, err := skipFnFromTar(log, archivePath, flavor, registry) if err != nil { @@ -468,7 +468,6 @@ func untar(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa return result, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) } } - result.Hash = hash return result, nil } From 4d32d4be65b595e557ff2f111b732f8eb2323914 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 02:43:18 +0300 Subject: [PATCH 096/127] enhancement(5235): added tests for copy func errors in unpack --- .../application/upgrade/step_unpack_test.go | 56 
++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index a4adf9752de..8b74dee7b79 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -22,6 +22,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/elastic/elastic-agent/internal/pkg/agent/errors" v1 "github.com/elastic/elastic-agent/pkg/api/v1" "github.com/elastic/elastic-agent/pkg/core/logger/loggertest" ) @@ -223,6 +224,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { wantErr assert.ErrorAssertionFunc checkFiles checkExtractedPath flavor string + copyFunc func(dst io.Writer, src io.Reader) (int64, error) }{ { name: "file before containing folder", @@ -259,6 +261,24 @@ func TestUpgrader_unpackTarGz(t *testing.T) { wantErr: assert.NoError, checkFiles: checkExtractedFilesWithManifest, }, + { + name: "when copying files fails, it should return error", + args: args{ + version: "1.2.3", + archiveFiles: append(archiveFilesWithManifestNoSymlink, agentArchiveSymLink), + archiveGenerator: func(t *testing.T, i []files) (string, error) { + return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.tar.gz", i) + }, + }, + want: unpackResult{ + Hash: "abcdef", + VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), + }, + wantErr: assert.Error, + copyFunc: func(dst io.Writer, src io.Reader) (int64, error) { + return 0, errors.New("test copy error") + }, + }, { name: "package with basic flavor", args: args{ @@ -313,6 +333,15 @@ func TestUpgrader_unpackTarGz(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + if tt.copyFunc != nil { + tmpCopyFunc := unpackArchiveCopyFunc + unpackArchiveCopyFunc = tt.copyFunc + + t.Cleanup(func() { + unpackArchiveCopyFunc = tmpCopyFunc + }) + } + testTop := t.TempDir() testDataDir := filepath.Join(testTop, "data") err := os.MkdirAll(testDataDir, 0o777) @@ -352,6 +381,7 @@ func TestUpgrader_unpackZip(t *testing.T) { wantErr assert.ErrorAssertionFunc checkFiles checkExtractedPath flavor string + copyFunc func(dst io.Writer, src io.Reader) (int64, error) }{ { name: "file before containing folder", @@ -386,7 +416,23 @@ func TestUpgrader_unpackZip(t *testing.T) { wantErr: assert.NoError, checkFiles: checkExtractedFilesWithManifest, }, - + { + name: "when copying files fails, it should return error", + args: args{ + archiveFiles: archiveFilesWithManifestNoSymlink, + archiveGenerator: func(t *testing.T, i []files) (string, error) { + return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64.zip", i) + }, + }, + want: unpackResult{ + Hash: "abcdef", + VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), + }, + wantErr: assert.Error, + copyFunc: func(dst io.Writer, src io.Reader) (int64, error) { + return 0, errors.New("test copy error") + }, + }, { name: "package with basic flavor", args: args{ @@ -439,6 +485,14 @@ func TestUpgrader_unpackZip(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + if tt.copyFunc != nil { + tmpCopyFunc := unpackArchiveCopyFunc + unpackArchiveCopyFunc = tt.copyFunc + + t.Cleanup(func() { + unpackArchiveCopyFunc = tmpCopyFunc + }) + } testTop := t.TempDir() testDataDir := filepath.Join(testTop, "data") From 460ad370bd7ff5ac3a2d99e7717d516f558ddfb9 Mon Sep 17 
00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 11:43:53 +0300 Subject: [PATCH 097/127] enhancement(5235): added asc downloader and tests back in --- .../artifact/download/fs/downloader.go | 25 ++++ .../artifact/download/fs/downloader_test.go | 115 ++++++++++++++++++ .../artifact/download/fs/verifier_test.go | 2 + 3 files changed, 142 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go index 72b9472a1a1..82aeb12406f 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader.go @@ -86,6 +86,31 @@ func (e *Downloader) Download(ctx context.Context, a artifact.Artifact, version return downloadResult, nil } +// DownloadAsc downloads the package .asc file from configured source. +// It returns absolute path to the downloaded file and a no-nil error if any occurs. +func (e *Downloader) DownloadAsc(_ context.Context, a artifact.Artifact, version agtversion.ParsedSemVer) (string, error) { + filename, err := artifact.GetArtifactName(a, version, e.config.OS(), e.config.Arch()) + if err != nil { + return "", errors.New(err, "generating package name failed") + } + + filename += ".asc" + + fullPath, err := artifact.GetArtifactPath(a, version, e.config.OS(), e.config.Arch(), e.config.TargetDirectory) + if err != nil { + return "", errors.New(err, "generating package path failed") + } + + fullPath += ".asc" + + err = e.downloadFile(filename, fullPath) + if err != nil { + return "", err + } + + return fullPath, nil +} + func (e *Downloader) downloadFile(filename, fullPath string) error { sourcePath := filepath.Join(e.dropPath, filename) diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go index e9984e5c43d..55c82d8ea2b 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/downloader_test.go @@ -6,6 +6,7 @@ package fs import ( "context" + "fmt" "io" "os" "path/filepath" @@ -195,6 +196,120 @@ func createFiles(t *testing.T, dstPath string, files []file) { } } +func TestDownloader_DownloadAsc(t *testing.T) { + type fields struct { + config *artifact.Config + } + type args struct { + a artifact.Artifact + version agtversion.ParsedSemVer + } + tests := []struct { + name string + files []file + fields fields + args args + want string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "happy path released version", + files: []file{ + { + "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", + []byte("fake signature for elastic-agent package"), + }, + }, + fields: fields{ + config: &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + }, + }, + args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "")}, + want: "elastic-agent-1.2.3-linux-x86_64.tar.gz.asc", + wantErr: assert.NoError, + }, + { + name: "happy path snapshot version", + files: []file{ + { + "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", + []byte("fake signature for elastic-agent package"), + }, + }, + fields: fields{ + config: &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + }, + }, + args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "")}, + want: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz.asc", + 
wantErr: assert.NoError, + }, + { + name: "happy path released version with build metadata", + files: []file{ + { + "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", + []byte("fake signature for elastic-agent package"), + }, + }, + fields: fields{ + config: &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + }, + }, + args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "", "build19700101")}, + want: "elastic-agent-1.2.3+build19700101-linux-x86_64.tar.gz.asc", + wantErr: assert.NoError, + }, + { + name: "happy path snapshot version with build metadata", + files: []file{ + { + "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", + []byte("fake signature for elastic-agent package"), + }, + }, + fields: fields{ + config: &artifact.Config{ + OperatingSystem: "linux", + Architecture: "64", + }, + }, + args: args{a: agentSpec, version: *agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "build19700101")}, + want: "elastic-agent-1.2.3-SNAPSHOT+build19700101-linux-x86_64.tar.gz.asc", + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dropPath := t.TempDir() + targetDirPath := t.TempDir() + + createFiles(t, dropPath, tt.files) + + config := tt.fields.config + config.DropPath = dropPath + config.TargetDirectory = targetDirPath + + e := &Downloader{ + dropPath: dropPath, + config: config, + CopyFunc: io.Copy, + } + got, err := e.DownloadAsc(context.TODO(), tt.args.a, tt.args.version) + if !tt.wantErr(t, err, fmt.Sprintf("DownloadAsc(%v, %v)", tt.args.a, tt.args.version)) { + return + } + assert.Equalf(t, filepath.Join(targetDirPath, tt.want), got, "DownloadAsc(%v, %v)", tt.args.a, tt.args.version) + }) + } +} + type testCopyError struct { msg string } diff --git a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go index 3baa485eda8..abe35860593 100644 --- a/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go +++ b/internal/pkg/agent/application/upgrade/artifact/download/fs/verifier_test.go @@ -227,6 +227,8 @@ func TestVerify(t *testing.T) { testClient := NewDownloader(config) downloadResult, err := testClient.Download(ctx, agentSpec, testVersion) require.NoError(t, err, "fs.Downloader could not download artifacts") + _, err = testClient.DownloadAsc(context.Background(), agentSpec, *testVersion) + require.NoError(t, err, "fs.Downloader could not download artifacts .asc file") _, err = os.Stat(downloadResult.ArtifactPath) require.NoError(t, err) From 6bd4a2bff18c588b98357c17264015dc750b1b0d Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 12:04:23 +0300 Subject: [PATCH 098/127] enhancement(5235): asserting returned error --- .../application/upgrade/step_unpack_test.go | 33 +++++++++---------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index 8b74dee7b79..a1c9ce4c6b9 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -221,7 +221,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { name string args args want unpackResult - wantErr assert.ErrorAssertionFunc + wantErr error checkFiles checkExtractedPath flavor string copyFunc func(dst io.Writer, src io.Reader) (int64, error) @@ -239,7 +239,7 @@ func TestUpgrader_unpackTarGz(t 
*testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, checkFiles: func(t *testing.T, testDataDir string) { versionedHome := filepath.Join(testDataDir, "elastic-agent-abcdef") checkExtractedFilesOutOfOrder(t, versionedHome) @@ -258,7 +258,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, checkFiles: checkExtractedFilesWithManifest, }, { @@ -274,7 +274,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.Error, + wantErr: errors.New("test copy error"), copyFunc: func(dst io.Writer, src io.Reader) (int64, error) { return 0, errors.New("test copy error") }, @@ -292,7 +292,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, flavor: "basic", checkFiles: func(t *testing.T, testDataDir string) { checkFilesPresence(t, testDataDir, @@ -317,7 +317,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, flavor: "servers", checkFiles: func(t *testing.T, testDataDir string) { checkFilesPresence(t, testDataDir, @@ -352,9 +352,7 @@ func TestUpgrader_unpackTarGz(t *testing.T) { require.NoError(t, err, "creation of test archive file failed") got, err := untar(log, archiveFile, testDataDir, tt.flavor) - if !tt.wantErr(t, err, fmt.Sprintf("untar(%v, %v, %v)", tt.args.version, archiveFile, testDataDir)) { - return - } + assert.ErrorIs(t, err, tt.wantErr, fmt.Sprintf("untar(%v, %v, %v)", tt.args.version, archiveFile, testDataDir)) assert.Equalf(t, tt.want, got, "untar(%v, %v, %v)", tt.args.version, archiveFile, testDataDir) if tt.checkFiles != nil { tt.checkFiles(t, testDataDir) @@ -378,7 +376,7 @@ func TestUpgrader_unpackZip(t *testing.T) { name string args args want unpackResult - wantErr assert.ErrorAssertionFunc + wantErr error checkFiles checkExtractedPath flavor string copyFunc func(dst io.Writer, src io.Reader) (int64, error) @@ -395,7 +393,7 @@ func TestUpgrader_unpackZip(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, checkFiles: func(t *testing.T, testDataDir string) { versionedHome := filepath.Join(testDataDir, "elastic-agent-abcdef") checkExtractedFilesOutOfOrder(t, versionedHome) @@ -413,7 +411,7 @@ func TestUpgrader_unpackZip(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, checkFiles: checkExtractedFilesWithManifest, }, { @@ -428,7 +426,7 @@ func TestUpgrader_unpackZip(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.Error, + wantErr: errors.New("test copy error"), copyFunc: func(dst io.Writer, src io.Reader) (int64, error) { return 0, errors.New("test copy error") }, @@ -445,7 +443,8 @@ func TestUpgrader_unpackZip(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + // wantErr: assert.NoError, + wantErr: nil, flavor: "basic", checkFiles: 
func(t *testing.T, testDataDir string) { checkFilesPresence(t, testDataDir, @@ -469,7 +468,7 @@ func TestUpgrader_unpackZip(t *testing.T) { Hash: "abcdef", VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), }, - wantErr: assert.NoError, + wantErr: nil, flavor: "servers", checkFiles: func(t *testing.T, testDataDir string) { checkFilesPresence(t, testDataDir, @@ -504,9 +503,7 @@ func TestUpgrader_unpackZip(t *testing.T) { require.NoError(t, err, "creation of test archive file failed") got, err := unzip(log, archiveFile, testDataDir, tt.flavor) - if !tt.wantErr(t, err, fmt.Sprintf("unzip(%v, %v)", archiveFile, testDataDir)) { - return - } + assert.ErrorIs(t, err, tt.wantErr, fmt.Sprintf("unzip(%v, %v)", archiveFile, testDataDir)) assert.Equalf(t, tt.want, got, "unzip(%v, %v)", archiveFile, testDataDir) if tt.checkFiles != nil { tt.checkFiles(t, testDataDir) From 4eda991db0c290569ee7e5e467ab55f2d5647695 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 22:22:08 +0300 Subject: [PATCH 099/127] enhancement(5235): moved test functions around --- .../agent/application/upgrade/upgrade_test.go | 190 +++++++++--------- 1 file changed, 95 insertions(+), 95 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index d95781f31e9..7dc9ca32d2b 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -950,101 +950,6 @@ func TestDownloaderFactoryProvider(t *testing.T) { require.Equal(t, "downloader factory \"nonExistentFactory\" not found", err.Error()) } -func setupForFileDownloader(sourcePrefix string, expectedFileName string, partialData []byte) setupFunc { - return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { - testDownloadPath := filepath.Join(basePath, "downloads") - originalDownloadsPath := paths.Downloads() - t.Cleanup(func() { - paths.SetDownloads(originalDownloadsPath) - }) - paths.SetDownloads(targetPath) - err := os.MkdirAll(testDownloadPath, 0755) - require.NoError(t, err) - tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) - err = os.WriteFile(tempArtifactPath, partialData, 0644) - require.NoError(t, err) - - config.SourceURI = sourcePrefix + tempArtifactPath - config.DropPath = testDownloadPath - } -} - -func setupForHttpDownloader(partialData []byte) (setupFunc, *httptest.Server) { - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write(partialData) //nolint:errcheck //test code - })) - - return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { - config.SourceURI = server.URL - config.RetrySleepInitDuration = 1 * time.Second - config.HTTPTransportSettings = httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - } - }, server -} - -func fileDownloaderFactoryProvider(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider { - fileDownloader := fs.NewDownloader(config) - fileDownloader.CopyFunc = copyFunc - - fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fileDownloader, nil - } - - return &downloaderFactoryProvider{ - downloaderFactories: map[string]downloaderFactory{ - fileDownloaderFactory: fileFactory, - }, - } -} - -func composedDownloaderFactoryProvider(config 
*artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error), log *logger.Logger, upgradeDetails *details.Details) *downloaderFactoryProvider { - fileDownloader := fs.NewDownloader(config) - httpDownloader := httpDownloader.NewDownloaderWithClient(log, config, http.Client{}, upgradeDetails) - - if strings.HasPrefix(config.SourceURI, "http://") || strings.HasPrefix(config.SourceURI, "https://") { - httpDownloader.CopyFunc = copyFunc - } else { - fileDownloader.CopyFunc = copyFunc - } - - composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) - - fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return fileDownloader, nil - } - composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { - return composedDownloader, nil - } - - return &downloaderFactoryProvider{ - downloaderFactories: map[string]downloaderFactory{ - fileDownloaderFactory: fileFactory, - composedDownloaderFactory: composedFactory, - }, - } -} - -type setupFunc func(t *testing.T, config *artifact.Config, basePath string, targetPath string) -type factoryProviderFunc func(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider -type mockError struct { - message string -} - -func (e *mockError) Error() string { - return e.message -} - -func (e *mockError) Is(target error) bool { - return e.message == target.Error() -} - -type testError struct { - copyFuncError error - expectedError error -} - type upgradeTestCase struct { targetVersion string parsedTargetVersion *agtversion.ParsedSemVer @@ -1295,6 +1200,101 @@ func TestUpgrade(t *testing.T) { } } +func setupForFileDownloader(sourcePrefix string, expectedFileName string, partialData []byte) setupFunc { + return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { + testDownloadPath := filepath.Join(basePath, "downloads") + originalDownloadsPath := paths.Downloads() + t.Cleanup(func() { + paths.SetDownloads(originalDownloadsPath) + }) + paths.SetDownloads(targetPath) + err := os.MkdirAll(testDownloadPath, 0755) + require.NoError(t, err) + tempArtifactPath := filepath.Join(testDownloadPath, expectedFileName) + err = os.WriteFile(tempArtifactPath, partialData, 0644) + require.NoError(t, err) + + config.SourceURI = sourcePrefix + tempArtifactPath + config.DropPath = testDownloadPath + } +} + +func setupForHttpDownloader(partialData []byte) (setupFunc, *httptest.Server) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write(partialData) //nolint:errcheck //test code + })) + + return func(t *testing.T, config *artifact.Config, basePath string, targetPath string) { + config.SourceURI = server.URL + config.RetrySleepInitDuration = 1 * time.Second + config.HTTPTransportSettings = httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + } + }, server +} + +func fileDownloaderFactoryProvider(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider { + fileDownloader := fs.NewDownloader(config) + fileDownloader.CopyFunc = copyFunc + + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } + + return &downloaderFactoryProvider{ + 
downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + }, + } +} + +func composedDownloaderFactoryProvider(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error), log *logger.Logger, upgradeDetails *details.Details) *downloaderFactoryProvider { + fileDownloader := fs.NewDownloader(config) + httpDownloader := httpDownloader.NewDownloaderWithClient(log, config, http.Client{}, upgradeDetails) + + if strings.HasPrefix(config.SourceURI, "http://") || strings.HasPrefix(config.SourceURI, "https://") { + httpDownloader.CopyFunc = copyFunc + } else { + fileDownloader.CopyFunc = copyFunc + } + + composedDownloader := composed.NewDownloader(fileDownloader, httpDownloader) + + fileFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return fileDownloader, nil + } + composedFactory := func(ver *agtversion.ParsedSemVer, l *logger.Logger, config *artifact.Config, d *details.Details) (download.Downloader, error) { + return composedDownloader, nil + } + + return &downloaderFactoryProvider{ + downloaderFactories: map[string]downloaderFactory{ + fileDownloaderFactory: fileFactory, + composedDownloaderFactory: composedFactory, + }, + } +} + +type setupFunc func(t *testing.T, config *artifact.Config, basePath string, targetPath string) +type factoryProviderFunc func(config *artifact.Config, copyFunc func(dst io.Writer, src io.Reader) (int64, error)) *downloaderFactoryProvider +type mockError struct { + message string +} + +func (e *mockError) Error() string { + return e.message +} + +func (e *mockError) Is(target error) bool { + return e.message == target.Error() +} + +type testError struct { + copyFuncError error + expectedError error +} + +func TestE2EUpgradeDownloadErrors(t *testing.T) { testArtifact := artifact.Artifact{ Name: "Elastic Agent", Cmd: "elastic-agent", From 69c94d1fa4ecb7df1b3a8052b8f9007c336dfec5 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 22:24:48 +0300 Subject: [PATCH 100/127] enhancement(5235): added unpack step error handling tests --- .../agent/application/upgrade/upgrade_test.go | 126 ++++++++++++++++++ 1 file changed, 126 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 7dc9ca32d2b..773353ebe5a 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -15,6 +15,7 @@ import ( "os" "path/filepath" "reflect" + "runtime" "strings" "testing" "time" @@ -1425,3 +1425,128 @@ func TestE2EUpgradeDownloadErrors(t *testing.T) { }) } } + +func archiveFilesWithArchiveDirName(archiveWithoutSuffix string, archiveFiles []files) []files { + modifiedArchiveFiles := make([]files, len(archiveFiles)) + for i, file := range archiveFiles { + file.path = strings.Replace(file.path, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64", archiveWithoutSuffix, 1) + modifiedArchiveFiles[i] = file + } + return modifiedArchiveFiles +} + +func createArchive(t *testing.T, archiveName string, archiveFiles []files) (string, error) { + archiveWithoutSuffix := strings.TrimSuffix(archiveName, ".tar.gz") + archiveWithoutSuffix = strings.TrimSuffix(archiveWithoutSuffix, ".zip") + + archiveFiles = archiveFilesWithArchiveDirName(archiveWithoutSuffix, archiveFiles) + + if runtime.GOOS == "windows" { + return createZipArchive(t, archiveName, archiveFiles) + } + return createTarArchive(t, archiveName, archiveFiles) +} + +func 
TestE2EUpgradeUnpackErrors(t *testing.T) { + log, _ := loggertest.New("test") + + testVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + upgradeDetails := details.NewDetails(testVersion.String(), details.StateRequested, "test") + artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, runtime.GOOS, runtime.GOARCH) + require.NoError(t, err) + + versionedHome := "data/elastic-agent-1.2.3-SNAPSHOT-abcdef" + + t.Logf("Expected artifact name: %s", artifactName) + + archive, err := createArchive(t, artifactName, archiveFilesWithMoreComponents) + require.NoError(t, err) + t.Logf("Created archive: %s", archive) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, archive) + })) + t.Cleanup(server.Close) + + tmpCopyFunc := unpackArchiveCopyFunc + t.Cleanup(func() { + unpackArchiveCopyFunc = tmpCopyFunc + }) + + testCases := map[string]testError{ + "should cleanup downloaded artifact and partially unpacked archive on generic error": { + copyFuncError: errors.New("test copy error"), + expectedError: errors.New("test copy error"), + }, + } + + for _, te := range TestErrors { + testCases[fmt.Sprintf("should cleanup downloaded artifact and partially unpacked archive on disk space error: %v and return InsufficientDiskSpace error", te)] = testError{ + copyFuncError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(testVersion.String()) + + baseDir := t.TempDir() + paths.SetTop(baseDir) + testTargetPath := filepath.Join(baseDir, "target") + versionedHomePath := filepath.Join(baseDir, versionedHome) + + config := artifact.Config{ + TargetDirectory: testTargetPath, + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + unpackArchiveCopyFunc = func(dst io.Writer, src io.Reader) (int64, error) { + require.DirExists(t, versionedHomePath, "versionedHomePath should exist before copying") + entries, err := os.ReadDir(versionedHomePath) + require.NoError(t, err, "reading versionedHomePath failed") + require.Len(t, entries, 1, "versionedHomePath should only contain one file before copying") + + fileInfo, err := entries[0].Info() + require.NoError(t, err, "getting file info failed") + require.False(t, fileInfo.IsDir(), "the entry in versionedHomePath should be a file") + + filePath := filepath.Join(versionedHomePath, entries[0].Name()) + file, err := os.Open(filePath) + require.NoError(t, err, fmt.Sprintf("error opening file %s", filePath)) + + defer file.Close() + + stat, err := file.Stat() + require.NoError(t, err, fmt.Sprintf("error getting file info for %s", filePath)) + require.Equal(t, int64(0), stat.Size(), "file in versionedHomePath should be empty") + + _, err = io.Copy(dst, src) + require.NoError(t, err, fmt.Sprintf("error copying archive to %s", versionedHomePath)) + + statAfter, err := file.Stat() + require.NoError(t, err, fmt.Sprintf("error getting file info for %s", filePath)) + require.NotEqual(t, int64(0), statAfter.Size(), "file in versionedHomePath should not be empty after copying") + + return 0, tc.copyFuncError + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), testVersion.String(), server.URL, nil, upgradeDetails, true, true) + 
require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") + + require.NoDirExists(t, versionedHomePath, "partially unpacked archive should be cleaned up") + + artifactPath, err := artifact.GetArtifactPath(agentArtifact, *testVersion, runtime.GOOS, runtime.GOARCH, config.TargetDirectory) + require.NoError(t, err) + require.NoFileExists(t, artifactPath, "downloaded artifact should be cleaned up") + }) + } +} From 9c894f4e2ba2dfa6851077c95f32ebc110d15cc8 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 22:25:12 +0300 Subject: [PATCH 101/127] enhancement(5235): removed unnecessary commented code --- internal/pkg/agent/application/upgrade/upgrade_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 773353ebe5a..3b2a0c00dc6 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1412,7 +1412,6 @@ func TestE2EUpgradeDownloadErrors(t *testing.T) { upgrader, err := NewUpgrader(log, &config, mockAgentInfo) require.NoError(t, err) - // upgrader.artifactDownloader = artifactDownloader upgrader.upgradeExecutor = executeUpgrade _, err = upgrader.Upgrade(context.Background(), version.String(), config.SourceURI, nil, upgradeDetails, false, false) From 6d5f5089757fde1b99893dee01e7c779a58d7430 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sat, 9 Aug 2025 22:26:26 +0300 Subject: [PATCH 102/127] enhancement(5235): removed dev comment --- internal/pkg/agent/application/upgrade/step_unpack.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index c4be925cd48..f6a6ff01e85 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -149,7 +149,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string, flavor string) (unpa } dstPath := strings.TrimPrefix(mappedPackagePath, "data/") - dstPath = filepath.Join(dataDir, dstPath) // TODO: look into this, this may be the new home to cleanup + dstPath = filepath.Join(dataDir, dstPath) if skipFn(dstPath) { return nil From 2fc7cfe7d7d2c8e714f845b6eaa675e00ff5e3c9 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 00:21:02 +0300 Subject: [PATCH 103/127] enhancement(5235): using config to get os and arch instead of runtime --- internal/pkg/agent/application/upgrade/upgrade_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 3b2a0c00dc6..b076e681699 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1449,6 +1449,8 @@ func TestE2EUpgradeUnpackErrors(t *testing.T) { log, _ := loggertest.New("test") + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 instead of 64 which is not a valid arch when used in GetArtifactName + testVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") upgradeDetails := details.NewDetails(testVersion.String(), details.StateRequested, "test") artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, runtime.GOOS, runtime.GOARCH) From 
5af449bb03ef46c91f306432eeb9ea91522c1fd3 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 00:21:50 +0300 Subject: [PATCH 104/127] enhancement(5235): using config to get os and arch --- internal/pkg/agent/application/upgrade/upgrade_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index b076e681699..3f92072a473 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1453,7 +1453,7 @@ func TestE2EUpgradeUnpackErrors(t *testing.T) { testVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") upgradeDetails := details.NewDetails(testVersion.String(), details.StateRequested, "test") - artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, runtime.GOOS, runtime.GOARCH) + artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) versionedHome := "data/elastic-agent-1.2.3-SNAPSHOT-abcdef" From 6d923cee3646827d0e0b8e0b1a7bcc123e0f8d97 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 00:22:58 +0300 Subject: [PATCH 105/127] enhancement(5235): updated comment --- internal/pkg/agent/application/upgrade/upgrade_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 3f92072a473..ecd15835803 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1449,7 +1449,7 @@ func TestE2EUpgradeUnpackErrors(t *testing.T) { log, _ := loggertest.New("test") - tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 instead of 64 which is not a valid arch when used in GetArtifactName + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName testVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") upgradeDetails := details.NewDetails(testVersion.String(), details.StateRequested, "test") From 3c2dfb435b662287a1f8c66868cebca53ed9dea0 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:29:11 +0300 Subject: [PATCH 106/127] enhancement(5235): added package level exported var for paths.Home() for testability --- internal/pkg/agent/application/paths/files.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/paths/files.go b/internal/pkg/agent/application/paths/files.go index c6ea6024c5e..31708371e95 100644 --- a/internal/pkg/agent/application/paths/files.go +++ b/internal/pkg/agent/application/paths/files.go @@ -78,19 +78,21 @@ func AgentCapabilitiesPath() string { return filepath.Join(Config(), defaultAgentCapabilitiesFile) } +var HomePath = Home // used only for mocking Home() for testing purposes + // AgentActionStoreFile is the file that contains the action that can be replayed after restart. 
func AgentActionStoreFile() string { - return filepath.Join(Home(), defaultAgentActionStoreFile) + return filepath.Join(HomePath(), defaultAgentActionStoreFile) } // AgentStateStoreYmlFile is the file that contains the persisted state of the agent including the action that can be replayed after restart. func AgentStateStoreYmlFile() string { - return filepath.Join(Home(), defaultAgentStateStoreYmlFile) + return filepath.Join(HomePath(), defaultAgentStateStoreYmlFile) } // AgentStateStoreFile is the file that contains the persisted state of the agent including the action that can be replayed after restart encrypted. func AgentStateStoreFile() string { - return filepath.Join(Home(), defaultAgentStateStoreFile) + return filepath.Join(HomePath(), defaultAgentStateStoreFile) } // AgentInputsDPath is directory that contains the fragment of inputs yaml for K8s deployment. From ba288a12472b2175796442be3c48dce4f5a7c87a Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:30:36 +0300 Subject: [PATCH 107/127] enhancement(5235): added package level vars for copy.Copy and os.WriteFile for testing --- internal/pkg/agent/application/upgrade/directory_copy.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/directory_copy.go b/internal/pkg/agent/application/upgrade/directory_copy.go index 0a0b7b9501e..988962b92b2 100644 --- a/internal/pkg/agent/application/upgrade/directory_copy.go +++ b/internal/pkg/agent/application/upgrade/directory_copy.go @@ -15,6 +15,9 @@ import ( type directoryCopier struct { } +var writeFile = os.WriteFile +var dirCopy = copy.Copy + // TODO: add tests for this // Update to accept copydir function func (d *directoryCopier) copyActionStore(log *logger.Logger, newHome string) error { @@ -34,7 +37,7 @@ func (d *directoryCopier) copyActionStore(log *logger.Logger, newHome string) er return err } - if err := os.WriteFile(newActionStorePath, currentActionStore, 0o600); err != nil { + if err := writeFile(newActionStorePath, currentActionStore, 0o600); err != nil { return err } } @@ -90,7 +93,7 @@ func copyDir(l *logger.Logger, from, to string, ignoreErrs bool) error { copyConcurrency = runtime.NumCPU() * 4 } - return copy.Copy(from, to, copy.Options{ + return dirCopy(from, to, copy.Options{ OnSymlink: func(_ string) copy.SymlinkAction { return copy.Shallow }, From 07db133a41ad57488bd99aab9abb3fafa8f6b192 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:31:45 +0300 Subject: [PATCH 108/127] enhancement(5235): added function to check archive files are extracted correctly using manifest and versioned home --- .../pkg/agent/application/upgrade/step_unpack_test.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index a1c9ce4c6b9..c6f0f6822a9 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -532,8 +532,7 @@ func checkExtractedFilesOutOfOrder(t *testing.T, versionedHome string) { } } -func checkExtractedFilesWithManifest(t *testing.T, testDataDir string) { - versionedHome := filepath.Join(testDataDir, "elastic-agent-1.2.3-SNAPSHOT-abcdef") +func checkExtractedFilesWithManifestAndVersionedHome(t *testing.T, testDataDir string, versionedHome string) { require.DirExists(t, versionedHome, "mapped versioned home directory does not exists") mappedAgentExecutable =
filepath.Join(versionedHome, agentName) if assert.FileExistsf(t, mappedAgentExecutable, "agent executable %q is not found in mapped versioned home directory %q", mappedAgentExecutable, versionedHome) { @@ -551,6 +550,11 @@ func checkExtractedFilesWithManifest(t *testing.T, testDataDir string) { } } +func checkExtractedFilesWithManifest(t *testing.T, testDataDir string) { + versionedHome := filepath.Join(testDataDir, "elastic-agent-1.2.3-SNAPSHOT-abcdef") + checkExtractedFilesWithManifestAndVersionedHome(t, testDataDir, versionedHome) +} + func checkFilesPresence(t *testing.T, testDataDir string, requiredFiles, unwantedFiles []string) { versionedHome := filepath.Join(testDataDir, "elastic-agent-1.2.3-SNAPSHOT-abcdef") for _, f := range requiredFiles { From 29ec2599b6b43d19ae31eb892ba4daf80b4ccad4 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:33:17 +0300 Subject: [PATCH 109/127] enhancement(5235): added helper functions to create archives and added action store error handling test --- .../agent/application/upgrade/upgrade_test.go | 165 +++++++++++++++++- 1 file changed, 160 insertions(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index ecd15835803..812d45efa82 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1425,21 +1425,36 @@ func TesE2EtUpgradeDownloadErrors(t *testing.T) { } } -func archiveFilesWithArchiveDirName(archiveWithoutSuffix string, archiveFiles []files) []files { +func archiveFilesWithArchiveDirName(archiveName string, archiveFiles []files) []files { + archiveWithoutSuffix := strings.TrimSuffix(archiveName, ".tar.gz") + archiveWithoutSuffix = strings.TrimSuffix(archiveWithoutSuffix, ".zip") + modifiedArchiveFiles := make([]files, len(archiveFiles)) for i, file := range archiveFiles { file.path = strings.Replace(file.path, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64", archiveWithoutSuffix, 1) modifiedArchiveFiles[i] = file } + return modifiedArchiveFiles } -func createArchive(t *testing.T, archiveName string, archiveFiles []files) (string, error) { - archiveWithoutSuffix := strings.TrimSuffix(archiveName, ".tar.gz") - archiveWithoutSuffix = strings.TrimSuffix(archiveWithoutSuffix, ".zip") +func archiveFilesWithVersionedHome(version string, meta string, archiveFiles []files) []files { + modifiedArchiveFiles := make([]files, len(archiveFiles)) + for i, file := range archiveFiles { + if file.content == ea_123_manifest { + newContent := strings.ReplaceAll(file.content, "1.2.3", version) + newContent = strings.ReplaceAll(newContent, "abcdef", meta) + + file.content = newContent + } + file.path = strings.ReplaceAll(file.path, "abcdef", meta) + modifiedArchiveFiles[i] = file + } - archiveFiles = archiveFilesWithArchiveDirName(archiveWithoutSuffix, archiveFiles) + return modifiedArchiveFiles +} +func createArchive(t *testing.T, archiveName string, archiveFiles []files) (string, error) { if runtime.GOOS == "windows" { return createZipArchive(t, archiveName, archiveFiles) } @@ -1551,3 +1566,143 @@ func TestE2EUpgradeUnpackErrors(t *testing.T) { }) } } + +func TestUpgradeCopyDirErrors(t *testing.T) { + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath 
= tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := archiveFilesWithArchiveDirName(initialArtifactName, archiveFilesWithMoreComponents) + initialArchiveFiles = archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef", initialArchiveFiles) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := archiveFilesWithArchiveDirName(targetArtifactName, archiveFilesWithMoreComponents) + targetArchiveFiles = archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl", targetArchiveFiles) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + testCases := map[string]struct { + writeFileError error + expectedError error + }{ + "should return error if action store copy fails": { + writeFileError: errors.New("test write error"), + expectedError: errors.New("test write error"), + }, + } + + for _, te := range TestErrors { + testCases[fmt.Sprintf("should return error if action store copy fails with disk space error: %v", te)] = struct { + writeFileError error + expectedError error + }{ + writeFileError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. 
+ paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + tmpWriteFile := writeFile + t.Cleanup(func() { + writeFile = tmpWriteFile + }) + + writeFile = func(name string, data []byte, perm os.FileMode) error { + require.DirExists(t, paths.HomePath(), "home path should exist before writing") + require.NoFileExists(t, name, fmt.Sprintf("file %s should not exist before writing", name)) + + err := tmpWriteFile(name, data, perm) + require.NoError(t, err) + + require.FileExists(t, name, fmt.Sprintf("file %s should exist after writing", name)) + + return tc.writeFileError + } + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "the new agent directory should be cleaned up if action store copy fails") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0, "the downloaded artifact should be cleaned up if action store copy fails") + }) + } + +} From e8b582d8902d07bbb011ac4dea0f10e0240b6d00 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:33:54 +0300 Subject: [PATCH 110/127] enhancement(5235): updated the use of archive helpers in unpack error handling tests --- internal/pkg/agent/application/upgrade/upgrade_test.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 812d45efa82..f3b5bce27f1 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1468,17 +1468,16 @@ func TestE2EUpgradeUnpackErrors(t *testing.T) { testVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") upgradeDetails := details.NewDetails(testVersion.String(), details.StateRequested, "test") + artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) - versionedHome := "data/elastic-agent-1.2.3-SNAPSHOT-abcdef" - - 
t.Logf("Expected artifact name: %s", artifactName) - - archive, err := createArchive(t, artifactName, archiveFilesWithMoreComponents) + archive, err := createArchive(t, artifactName, archiveFilesWithArchiveDirName(artifactName, archiveFilesWithMoreComponents)) require.NoError(t, err) t.Logf("Created archive: %s", archive) + versionedHome := "data/elastic-agent-1.2.3-SNAPSHOT-abcdef" + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { http.ServeFile(w, r, archive) })) From 5e41094b6dc783f1ebe58c534a082f5e3be3f30c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 18:34:21 +0300 Subject: [PATCH 111/127] enhancement(5235): updated test function names --- internal/pkg/agent/application/upgrade/upgrade_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index f3b5bce27f1..c64a0fffd27 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1296,7 +1296,7 @@ type testError struct { expectedError error } -func TesE2EtUpgradeDownloadErrors(t *testing.T) { +func TestUpgradeDownloadErrors(t *testing.T) { testArtifact := artifact.Artifact{ Name: "Elastic Agent", Cmd: "elastic-agent", @@ -1461,7 +1461,7 @@ func createArchive(t *testing.T, archiveName string, archiveFiles []files) (stri return createTarArchive(t, archiveName, archiveFiles) } -func TestE2EUpgradeUnpackErrors(t *testing.T) { +func TestUpgradeUnpackErrors(t *testing.T) { log, _ := loggertest.New("test") tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName From 9bef459a0eede349e7eea81f77a19e7ca3305ad0 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 20:52:58 +0300 Subject: [PATCH 112/127] enhancement(5235): use home path to get run path --- internal/pkg/agent/application/paths/common.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/paths/common.go b/internal/pkg/agent/application/paths/common.go index 463f7ea4fdb..f36c85e1abe 100644 --- a/internal/pkg/agent/application/paths/common.go +++ b/internal/pkg/agent/application/paths/common.go @@ -197,7 +197,7 @@ func DataFrom(topDirPath string) string { // Run returns the run directory for Agent func Run() string { - return filepath.Join(Home(), "run") + return filepath.Join(HomePath(), "run") } // Components returns the component directory for Agent From 8a1ee3c21ea4331d0f2d3c8effbbecd9112cda6c Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 21:13:23 +0300 Subject: [PATCH 113/127] enhancement(5235): added run dir copy error test --- .../agent/application/upgrade/upgrade_test.go | 159 ++++++++++++++++++ 1 file changed, 159 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index c64a0fffd27..6e003fde37e 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -20,6 +20,7 @@ import ( "testing" "time" + "github.com/otiai10/copy" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" @@ -1703,5 +1704,163 @@ func TestUpgradeCopyDirErrors(t *testing.T) { require.Len(t, entries, 0, "the downloaded artifact should be cleaned up 
if action store copy fails") }) } +} + +func TestUpgradeCopyRunDirErrors(t *testing.T) { + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath = tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := archiveFilesWithArchiveDirName(initialArtifactName, archiveFilesWithMoreComponents) + initialArchiveFiles = archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef", initialArchiveFiles) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := archiveFilesWithArchiveDirName(targetArtifactName, archiveFilesWithMoreComponents) + targetArchiveFiles = archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl", targetArchiveFiles) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + testCases := map[string]struct { + dirCopyError error + expectedError error + }{ + "should return error if run directory copy fails": { + dirCopyError: errors.New("test dir copy error"), + expectedError: errors.New("test dir copy error"), + }, + } + for _, te := range TestErrors { + testCases[fmt.Sprintf("should return error if run directory copy fails with disk space error: %v", te)] = struct { + dirCopyError error + expectedError error + }{ + dirCopyError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. + paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + // Create several files in the initial run path and save their paths in an array. 
+ initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + + var createdFilePaths []string + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + newRunPath := filepath.Join(newVersionedHomePath, "run") + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + tmpDirCopy := dirCopy + t.Cleanup(func() { + dirCopy = tmpDirCopy + }) + + dirCopy = func(src string, dest string, opts ...copy.Options) error { + require.DirExists(t, newRunPath, "new run path should exist before copying") + runEntries, err := os.ReadDir(newRunPath) + require.NoError(t, err, "reading new run directory failed") + require.Len(t, runEntries, 0, "new run directory should be empty before copying") + + err = tmpDirCopy(src, dest, opts...) + require.NoError(t, err) + + runEntries, err = os.ReadDir(newRunPath) + require.NoError(t, err, "reading new run directory failed") + for _, createdFilePath := range createdFilePaths { + _, fileName := filepath.Split(createdFilePath) + require.FileExists(t, filepath.Join(newRunPath, fileName), "expected run file %q to exist in new run directory", fileName) + } + + return tc.dirCopyError + } + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "the new agent directory should be cleaned up if run directory copy fails") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0, "the downloaded artifact should be cleaned up if run directory copy fails") + }) + } } From a9aaa6ca22508418fac943fe1cbdbd00e5928b4f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 21:15:29 +0300 Subject: [PATCH 114/127] enhancement(5235): refactored action store copy and run dir copy error tests --- .../agent/application/upgrade/upgrade_test.go | 330 ++++++------------ 1 file changed, 107 insertions(+), 223 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 6e003fde37e..50ad32d5e94 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1567,7 +1567,7 @@ func TestUpgradeUnpackErrors(t *testing.T) { } } -func TestUpgradeCopyDirErrors(t *testing.T) { +func TestUpgradeDirectoryCopyErrors(t *testing.T) { log, _ := loggertest.New("test") tempConfig := &artifact.Config{} // used only to get os and arch, 
runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName @@ -1602,265 +1602,149 @@ func TestUpgradeCopyDirErrors(t *testing.T) { } testCases := map[string]struct { - writeFileError error - expectedError error + mockReturnedError error + expectedError error }{ - "should return error if action store copy fails": { - writeFileError: errors.New("test write error"), - expectedError: errors.New("test write error"), + "should return error if run directory copy fails": { + mockReturnedError: errors.New("test dir copy error"), + expectedError: errors.New("test dir copy error"), }, } for _, te := range TestErrors { - testCases[fmt.Sprintf("should return error if action store copy fails with disk space error: %v", te)] = struct { - writeFileError error - expectedError error + testCases[fmt.Sprintf("should return error if run directory copy fails with disk space error: %v", te)] = struct { + mockReturnedError error + expectedError error }{ - writeFileError: te, - expectedError: upgradeErrors.ErrInsufficientDiskSpace, + mockReturnedError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, } } - for name, tc := range testCases { - t.Run(name, func(t *testing.T) { - paths.SetTop(t.TempDir()) - - initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) - require.NoError(t, err) - - t.Logf("Created archive: %s", initialArchive) - - initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") - require.NoError(t, err) + for _, copiedDir := range []string{"action_store", "run_directory"} { + for name, tc := range testCases { + t.Run(fmt.Sprintf("when copying %s: %s", copiedDir, name), func(t *testing.T) { + paths.SetTop(t.TempDir()) - checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) - - // Overriding HomePath which is just a var holding paths.Home() because - // Home() returns "unknow" short commit and returns the release version - // which is set in init. 
- paths.HomePath = func() string { - actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) - return actualPath - } - - // The file list does not contain the action store files, so we need to - // create them - err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) - require.NoError(t, err) - err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) - require.NoError(t, err) - err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) - require.NoError(t, err) - - targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) - require.NoError(t, err) - - t.Logf("Created archive: %s", targetArchive) - - newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" - newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) - - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - http.ServeFile(w, r, targetArchive) - })) - t.Cleanup(server.Close) - - tmpWriteFile := writeFile - t.Cleanup(func() { - writeFile = tmpWriteFile - }) - - writeFile = func(name string, data []byte, perm os.FileMode) error { - require.DirExists(t, paths.HomePath(), "home path should exist before writing") - require.NoFileExists(t, name, fmt.Sprintf("file %s should not exist before writing", name)) - - err := tmpWriteFile(name, data, perm) + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) require.NoError(t, err) - require.FileExists(t, name, fmt.Sprintf("file %s should exist after writing", name)) - - return tc.writeFileError - } - - config := artifact.Config{ - TargetDirectory: paths.Downloads(), - SourceURI: server.URL, - RetrySleepInitDuration: 1 * time.Second, - HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - }, - } - - upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - require.NoError(t, err) + t.Logf("Created archive: %s", initialArchive) - _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) - require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") - - require.NoDirExists(t, newVersionedHomePath, "the new agent directory should be cleaned up if action store copy fails") - - entries, err := os.ReadDir(config.TargetDirectory) - require.NoError(t, err, "reading target directory failed") - require.Len(t, entries, 0, "the downloaded artifact should be cleaned up if action store copy fails") - }) - } -} - -func TestUpgradeCopyRunDirErrors(t *testing.T) { - log, _ := loggertest.New("test") + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) - tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) - // Prepare to override HomePath - tmpHomePath := paths.HomePath - t.Cleanup(func() { - paths.HomePath = tmpHomePath - }) + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. 
+ paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } - initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") - initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) - require.NoError(t, err) + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) - initialArchiveFiles := archiveFilesWithArchiveDirName(initialArtifactName, archiveFilesWithMoreComponents) - initialArchiveFiles = archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef", initialArchiveFiles) + var createdFilePaths []string + if copiedDir == "run_directory" { + // Create several files in the initial run path and save their paths in an array. + initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) - targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") - targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) - require.NoError(t, err) + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + } - targetArchiveFiles := archiveFilesWithArchiveDirName(targetArtifactName, archiveFilesWithMoreComponents) - targetArchiveFiles = archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl", targetArchiveFiles) + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) - mockAgentInfo := mockinfo.NewAgent(t) - mockAgentInfo.On("Version").Return(targetVersion.String()) + t.Logf("Created archive: %s", targetArchive) - upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + newRunPath := filepath.Join(newVersionedHomePath, "run") - tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive - log: log, - } + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) - testCases := map[string]struct { - dirCopyError error - expectedError error - }{ - "should return error if run directory copy fails": { - dirCopyError: errors.New("test dir copy error"), - expectedError: errors.New("test dir copy error"), - }, - } + if copiedDir == "run_directory" { + tmpDirCopy := dirCopy + t.Cleanup(func() { + dirCopy = tmpDirCopy + }) - for _, te := range TestErrors { - testCases[fmt.Sprintf("should return error if run directory copy fails with disk space error: %v", te)] = struct { - dirCopyError error - expectedError error - }{ - dirCopyError: te, - expectedError: upgradeErrors.ErrInsufficientDiskSpace, - } - } + dirCopy = func(src string, dest string, opts ...copy.Options) error { + require.DirExists(t, 
newRunPath, "new run path should exist before copying") + runEntries, err := os.ReadDir(newRunPath) + require.NoError(t, err, "reading new run directory failed") + require.Len(t, runEntries, 0, "new run directory should be empty before copying") - for name, tc := range testCases { - t.Run(name, func(t *testing.T) { - paths.SetTop(t.TempDir()) + err = tmpDirCopy(src, dest, opts...) + require.NoError(t, err) - initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) - require.NoError(t, err) + runEntries, err = os.ReadDir(newRunPath) + require.NoError(t, err, "reading new run directory failed") + for _, createdFilePath := range createdFilePaths { + _, fileName := filepath.Split(createdFilePath) + require.FileExists(t, filepath.Join(newRunPath, fileName), "expected run file %q to exist in new run directory", fileName) + } - t.Logf("Created archive: %s", initialArchive) + return tc.mockReturnedError + } + } else { + tmpWriteFile := writeFile + t.Cleanup(func() { + writeFile = tmpWriteFile + }) - initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") - require.NoError(t, err) + writeFile = func(name string, data []byte, perm os.FileMode) error { + require.DirExists(t, paths.HomePath(), "home path should exist before writing") + require.NoFileExists(t, name, fmt.Sprintf("file %s should not exist before writing", name)) - checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + err := tmpWriteFile(name, data, perm) + require.NoError(t, err) - // Overriding HomePath which is just a var holding paths.Home() because - // Home() returns "unknow" short commit and returns the release version - // which is set in init. - paths.HomePath = func() string { - actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) - return actualPath - } + require.FileExists(t, name, fmt.Sprintf("file %s should exist after writing", name)) - // The file list does not contain the action store files, so we need to - // create them - err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) - require.NoError(t, err) - err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) - require.NoError(t, err) - err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) - require.NoError(t, err) + return tc.mockReturnedError + } + } - // Create several files in the initial run path and save their paths in an array. 
- initialRunPath := paths.Run() - require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } - var createdFilePaths []string - for i := 0; i < 3; i++ { - filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) - err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) require.NoError(t, err) - createdFilePaths = append(createdFilePaths, filePath) - } - - targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) - require.NoError(t, err) - - t.Logf("Created archive: %s", targetArchive) - newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" - newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) - newRunPath := filepath.Join(newVersionedHomePath, "run") + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") - server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - http.ServeFile(w, r, targetArchive) - })) - t.Cleanup(server.Close) + require.NoDirExists(t, newVersionedHomePath, fmt.Sprintf("the new agent directory should be cleaned up if %s copy fails", copiedDir)) - tmpDirCopy := dirCopy - t.Cleanup(func() { - dirCopy = tmpDirCopy + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0, fmt.Sprintf("the downloaded artifact should be cleaned up if %s copy fails", copiedDir)) }) - - dirCopy = func(src string, dest string, opts ...copy.Options) error { - require.DirExists(t, newRunPath, "new run path should exist before copying") - runEntries, err := os.ReadDir(newRunPath) - require.NoError(t, err, "reading new run directory failed") - require.Len(t, runEntries, 0, "new run directory should be empty before copying") - - err = tmpDirCopy(src, dest, opts...) 
- require.NoError(t, err) - - runEntries, err = os.ReadDir(newRunPath) - require.NoError(t, err, "reading new run directory failed") - for _, createdFilePath := range createdFilePaths { - _, fileName := filepath.Split(createdFilePath) - require.FileExists(t, filepath.Join(newRunPath, fileName), "expected run file %q to exist in new run directory", fileName) - } - - return tc.dirCopyError - } - - config := artifact.Config{ - TargetDirectory: paths.Downloads(), - SourceURI: server.URL, - RetrySleepInitDuration: 1 * time.Second, - HTTPTransportSettings: httpcommon.HTTPTransportSettings{ - Timeout: 1 * time.Second, - }, - } - - upgrader, err := NewUpgrader(log, &config, mockAgentInfo) - require.NoError(t, err) - - _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) - require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") - - require.NoDirExists(t, newVersionedHomePath, "the new agent directory should be cleaned up if run directory copy fails") - - entries, err := os.ReadDir(config.TargetDirectory) - require.NoError(t, err, "reading target directory failed") - require.Len(t, entries, 0, "the downloaded artifact should be cleaned up if run directory copy fails") - }) + } } } From df1180ec8b242fbd31b2848b8cf9da8355f91abd Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 22:36:49 +0300 Subject: [PATCH 115/127] enhancement(5235): updated archive file modification functions --- .../agent/application/upgrade/upgrade_test.go | 42 ++++++++++++------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 50ad32d5e94..860ac72ba8d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1426,22 +1426,19 @@ func TestUpgradeDownloadErrors(t *testing.T) { } } -func archiveFilesWithArchiveDirName(archiveName string, archiveFiles []files) []files { +func archiveFilesWithArchiveDirName(archiveName string) func(file files) files { archiveWithoutSuffix := strings.TrimSuffix(archiveName, ".tar.gz") archiveWithoutSuffix = strings.TrimSuffix(archiveWithoutSuffix, ".zip") - modifiedArchiveFiles := make([]files, len(archiveFiles)) - for i, file := range archiveFiles { + return func(file files) files { file.path = strings.Replace(file.path, "elastic-agent-1.2.3-SNAPSHOT-someos-x86_64", archiveWithoutSuffix, 1) - modifiedArchiveFiles[i] = file - } - return modifiedArchiveFiles + return file + } } -func archiveFilesWithVersionedHome(version string, meta string, archiveFiles []files) []files { - modifiedArchiveFiles := make([]files, len(archiveFiles)) - for i, file := range archiveFiles { +func archiveFilesWithVersionedHome(version string, meta string) func(file files) files { + return func(file files) files { if file.content == ea_123_manifest { newContent := strings.ReplaceAll(file.content, "1.2.3", version) newContent = strings.ReplaceAll(newContent, "abcdef", meta) @@ -1449,6 +1446,17 @@ func archiveFilesWithVersionedHome(version string, meta string, archiveFiles []f file.content = newContent } file.path = strings.ReplaceAll(file.path, "abcdef", meta) + + return file + } +} + +func modifyArchiveFiles(archiveFiles []files, modFuncs ...func(file files) files) []files { + modifiedArchiveFiles := make([]files, len(archiveFiles)) + for i, file := range archiveFiles { + for _, modFunc := range modFuncs { + file = modFunc(file) + } 
modifiedArchiveFiles[i] = file } @@ -1473,7 +1481,7 @@ func TestUpgradeUnpackErrors(t *testing.T) { artifactName, err := artifact.GetArtifactName(agentArtifact, *testVersion, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) - archive, err := createArchive(t, artifactName, archiveFilesWithArchiveDirName(artifactName, archiveFilesWithMoreComponents)) + archive, err := createArchive(t, artifactName, modifyArchiveFiles(archiveFilesWithMoreComponents, archiveFilesWithArchiveDirName(artifactName))) require.NoError(t, err) t.Logf("Created archive: %s", archive) @@ -1582,15 +1590,21 @@ func TestUpgradeDirectoryCopyErrors(t *testing.T) { initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) - initialArchiveFiles := archiveFilesWithArchiveDirName(initialArtifactName, archiveFilesWithMoreComponents) - initialArchiveFiles = archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef", initialArchiveFiles) + modFuncs := []func(file files) files{ + archiveFilesWithArchiveDirName(initialArtifactName), + archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef"), + } + + initialArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, modFuncs...) targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) require.NoError(t, err) - targetArchiveFiles := archiveFilesWithArchiveDirName(targetArtifactName, archiveFilesWithMoreComponents) - targetArchiveFiles = archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl", targetArchiveFiles) + targetArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(targetArtifactName), + archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl"), + ) mockAgentInfo := mockinfo.NewAgent(t) mockAgentInfo.On("Version").Return(targetVersion.String()) From 66e7864c493bab57c6ebfe3f7c93a318fcf05703 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 22:38:11 +0300 Subject: [PATCH 116/127] enhancement(5235): using config target path for assertion --- internal/pkg/agent/application/upgrade/upgrade_test.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 860ac72ba8d..2a3f60b3aba 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1567,10 +1567,7 @@ func TestUpgradeUnpackErrors(t *testing.T) { require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") require.NoDirExists(t, versionedHomePath, "partially unpacked archive should be cleaned up") - - artifactPath, err := artifact.GetArtifactPath(agentArtifact, *testVersion, runtime.GOOS, runtime.GOARCH, config.TargetDirectory) - require.NoError(t, err) - require.NoFileExists(t, artifactPath, "downloaded artifact should be cleaned up") + require.NoFileExists(t, config.TargetDirectory, "downloaded artifact should be cleaned up") }) } } From d58a4ad9c5ca252e2573fd8f91c47744d088e083 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 22:38:52 +0300 Subject: [PATCH 117/127] enhancement(5235): added package symlink func to abstract os symlink --- internal/pkg/agent/application/upgrade/step_relink.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/internal/pkg/agent/application/upgrade/step_relink.go b/internal/pkg/agent/application/upgrade/step_relink.go index 9eb3ceada45..2bfe7101906 100644 --- a/internal/pkg/agent/application/upgrade/step_relink.go +++ b/internal/pkg/agent/application/upgrade/step_relink.go @@ -22,6 +22,8 @@ const ( type upgradeRelinker struct { } +var symlinkFunc = os.Symlink + // TODO: add tests for this func (u *upgradeRelinker) changeSymlink(log *logger.Logger, topDirPath, symlinkPath, newTarget string) error { log.Infof("Changing symlink, topDirPath: %s, symlinkPath: %s, newTarget: %s", topDirPath, symlinkPath, newTarget) @@ -39,7 +41,7 @@ func (u *upgradeRelinker) changeSymlink(log *logger.Logger, topDirPath, symlinkP return err } - if err := os.Symlink(newTarget, prevNewPath); err != nil { + if err := symlinkFunc(newTarget, prevNewPath); err != nil { return errors.New(err, errors.TypeFilesystem, "failed to update agent symlink") } From cdba01330d9762737af55d54a3a235b041f80ab9 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 23:03:15 +0300 Subject: [PATCH 118/127] enhancement(5235): removed unused var from function, regenerated mocks --- .../pkg/agent/application/upgrade/upgrade.go | 4 ++-- .../application/upgrade/upgrade_executor.go | 2 +- .../upgrade/upgrade_executor_mock.go | 21 +++++++++---------- .../upgrade/upgrade_executor_test.go | 3 +-- .../handlers/diagnostics_provider_mock.go | 4 ---- .../actions/handlers/log_level_setter_mock.go | 4 ---- .../actions/handlers/uploader_mock.go | 4 ---- .../pkg/agent/application/info/agent_mock.go | 4 ---- .../pkg/agent/cmd/agent_watcher_mock.go | 7 +------ .../agent/cmd/installation_modifier_mock.go | 4 ---- .../pkg/agent/storage/storage_mock.go | 4 ---- .../pkg/fleetapi/acker/acker_mock.go | 7 +------ .../pkg/fleetapi/client/sender_mock.go | 4 ---- .../pkg/control/v2/client/client_mock.go | 4 ---- 14 files changed, 16 insertions(+), 60 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 4eabe0ee07f..3128fdaf5ae 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -98,7 +98,7 @@ type checkUpgradeFn func(log *logger.Logger, currentVersion, newVersion agentVer type upgradeExecutor interface { downloadArtifact(ctx context.Context, parsedTargetVersion *agtversion.ParsedSemVer, agentInfo info.Agent, sourceURI string, fleetServerURI string, upgradeDetails *details.Details, skipVerifyOverride, skipDefaultPgp bool, pgpBytes ...string) (download.DownloadResult, error) unpackArtifact(downloadResult download.DownloadResult, version, archivePath, topPath, flavor, dataPath, currentHome string, upgradeDetails *details.Details, currentVersion agentVersion, checkUpgradeFn checkUpgradeFn) (unpackStepResult, error) - replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error + replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error watchNewAgent(ctx context.Context, markerFilePath, topPath, dataPath string, waitTime time.Duration, createTimeoutContext createContextWithTimeout, newAgentInstall agentInstall, previousAgentInstall agentInstall, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details, upgradeOutcome UpgradeOutcome) error } 
@@ -326,7 +326,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) } - err = u.upgradeExecutor.replaceOldWithNew(unpackRes, currentVersionedHome, paths.Top(), agentName, paths.Home(), oldRunPath, newRunPath, symlinkPath, newPath, det) + err = u.upgradeExecutor.replaceOldWithNew(unpackRes, currentVersionedHome, paths.Top(), agentName, oldRunPath, newRunPath, symlinkPath, newPath, det) if err != nil { return nil, err } diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 8ba1c85db08..2576b730d5c 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -139,7 +139,7 @@ func (u *executeUpgrade) unpackArtifact(downloadResult download.DownloadResult, return unpackStepResult, unpackErr } -func (u *executeUpgrade) replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { +func (u *executeUpgrade) replaceOldWithNew(unpackStepResult unpackStepResult, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath string, upgradeDetails *details.Details) error { if err := u.directoryCopier.copyActionStore(u.log, unpackStepResult.newHome); err != nil { return fmt.Errorf("failed to copy action store: %w", u.diskSpaceErrorFunc(err)) } diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go b/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go index e4d84b94acf..491faaeecb8 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_mock.go @@ -110,17 +110,17 @@ func (_c *mock_upgradeExecutor_downloadArtifact_Call) RunAndReturn(run func(cont return _c } -// replaceOldWithNew provides a mock function with given fields: unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails -func (_m *mock_upgradeExecutor) replaceOldWithNew(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, currentHome string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details) error { - ret := _m.Called(unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) +// replaceOldWithNew provides a mock function with given fields: unpackStepResult0, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails +func (_m *mock_upgradeExecutor) replaceOldWithNew(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details) error { + ret := _m.Called(unpackStepResult0, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) if len(ret) == 0 { panic("no return value specified for replaceOldWithNew") } var r0 error - if rf, ok := ret.Get(0).(func(unpackStepResult, string, string, string, string, string, string, string, string, *details.Details) error); 
ok { - r0 = rf(unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) + if rf, ok := ret.Get(0).(func(unpackStepResult, string, string, string, string, string, string, string, *details.Details) error); ok { + r0 = rf(unpackStepResult0, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) } else { r0 = ret.Error(0) } @@ -138,19 +138,18 @@ type mock_upgradeExecutor_replaceOldWithNew_Call struct { // - currentVersionedHome string // - topPath string // - agentName string -// - currentHome string // - oldRunPath string // - newRunPath string // - symlinkPath string // - newBinPath string // - upgradeDetails *details.Details -func (_e *mock_upgradeExecutor_Expecter) replaceOldWithNew(unpackStepResult0 interface{}, currentVersionedHome interface{}, topPath interface{}, agentName interface{}, currentHome interface{}, oldRunPath interface{}, newRunPath interface{}, symlinkPath interface{}, newBinPath interface{}, upgradeDetails interface{}) *mock_upgradeExecutor_replaceOldWithNew_Call { - return &mock_upgradeExecutor_replaceOldWithNew_Call{Call: _e.mock.On("replaceOldWithNew", unpackStepResult0, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails)} +func (_e *mock_upgradeExecutor_Expecter) replaceOldWithNew(unpackStepResult0 interface{}, currentVersionedHome interface{}, topPath interface{}, agentName interface{}, oldRunPath interface{}, newRunPath interface{}, symlinkPath interface{}, newBinPath interface{}, upgradeDetails interface{}) *mock_upgradeExecutor_replaceOldWithNew_Call { + return &mock_upgradeExecutor_replaceOldWithNew_Call{Call: _e.mock.On("replaceOldWithNew", unpackStepResult0, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails)} } -func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) Run(run func(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, currentHome string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details)) *mock_upgradeExecutor_replaceOldWithNew_Call { +func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) Run(run func(unpackStepResult0 unpackStepResult, currentVersionedHome string, topPath string, agentName string, oldRunPath string, newRunPath string, symlinkPath string, newBinPath string, upgradeDetails *details.Details)) *mock_upgradeExecutor_replaceOldWithNew_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(unpackStepResult), args[1].(string), args[2].(string), args[3].(string), args[4].(string), args[5].(string), args[6].(string), args[7].(string), args[8].(string), args[9].(*details.Details)) + run(args[0].(unpackStepResult), args[1].(string), args[2].(string), args[3].(string), args[4].(string), args[5].(string), args[6].(string), args[7].(string), args[8].(*details.Details)) }) return _c } @@ -160,7 +159,7 @@ func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) Return(_a0 error) *mock_u return _c } -func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) RunAndReturn(run func(unpackStepResult, string, string, string, string, string, string, string, string, *details.Details) error) *mock_upgradeExecutor_replaceOldWithNew_Call { +func (_c *mock_upgradeExecutor_replaceOldWithNew_Call) RunAndReturn(run func(unpackStepResult, string, string, string, string, string, string, string, 
*details.Details) error) *mock_upgradeExecutor_replaceOldWithNew_Call { _c.Call.Return(run) return _c } diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go index 202e0b32836..d03065b4978 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor_test.go @@ -535,7 +535,6 @@ func TestReplaceOldWithNewStep(t *testing.T) { currentVersionedHome := "mockCurrentVersionedHome" topPath := "mockTopPath" agentName := "mockAgentName" - currentHome := "mockCurrentHome" oldRunPath := "mockOldRunPath" newRunPath := "mockNewRunPath" symlinkPath := "mockSymlinkPath" @@ -653,7 +652,7 @@ func TestReplaceOldWithNewStep(t *testing.T) { }, } - err := upgradeExecutor.replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, currentHome, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) + err := upgradeExecutor.replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, oldRunPath, newRunPath, symlinkPath, newBinPath, upgradeDetails) mockDirectoryCopier.AssertExpectations(t) diff --git a/testing/mocks/internal_/pkg/agent/application/actions/handlers/diagnostics_provider_mock.go b/testing/mocks/internal_/pkg/agent/application/actions/handlers/diagnostics_provider_mock.go index 56409b31b2b..68ef7d1b5ff 100644 --- a/testing/mocks/internal_/pkg/agent/application/actions/handlers/diagnostics_provider_mock.go +++ b/testing/mocks/internal_/pkg/agent/application/actions/handlers/diagnostics_provider_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package handlers diff --git a/testing/mocks/internal_/pkg/agent/application/actions/handlers/log_level_setter_mock.go b/testing/mocks/internal_/pkg/agent/application/actions/handlers/log_level_setter_mock.go index 1fa190652b2..d6371d4ef4a 100644 --- a/testing/mocks/internal_/pkg/agent/application/actions/handlers/log_level_setter_mock.go +++ b/testing/mocks/internal_/pkg/agent/application/actions/handlers/log_level_setter_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package handlers diff --git a/testing/mocks/internal_/pkg/agent/application/actions/handlers/uploader_mock.go b/testing/mocks/internal_/pkg/agent/application/actions/handlers/uploader_mock.go index 76b66ad5513..8c638442a77 100644 --- a/testing/mocks/internal_/pkg/agent/application/actions/handlers/uploader_mock.go +++ b/testing/mocks/internal_/pkg/agent/application/actions/handlers/uploader_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. 
package handlers diff --git a/testing/mocks/internal_/pkg/agent/application/info/agent_mock.go b/testing/mocks/internal_/pkg/agent/application/info/agent_mock.go index 3c69cd9095e..2cf5bede728 100644 --- a/testing/mocks/internal_/pkg/agent/application/info/agent_mock.go +++ b/testing/mocks/internal_/pkg/agent/application/info/agent_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package info diff --git a/testing/mocks/internal_/pkg/agent/cmd/agent_watcher_mock.go b/testing/mocks/internal_/pkg/agent/cmd/agent_watcher_mock.go index 4541e5d9aa0..4a0ec589880 100644 --- a/testing/mocks/internal_/pkg/agent/cmd/agent_watcher_mock.go +++ b/testing/mocks/internal_/pkg/agent/cmd/agent_watcher_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package cmd @@ -9,9 +5,8 @@ package cmd import ( context "context" - mock "github.com/stretchr/testify/mock" - logp "github.com/elastic/elastic-agent-libs/logp" + mock "github.com/stretchr/testify/mock" time "time" ) diff --git a/testing/mocks/internal_/pkg/agent/cmd/installation_modifier_mock.go b/testing/mocks/internal_/pkg/agent/cmd/installation_modifier_mock.go index 14f73d912a8..cad2179c0c5 100644 --- a/testing/mocks/internal_/pkg/agent/cmd/installation_modifier_mock.go +++ b/testing/mocks/internal_/pkg/agent/cmd/installation_modifier_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package cmd diff --git a/testing/mocks/internal_/pkg/agent/storage/storage_mock.go b/testing/mocks/internal_/pkg/agent/storage/storage_mock.go index 2bec6c96cbb..9f394de577d 100644 --- a/testing/mocks/internal_/pkg/agent/storage/storage_mock.go +++ b/testing/mocks/internal_/pkg/agent/storage/storage_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package storage diff --git a/testing/mocks/internal_/pkg/fleetapi/acker/acker_mock.go b/testing/mocks/internal_/pkg/fleetapi/acker/acker_mock.go index cdeb18b9d57..233a175a754 100644 --- a/testing/mocks/internal_/pkg/fleetapi/acker/acker_mock.go +++ b/testing/mocks/internal_/pkg/fleetapi/acker/acker_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. 
package acker @@ -9,9 +5,8 @@ package acker import ( context "context" - mock "github.com/stretchr/testify/mock" - fleetapi "github.com/elastic/elastic-agent/internal/pkg/fleetapi" + mock "github.com/stretchr/testify/mock" ) // Acker is an autogenerated mock type for the Acker type diff --git a/testing/mocks/internal_/pkg/fleetapi/client/sender_mock.go b/testing/mocks/internal_/pkg/fleetapi/client/sender_mock.go index 68d4999a3a7..12f2713f6e7 100644 --- a/testing/mocks/internal_/pkg/fleetapi/client/sender_mock.go +++ b/testing/mocks/internal_/pkg/fleetapi/client/sender_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package client diff --git a/testing/mocks/pkg/control/v2/client/client_mock.go b/testing/mocks/pkg/control/v2/client/client_mock.go index c408984634b..02a38fca3f8 100644 --- a/testing/mocks/pkg/control/v2/client/client_mock.go +++ b/testing/mocks/pkg/control/v2/client/client_mock.go @@ -1,7 +1,3 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License 2.0; -// you may not use this file except in compliance with the Elastic License 2.0. - // Code generated by mockery v2.53.4. DO NOT EDIT. package client From 5eced8d7e04dc69ac52410a12a3b90f6b985c0d6 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 23:04:02 +0300 Subject: [PATCH 119/127] enhancement(5235): using homepath instead of home --- internal/pkg/agent/application/upgrade/upgrade.go | 6 +++--- internal/pkg/agent/application/upgrade/upgrade_test.go | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 3128fdaf5ae..8f6f69a1359 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -306,7 +306,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string return nil, err } - unpackRes, err := u.upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, paths.Top(), "", paths.Data(), paths.Home(), det, currentVersion, checkUpgrade) + unpackRes, err := u.upgradeExecutor.unpackArtifact(downloadResult, version, downloadResult.ArtifactPath, paths.Top(), "", paths.Data(), paths.HomePath(), det, currentVersion, checkUpgrade) if err != nil { return nil, err } @@ -321,9 +321,9 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string newPath := paths.BinaryPath(filepath.Join(paths.Top(), unpackRes.VersionedHome), agentName) u.log.Infof("newPath: %s", newPath) - currentVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) + currentVersionedHome, err := filepath.Rel(paths.Top(), paths.HomePath()) if err != nil { - return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) + return nil, fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.HomePath(), paths.Top(), err) } err = u.upgradeExecutor.replaceOldWithNew(unpackRes, currentVersionedHome, paths.Top(), agentName, oldRunPath, newRunPath, symlinkPath, newPath, det) diff --git 
a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 2a3f60b3aba..3853697f978 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1156,7 +1156,7 @@ func TestUpgrade(t *testing.T) { mockExecutor.EXPECT().downloadArtifact(ctx, tc.parsedTargetVersion, agentInfo, sourceURI, "", details, skipVerify, skipDefaultPgp, pgpBytesConverted...).Return(downloadResult, tc.downloadError) case "unpackArtifact": - mockExecutor.EXPECT().unpackArtifact(downloadResult, tc.targetVersion, downloadResult.ArtifactPath, topPath, "", paths.Data(), paths.Home(), details, currentVersion, mock.AnythingOfType("checkUpgradeFn")).Return(unpackStepResult, tc.unpackError) + mockExecutor.EXPECT().unpackArtifact(downloadResult, tc.targetVersion, downloadResult.ArtifactPath, topPath, "", paths.Data(), paths.HomePath(), details, currentVersion, mock.AnythingOfType("checkUpgradeFn")).Return(unpackStepResult, tc.unpackError) case "replaceOldWithNew": mockExecutor.EXPECT().replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, paths.Home(), paths.Run(), newRunPath, symlinkPath, newBinaryPath, details).Return(tc.replaceOldWithNewError) From e66408ab8b80f9ca5ce318dd8453ba8fdc5baa31 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 23:04:31 +0300 Subject: [PATCH 120/127] enhancement(5235): update mock use in test --- internal/pkg/agent/application/upgrade/upgrade_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 3853697f978..e7f3735dd09 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1159,7 +1159,7 @@ func TestUpgrade(t *testing.T) { mockExecutor.EXPECT().unpackArtifact(downloadResult, tc.targetVersion, downloadResult.ArtifactPath, topPath, "", paths.Data(), paths.HomePath(), details, currentVersion, mock.AnythingOfType("checkUpgradeFn")).Return(unpackStepResult, tc.unpackError) case "replaceOldWithNew": - mockExecutor.EXPECT().replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, paths.Home(), paths.Run(), newRunPath, symlinkPath, newBinaryPath, details).Return(tc.replaceOldWithNewError) + mockExecutor.EXPECT().replaceOldWithNew(unpackStepResult, currentVersionedHome, topPath, agentName, paths.Run(), newRunPath, symlinkPath, newBinaryPath, details).Return(tc.replaceOldWithNewError) case "watchNewAgent": mockExecutor.EXPECT().watchNewAgent(ctx, markerFilePath, topPath, paths.Data(), watcherMaxWaitTime, mock.AnythingOfType("createContextWithTimeout"), newAgentInstall, previousAgentInstall, action, details, OUTCOME_UPGRADE).Return(tc.watchNewAgentError) From 52676356aaca3ad129f163c507504ef5118f2338 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Sun, 10 Aug 2025 23:04:58 +0300 Subject: [PATCH 121/127] enhancement(5235): added symlink error test --- .../agent/application/upgrade/upgrade_test.go | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index e7f3735dd09..b8fba45c126 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1759,3 +1759,140 @@ func TestUpgradeDirectoryCopyErrors(t *testing.T) { } } 
} + +func TestUpgradeChangeSymlinkErrors(t *testing.T) { + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath = tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(initialArtifactName), + archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef"), + ) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(targetArtifactName), + archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl"), + ) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + mockReturnedError := errors.New("test symlink error") + + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. + paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + var createdFilePaths []string + // Create several files in the initial run path and save their paths in an array. 
+ initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + tmpSymlinkFunc := symlinkFunc + t.Cleanup(func() { + symlinkFunc = tmpSymlinkFunc + }) + + callCount := 0 + symlinkFunc = func(newTarget, prevNewPath string) error { + callCount++ + + err := tmpSymlinkFunc(newTarget, prevNewPath) + require.NoError(t, err) + + // return error when switching from old to new agent + if callCount == 1 { + return mockReturnedError + } + + // return nil when cleaning up the symlink + return nil + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, mockReturnedError, "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "new versioned home path should be cleaned up") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0) +} From 203a1a1472bcc88c114b65c9c3a075e5a431641f Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 00:16:39 +0300 Subject: [PATCH 122/127] enhancement(5235): added mark upgrade error handling test --- .../agent/application/upgrade/upgrade_test.go | 154 ++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index b8fba45c126..9a67cb2c0f7 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -1896,3 +1896,157 @@ func TestUpgradeChangeSymlinkErrors(t *testing.T) { require.NoError(t, err, "reading target directory failed") require.Len(t, entries, 0) } + +func TestUpgradeMarkUpgradeError(t *testing.T) { + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath = tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(initialArtifactName), + 
archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef"), + ) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(targetArtifactName), + archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl"), + ) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + testCases := map[string]testError{ + "should return error and cleanup if mark upgrade fails": { + copyFuncError: errors.New("test mark upgrade error"), + expectedError: errors.New("test mark upgrade error"), + }, + } + + for _, te := range TestErrors { + testCases[fmt.Sprintf("should return error and cleanup if mark upgrade fails with disk space error: %v", te)] = testError{ + copyFuncError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. + paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + var createdFilePaths []string + // Create several files in the initial run path and save their paths in an array. 
+ initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + markerFilePath := markerFilePath(paths.Data()) + + tmpWriteFile := writeFile + t.Cleanup(func() { + writeFile = tmpWriteFile + }) + + writeFile = func(name string, data []byte, perm os.FileMode) error { + if name != markerFilePath { + return tmpWriteFile(name, data, perm) + } + + require.NoFileExists(t, name, fmt.Sprintf("file %s should not exist before writing", name)) + + err := tmpWriteFile(name, data, perm) + require.NoError(t, err) + + require.FileExists(t, name, fmt.Sprintf("file %s should exist after writing", name)) + + return tc.copyFuncError + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, tc.expectedError, "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "new versioned home path should be cleaned up") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0) + }) + } +} From 400b547cbc00309bed1cf574b0a8ef0a7e61d630 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 00:17:28 +0300 Subject: [PATCH 123/127] enhancement(5235): added diskspace error conversion --- internal/pkg/agent/application/upgrade/upgrade_executor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade_executor.go b/internal/pkg/agent/application/upgrade/upgrade_executor.go index 2576b730d5c..3ec95265205 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_executor.go +++ b/internal/pkg/agent/application/upgrade/upgrade_executor.go @@ -164,7 +164,7 @@ func (u *executeUpgrade) watchNewAgent(ctx context.Context, markerFilePath, topP previousAgentInstall, // old agent version data action, upgradeDetails, upgradeOutcome); err != nil { - return err + return u.diskSpaceErrorFunc(err) } watcherExecutable := u.watcher.selectWatcherExecutable(topPath, previousAgentInstall, newAgentInstall) From 84182531fc05b348b9bc7b4d24ba3e649b758c78 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 00:18:58 +0300 Subject: [PATCH 124/127] enhancement(5235): using writefile stub --- internal/pkg/agent/application/upgrade/step_mark.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go 
index 12f13cbd722..92dbb8dfcc3 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -228,7 +228,7 @@ func (u *upgradeWatcher) markUpgrade(log *logger.Logger, dataDirPath string, age markerPath := markerFilePath(dataDirPath) log.Infow("Writing upgrade marker file", "file.path", markerPath, "hash", marker.Hash, "prev_hash", marker.PrevHash) - if err := os.WriteFile(markerPath, markerBytes, 0600); err != nil { + if err := writeFile(markerPath, markerBytes, 0600); err != nil { return errors.New(err, errors.TypeFilesystem, "failed to create update marker file", errors.M(errors.MetaKeyPath, markerPath)) } From 818994f19e416faa654fc51082e3ad816fa434a0 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 00:19:31 +0300 Subject: [PATCH 125/127] enhancement(5235): added command exec package var in rollback --- internal/pkg/agent/application/upgrade/rollback.go | 2 ++ internal/pkg/agent/application/upgrade/rollback_darwin.go | 2 +- internal/pkg/agent/application/upgrade/rollback_linux.go | 2 +- internal/pkg/agent/application/upgrade/rollback_windows.go | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/rollback.go b/internal/pkg/agent/application/upgrade/rollback.go index baedca3f71a..5e5de9d386b 100644 --- a/internal/pkg/agent/application/upgrade/rollback.go +++ b/internal/pkg/agent/application/upgrade/rollback.go @@ -33,6 +33,8 @@ const ( restartBackoffMax = 90 * time.Second ) +var execCommandFunc = exec.Command + // Rollback rollbacks to previous version which was functioning before upgrade. func Rollback(ctx context.Context, log *logger.Logger, c client.Client, topDirPath, prevVersionedHome, prevHash string) error { symlinkPath := filepath.Join(topDirPath, agentName) diff --git a/internal/pkg/agent/application/upgrade/rollback_darwin.go b/internal/pkg/agent/application/upgrade/rollback_darwin.go index 041abf11b40..475fccb0660 100644 --- a/internal/pkg/agent/application/upgrade/rollback_darwin.go +++ b/internal/pkg/agent/application/upgrade/rollback_darwin.go @@ -23,7 +23,7 @@ const ( func invokeCmd(agentExecutable string) *exec.Cmd { // #nosec G204 -- user cannot inject any parameters to this command - cmd := exec.Command(agentExecutable, watcherSubcommand, + cmd := execCommandFunc(agentExecutable, watcherSubcommand, "--path.config", paths.Config(), "--path.home", paths.Top(), ) diff --git a/internal/pkg/agent/application/upgrade/rollback_linux.go b/internal/pkg/agent/application/upgrade/rollback_linux.go index bdaf918a2b6..f680bc29a35 100644 --- a/internal/pkg/agent/application/upgrade/rollback_linux.go +++ b/internal/pkg/agent/application/upgrade/rollback_linux.go @@ -23,7 +23,7 @@ const ( func invokeCmd(agentExecutable string) *exec.Cmd { // #nosec G204 -- user cannot inject any parameters to this command - cmd := exec.Command(agentExecutable, watcherSubcommand, + cmd := execCommandFunc(agentExecutable, watcherSubcommand, "--path.config", paths.Config(), "--path.home", paths.Top(), ) diff --git a/internal/pkg/agent/application/upgrade/rollback_windows.go b/internal/pkg/agent/application/upgrade/rollback_windows.go index b7c273c9385..6efccba86c0 100644 --- a/internal/pkg/agent/application/upgrade/rollback_windows.go +++ b/internal/pkg/agent/application/upgrade/rollback_windows.go @@ -21,7 +21,7 @@ const ( func invokeCmd(agentExecutable string) *exec.Cmd { // #nosec G204 -- user cannot inject any parameters to this command - cmd := 
exec.Command(agentExecutable, watcherSubcommand, + cmd := execCommandFunc(agentExecutable, watcherSubcommand, "--path.config", paths.Config(), "--path.home", paths.Top(), ) From 64a6035190e18f2af51f68ce07276cbf61dc70d3 Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 01:19:40 +0300 Subject: [PATCH 126/127] enhancement(5235): added release upgradeable and context timeout package vars for testing --- internal/pkg/agent/application/upgrade/upgrade.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 8f6f69a1359..1a5e7d17361 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -114,11 +114,13 @@ type Upgrader struct { upgradeExecutor upgradeExecutor } +var releaseUpgradeableFunc = release.Upgradeable + // IsUpgradeable when agent is installed and running as a service or flag was provided. func IsUpgradeable() bool { // only upgradeable if running from Agent installer and running under the // control of the system supervisor (or built specifically with upgrading enabled) - return release.Upgradeable() || (paths.RunningInstalled() && info.RunningUnderSupervisor()) + return releaseUpgradeableFunc() || (paths.RunningInstalled() && info.RunningUnderSupervisor()) } // NewUpgrader creates an upgrader which is capable of performing upgrade operation @@ -256,6 +258,8 @@ func checkUpgrade(log *logger.Logger, currentVersion, newVersion agentVersion, m return nil } +var contextWithTimeoutFunc = context.WithTimeout + // Upgrade upgrades running agent, function returns shutdown callback that must be called by reexec. func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string, action *fleetapi.ActionUpgrade, det *details.Details, skipVerifyOverride bool, skipDefaultPgp bool, pgpBytes ...string) (_ reexec.ShutdownCallbackFn, err error) { defer func() { @@ -350,7 +354,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string versionedHome: currentVersionedHome, } - err = u.upgradeExecutor.watchNewAgent(ctx, markerFilePath(paths.Data()), paths.Top(), paths.Data(), watcherMaxWaitTime, context.WithTimeout, current, previous, action, det, OUTCOME_UPGRADE) + err = u.upgradeExecutor.watchNewAgent(ctx, markerFilePath(paths.Data()), paths.Top(), paths.Data(), watcherMaxWaitTime, contextWithTimeoutFunc, current, previous, action, det, OUTCOME_UPGRADE) if err != nil { return nil, err } From b824ae6622b7867bd6db16742257a5fe70bf20de Mon Sep 17 00:00:00 2001 From: kaanyalti Date: Mon, 11 Aug 2025 01:20:10 +0300 Subject: [PATCH 127/127] enhancement(5235): added invoke watcher and wait for watcher error handling test cases --- .../agent/application/upgrade/upgrade_test.go | 301 ++++++++++++++++++ 1 file changed, 301 insertions(+) diff --git a/internal/pkg/agent/application/upgrade/upgrade_test.go b/internal/pkg/agent/application/upgrade/upgrade_test.go index 9a67cb2c0f7..d3eea7363d2 100644 --- a/internal/pkg/agent/application/upgrade/upgrade_test.go +++ b/internal/pkg/agent/application/upgrade/upgrade_test.go @@ -13,6 +13,7 @@ import ( "net/http" "net/http/httptest" "os" + "os/exec" "path/filepath" "reflect" "runtime" @@ -2050,3 +2051,303 @@ func TestUpgradeMarkUpgradeError(t *testing.T) { }) } } + +func TestUpgradeInvokeWatcherError(t *testing.T) { + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, 
runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath = tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(initialArtifactName), + archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef"), + ) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(targetArtifactName), + archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl"), + ) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + testCases := map[string]testError{ + "should return error and cleanup if mark upgrade fails": { + copyFuncError: errors.New("test mark upgrade error"), + expectedError: errors.New("test mark upgrade error"), + }, + } + + for _, te := range TestErrors { + testCases[fmt.Sprintf("should return error and cleanup if mark upgrade fails with disk space error: %v", te)] = testError{ + copyFuncError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. + paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + var createdFilePaths []string + // Create several files in the initial run path and save their paths in an array. 
+ initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + intalledMarkerPath := filepath.Join(paths.Top(), paths.MarkerFileName) + _, err = os.Create(intalledMarkerPath) + require.NoError(t, err) + + tmpReleaseUpgradeableFunc := releaseUpgradeableFunc + t.Cleanup(func() { + releaseUpgradeableFunc = tmpReleaseUpgradeableFunc + }) + releaseUpgradeableFunc = func() bool { + return true + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorContains(t, err, "failed to start Upgrade Watcher", "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "new versioned home path should be cleaned up") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0) +} + +func TestUpgradeWaitForWatcherError(t *testing.T) { + + log, _ := loggertest.New("test") + + tempConfig := &artifact.Config{} // used only to get os and arch, runtime.GOARCH returns amd64 which is not a valid arch when used in GetArtifactName + + // Prepare to override HomePath + tmpHomePath := paths.HomePath + t.Cleanup(func() { + paths.HomePath = tmpHomePath + }) + + initialVersion := agtversion.NewParsedSemVer(1, 2, 3, "SNAPSHOT", "") + initialArtifactName, err := artifact.GetArtifactName(agentArtifact, *initialVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + initialArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(initialArtifactName), + archiveFilesWithVersionedHome(initialVersion.CoreVersion(), "abcdef"), + ) + + targetVersion := agtversion.NewParsedSemVer(3, 4, 5, "SNAPSHOT", "") + targetArtifactName, err := artifact.GetArtifactName(agentArtifact, *targetVersion, tempConfig.OS(), tempConfig.Arch()) + require.NoError(t, err) + + targetArchiveFiles := modifyArchiveFiles(archiveFilesWithMoreComponents, + archiveFilesWithArchiveDirName(targetArtifactName), + archiveFilesWithVersionedHome(targetVersion.CoreVersion(), "ghijkl"), + ) + + mockAgentInfo := mockinfo.NewAgent(t) + mockAgentInfo.On("Version").Return(targetVersion.String()) + + upgradeDetails := details.NewDetails(targetVersion.String(), details.StateRequested, "test") + + tempUnpacker := &upgradeUnpacker{ // used only to unpack the initial archive + log: log, + } + + testCases := map[string]testError{ + "should return error and cleanup if mark 
upgrade fails": { + copyFuncError: errors.New("test mark upgrade error"), + expectedError: errors.New("test mark upgrade error"), + }, + } + + for _, te := range TestErrors { + testCases[fmt.Sprintf("should return error and cleanup if mark upgrade fails with disk space error: %v", te)] = testError{ + copyFuncError: te, + expectedError: upgradeErrors.ErrInsufficientDiskSpace, + } + } + + paths.SetTop(t.TempDir()) + + initialArchive, err := createArchive(t, initialArtifactName, initialArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", initialArchive) + + initialUnpackRes, err := tempUnpacker.unpack(initialVersion.String(), initialArchive, paths.Data(), "") + require.NoError(t, err) + + checkExtractedFilesWithManifestAndVersionedHome(t, paths.Data(), filepath.Join(paths.Top(), initialUnpackRes.VersionedHome)) + + // Overriding HomePath which is just a var holding paths.Home() because + // Home() returns "unknow" short commit and returns the release version + // which is set in init. + paths.HomePath = func() string { + actualPath := filepath.Join(paths.Top(), initialUnpackRes.VersionedHome) + return actualPath + } + + // The file list does not contain the action store files, so we need to + // create them + err = os.WriteFile(paths.AgentActionStoreFile(), []byte("initial agent action store content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreYmlFile(), []byte("initial agent state yml content"), 0o600) + require.NoError(t, err) + err = os.WriteFile(paths.AgentStateStoreFile(), []byte("initial agent state enc content"), 0o600) + require.NoError(t, err) + + var createdFilePaths []string + // Create several files in the initial run path and save their paths in an array. + initialRunPath := paths.Run() + require.NoError(t, os.MkdirAll(initialRunPath, 0o755)) + + for i := 0; i < 3; i++ { + filePath := filepath.Join(initialRunPath, fmt.Sprintf("file%d.txt", i)) + err := os.WriteFile(filePath, []byte(fmt.Sprintf("content for file %d", i)), 0o600) + require.NoError(t, err) + createdFilePaths = append(createdFilePaths, filePath) + } + + targetArchive, err := createArchive(t, targetArtifactName, targetArchiveFiles) + require.NoError(t, err) + + t.Logf("Created archive: %s", targetArchive) + + newVersionedHome := "data/elastic-agent-3.4.5-SNAPSHOT-ghijkl" + newVersionedHomePath := filepath.Join(paths.Top(), newVersionedHome) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, targetArchive) + })) + t.Cleanup(server.Close) + + config := artifact.Config{ + TargetDirectory: paths.Downloads(), + SourceURI: server.URL, + RetrySleepInitDuration: 1 * time.Second, + HTTPTransportSettings: httpcommon.HTTPTransportSettings{ + Timeout: 1 * time.Second, + }, + } + + intalledMarkerPath := filepath.Join(paths.Top(), paths.MarkerFileName) + _, err = os.Create(intalledMarkerPath) + require.NoError(t, err) + + tmpExecCommandFunc := execCommandFunc + t.Cleanup(func() { + execCommandFunc = tmpExecCommandFunc + }) + execCommandFunc = func(name string, arg ...string) *exec.Cmd { + if runtime.GOOS == "windows" { + return exec.Command("timeout", "1") + } + return exec.Command("sleep", "1") + } + + tmpContextWithTimeoutFunc := contextWithTimeoutFunc + t.Cleanup(func() { + contextWithTimeoutFunc = tmpContextWithTimeoutFunc + }) + contextWithTimeoutFunc = func(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) { + return context.WithTimeout(ctx, 1*time.Second) + } + + 
tmpReleaseUpgradeableFunc := releaseUpgradeableFunc + t.Cleanup(func() { + releaseUpgradeableFunc = tmpReleaseUpgradeableFunc + }) + releaseUpgradeableFunc = func() bool { + return true + } + + upgrader, err := NewUpgrader(log, &config, mockAgentInfo) + require.NoError(t, err) + + _, err = upgrader.Upgrade(context.Background(), targetVersion.String(), server.URL, nil, upgradeDetails, true, true) + require.ErrorIs(t, err, context.DeadlineExceeded, "expected error mismatch") + + require.NoDirExists(t, newVersionedHomePath, "new versioned home path should be cleaned up") + + entries, err := os.ReadDir(config.TargetDirectory) + require.NoError(t, err, "reading target directory failed") + require.Len(t, entries, 0) +}
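
Editor's note: the new tests above all rely on the same seam pattern — a package-level function variable (writeFile, symlinkFunc, execCommandFunc, releaseUpgradeableFunc, contextWithTimeoutFunc) that production code calls and that a test temporarily overrides, restoring the original via t.Cleanup. A minimal, self-contained sketch of that pattern follows; the package name, the helper persistMarker, and the injected error are hypothetical and only illustrate the technique, they are not part of the patch.

package seamsketch

import (
	"errors"
	"os"
	"path/filepath"
	"testing"
)

// Production code calls the seam instead of os.WriteFile directly,
// so a test can substitute a failing implementation.
var writeFile = os.WriteFile

// persistMarker is a hypothetical stand-in for the marker write in markUpgrade.
func persistMarker(path string, data []byte) error {
	return writeFile(path, data, 0o600)
}

func TestPersistMarkerPropagatesWriteError(t *testing.T) {
	orig := writeFile
	// Restore the real implementation however the test exits,
	// mirroring the t.Cleanup calls used in upgrade_test.go.
	t.Cleanup(func() { writeFile = orig })

	injected := errors.New("injected write failure")
	writeFile = func(string, []byte, os.FileMode) error { return injected }

	err := persistMarker(filepath.Join(t.TempDir(), "marker"), []byte("x"))
	if !errors.Is(err, injected) {
		t.Fatalf("expected injected error, got %v", err)
	}
}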
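
Editor's note: several hunks above route filesystem errors through an injected diskSpaceErrorFunc so that callers can match upgradeErrors.ErrInsufficientDiskSpace with errors.Is. A Unix-flavored sketch of such a mapping is shown below, assuming ENOSPC is the signal of interest; the sentinel value, function name, and any Windows-specific handling in the actual patch may differ.

package diskerrsketch

import (
	"errors"
	"fmt"
	"syscall"
)

// ErrInsufficientDiskSpace is the sentinel that callers check with errors.Is.
var ErrInsufficientDiskSpace = errors.New("insufficient disk space")

// toDiskSpaceError wraps "no space left on device" failures with the sentinel
// and returns every other error unchanged.
func toDiskSpaceError(err error) error {
	if err == nil {
		return nil
	}
	if errors.Is(err, syscall.ENOSPC) {
		return fmt.Errorf("%w: %v", ErrInsufficientDiskSpace, err)
	}
	return err
}

Callers would then use errors.Is(err, ErrInsufficientDiskSpace) to decide whether to surface the clean disk-space message instead of the raw syscall error.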