diff --git a/README.md b/README.md index 7a374a836..e85eb6b5f 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,7 @@ NoOperation can be used to cancel an ongoing assess or patch operation. (**requi > > ```json > { +> "cloudType": "Azure", > "operation": "Assessment", > "activityId": "def820db-ec3c-4ecd-9d6c-cb95e6fd5231", > "startTime": "2021-08-10T23:37:14Z", diff --git a/src/core/__init__.py b/src/core/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/__init__.py +++ b/src/core/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/CoreMain.py b/src/core/src/CoreMain.py index 2380835f0..5ba77bb0e 100644 --- a/src/core/src/CoreMain.py +++ b/src/core/src/CoreMain.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,158 +13,78 @@ # limitations under the License. 
# Requires Python 2.7+
from core.src.bootstrap.Bootstrapper import Bootstrapper
from core.src.bootstrap.ExitJanitor import ExitJanitor
from core.src.bootstrap.Constants import Constants


class CoreMain(object):
    """ Top-level orchestrator for core patch operations: performs a staged (L1/L2/L3) bootstrap,
        then delegates all business logic to the CoreExecutionEngine, with the ExitJanitor
        guaranteeing controlled process exit on every path. """

    def __init__(self, argv):
        """ Execution start point for core patch operations.
            argv: raw process arguments per the core argument contract. """
        try:
            # Pre-initialize to None so the extreme-failure handler below can always be called safely,
            # even if safely_execute_core fails before any component is constructed.
            self.stdout_file_mirror = self.file_logger = self.lifecycle_manager = self.telemetry_writer = None
            self.safely_execute_core(argv)
        except Exception as error:  # this should never catch a failure in production - but defensive coding et al.
            ExitJanitor.safely_handle_extreme_failure(self.stdout_file_mirror, self.file_logger, self.lifecycle_manager, self.telemetry_writer, error)

    def safely_execute_core(self, argv):
        """ Encapsulates sequential safe initialization of components and delegates business-logic
            execution to the CoreExecutionEngine. Each bootstrap level exits via ExitJanitor.final_exit
            with a progressively less severe exit code as more observability becomes available. """
        # ---------------------------------------------------------------------
        # Level 1 bootstrap - absolute bare minimum required for observability
        # ---------------------------------------------------------------------
        current_env = Constants.ExecEnv.PROD  # safe default until the bootstrapper reports the real environment
        try:
            bootstrapper = Bootstrapper(argv)
            current_env = bootstrapper.current_env
            env_layer, self.file_logger, composite_logger, self.stdout_file_mirror, self.telemetry_writer = bootstrapper.get_foundational_components()
            container = lifecycle_manager = status_handler = execution_config = None  # for explicit clarity only
            bootstrapper.bootstrap_splash_text()
            if bootstrapper.auto_assessment_log_file_truncated:
                # Truncation happens before the file logger exists, so it is logged here (delayed logging).
                composite_logger.log_debug("[CM] Auto-assessment log file was truncated in bootstrap.")
        except Exception as error:
            print("Critical: L1 Bootstrap failure. No logs were written. [Error={0}]".format(repr(error)))
            return ExitJanitor.final_exit(Constants.ExitCode.CriticalError_NoLog, self.stdout_file_mirror, self.file_logger, self.lifecycle_manager, self.telemetry_writer, current_env)  # return is only for IDE hinting

        # ---------------------------------------------------------------------------------------
        # Level 2 bootstrap - required for service-side reporting & minimal lifecycle management
        # ---------------------------------------------------------------------------------------
        try:
            container = bootstrapper.build_out_container()  # nothing below this except maybe ExecutionConfig should fail (malformed input) - all init path code should be highly robust
            self.lifecycle_manager, status_handler, execution_config = bootstrapper.get_service_components()
            self.lifecycle_manager.execution_start_check()  # terminates execution gracefully if nothing to do
            package_manager = configure_patching_processor = patch_assessor = patch_installer = None
            core_exec = exit_janitor = None
        except Exception as error:
            composite_logger.log_error("Critical: L2 Bootstrap failure. No status was written. [Error={0}][LogLocation={1}]".format(repr(error), bootstrapper.log_file_path))
            return ExitJanitor.final_exit(Constants.ExitCode.CriticalError_NoStatus, self.stdout_file_mirror, self.file_logger, self.lifecycle_manager, self.telemetry_writer, current_env)  # return is only for IDE hinting

        # ---------------------------------------------------
        # Level 3 bootstrap - patch component initialization
        # ---------------------------------------------------
        try:
            package_manager, configure_patching_processor, patch_assessor, patch_installer = bootstrapper.get_patch_components()
            core_exec, exit_janitor = bootstrapper.get_core_exec_components()
        except Exception as error:
            composite_logger.log_error("Critical: L3 Bootstrap failure. [Error={0}][LogLocation={1}]".format(repr(error), bootstrapper.log_file_path))
            return ExitJanitor.final_exit(Constants.ExitCode.CriticalError_Reported, self.stdout_file_mirror, self.file_logger, self.lifecycle_manager, self.telemetry_writer, current_env)  # return is only for IDE hinting

        # ------------------------------
        # Core business logic execution
        # ------------------------------
        try:
            core_exec.perform_housekeeping_tasks()
            core_exec.execute()
            core_exec.set_final_status_handler_statuses()
        except Exception as error:
            if Constants.EnvLayer.PRIVILEGED_OP_MARKER in repr(error):  # Privileged operation handling for non-production use
                composite_logger.log_debug('[CM] Privileged operation request intercepted: ' + repr(error))
                raise
            core_exec.try_set_final_status_handler_statuses()
            exit_janitor.handle_terminal_exception(exception=error, log_file_path=bootstrapper.log_file_path)
        finally:
            # NOTE(review): this finally also runs while a privileged-op exception is propagating;
            # presumably final_exit(Okay) here only releases resources - confirm against ExitJanitor.
            exit_janitor.perform_housekeeping_tasks()
            exit_janitor.final_exit(Constants.ExitCode.Okay, self.stdout_file_mirror, self.file_logger, self.lifecycle_manager, self.telemetry_writer, current_env)
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/__main__.py b/src/core/src/__main__.py index 57317d3fc..2b87b7a1b 100644 --- a/src/core/src/__main__.py +++ b/src/core/src/__main__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/bootstrap/Bootstrapper.py b/src/core/src/bootstrap/Bootstrapper.py index 4e349f264..5bae17f78 100644 --- a/src/core/src/bootstrap/Bootstrapper.py +++ b/src/core/src/bootstrap/Bootstrapper.py @@ -1,10 +1,11 @@ +# coding=utf-8 # Copyright 2020 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
""" Bootstrapper """
import base64
import json
import os
import sys
import time
from core.src.bootstrap.ConfigurationFactory import ConfigurationFactory
from core.src.bootstrap.Constants import Constants
from core.src.bootstrap.Container import Container
from core.src.local_loggers.StdOutFileMirror import StdOutFileMirror

try:
    import urllib.request as urlreq   # Python 3.x
except ImportError:
    import urllib2 as urlreq          # Python 2.x


class Bootstrapper(object):
    """ Builds the dependency-injection container and hands out progressively higher-level
        component sets (foundational -> service -> patch -> core-exec) to the caller. """

    def __init__(self, argv, capture_stdout=True):
        # Environment and basic execution awareness
        self.argv = argv
        self.config_settings = None  # lazily decoded from argv on first critical-setting read
        self.current_env = self.__get_current_env()
        self.auto_assessment_only = bool(self.__get_value_from_argv(Constants.ARG_AUTO_ASSESS_ONLY, "False") == "True")
        self.cloud_type = self.__get_cloud_type(self.auto_assessment_only)

        self.log_file_path, self.real_record_path, self.events_folder, self.telemetry_supported = self.__get_path_to_log_files_and_telemetry_dir(argv, self.auto_assessment_only)
        self.recorder_enabled, self.emulator_enabled = self.__get_recorder_emulator_flags()

        # Container initialization
        # BUGFIX: the conditional must apply only to the optional environment suffix, not the whole
        # concatenation - previously ("A" + env) if cond else "" printed an empty string in PROD.
        print("[-- DIALTONE --]\n[BS] Building bootstrap container configuration... " + ("[Environment=" + str(self.current_env) + "]" if self.current_env != Constants.ExecEnv.PROD else ""))
        self.configuration_factory = ConfigurationFactory(self.cloud_type, self.log_file_path, self.real_record_path, self.recorder_enabled, self.emulator_enabled, self.events_folder, self.telemetry_supported)
        self.container = Container()
        self.container.build(self.configuration_factory.get_bootstrap_configuration(self.current_env))
        self.env_layer = self.container.get('env_layer')

        # Logging initializations
        self.auto_assessment_log_file_truncated = False  # used for delayed logging
        self.__reset_auto_assessment_log_file_if_too_large()
        self.file_logger = self.container.get('file_logger')
        self.stdout_file_mirror = StdOutFileMirror(self.env_layer, self.file_logger, capture_stdout, self.current_env)
        self.composite_logger = self.container.get('composite_logger')
        self.telemetry_writer = self.container.get('telemetry_writer')
        self.composite_logger.telemetry_writer = self.telemetry_writer  # Need to set telemetry_writer within logger to enable sending all logs to telemetry

        # Making telemetry better sooner
        self.telemetry_writer.set_task_name(Constants.TelemetryTaskName.AUTO_ASSESSMENT if self.auto_assessment_only else Constants.TelemetryTaskName.EXEC)
        self.telemetry_writer.set_operation_id(self.__get_activity_id_from_config_settings_for_telemetry())

        print("\n[BS] Completed building bootstrap container configuration.")

    # region Public Methods
    def get_foundational_components(self):
        """ Components needed for code execution observability """
        return self.env_layer, self.file_logger, self.composite_logger, self.stdout_file_mirror, self.telemetry_writer

    def build_out_container(self):
        """ First output in a positive bootstrap """
        try:
            # input parameter incorporation
            arguments_config = self.configuration_factory.get_arguments_configuration(self.argv)
            self.container.build(arguments_config)

            # full configuration incorporation
            self.container.build(self.configuration_factory.get_configuration(self.current_env, self.env_layer.get_package_manager()))

            return self.container
        except Exception as error:
            raise Exception("Bootstrapper: Container build out failure. [Error={0}]".format(repr(error)))

    def get_service_components(self):
        """ Components needed for higher-level observability and execution checks """
        return self.container.get('lifecycle_manager'), self.container.get('status_handler'), self.container.get('execution_config')

    def get_patch_components(self):
        """ Components needed for core business-logic execution and controlled core exit """
        return self.container.get('package_manager'), self.container.get('configure_patching_processor'), self.container.get('patch_assessor'), self.container.get('patch_installer')

    def get_core_exec_components(self):
        """ Highest-level execution components in final initialization """
        return self.container.get('core_execution_engine'), self.container.get('exit_janitor')

    def bootstrap_splash_text(self):
        """ Emits the standard component banner plus the basic execution-environment fingerprint. """
        self.composite_logger.log_raw("---------------------------------------------------------------------------------------------------------------------------------------------"
                                      "\n Microsoft.CPlat.Core.LinuxPatchExtension (Compute Platform \\ AzGPS) -- Copyright (c) Microsoft Corporation. All rights reserved. "
                                      "\n * Component: [%exec_name%]"
                                      "\n * Version: [%exec_ver%] ([%exec_build_timestamp%])"
                                      "\n * Source: https://github.com/Azure/LinuxPatchExtension "
                                      "\n---------------------------------------------------------------------------------------------------------------------------------------------")
        self.composite_logger.log("[BS] Execution environment: [PythonVersion={0}][Distribution={1}][ProcessId={2}][MachineId={3}]".format(sys.version.split()[0], str(self.env_layer.platform.linux_distribution()), str(os.getpid()), self.env_layer.platform.node()))
    # endregion Public Methods

    # region High-risk Methods - no telemetry. must be extremely robust. handler must capture output and return.
    @staticmethod
    def __get_current_env():
        """ Decides what execution environment to bootstrap with """
        current_env = str(os.getenv(Constants.AZGPS_LPE_ENVIRONMENT_VAR, Constants.ExecEnv.PROD))
        current_env = Constants.ExecEnv.PROD if current_env not in [Constants.ExecEnv.DEV, Constants.ExecEnv.TEST, Constants.ExecEnv.PROD] else current_env
        return current_env

    def __get_value_from_argv(self, key, default_value=Constants.DEFAULT_UNSPECIFIED_VALUE):
        """ Discovers the value assigned to a given key based on the core contract on arguments
            (odd indices are keys, the following even index is the value).
            Raises if the key is absent and no default was supplied. """
        argv = self.argv
        for x in range(1, len(argv)):
            if x % 2 == 1:  # key checker
                # BUGFIX: guard must be x + 1 < len(argv) (previously x < len(argv), which is always
                # true inside this loop and allowed an IndexError when the key was the last argument).
                if str(argv[x]).lower() == key.lower() and x + 1 < len(argv):
                    return str(argv[x + 1])
        if default_value == Constants.DEFAULT_UNSPECIFIED_VALUE:
            raise Exception("Unable to find key {0} in core arguments: {1}.".format(key, str(argv)))
        else:
            return default_value

    def __get_cloud_type(self, auto_assessment_only):
        """ Tries to determine cloud type as efficiently and accurately as possible for dependency injection """
        try:
            if not auto_assessment_only:
                return Constants.CloudType.AZURE  # trivial selection for non-Auto-assessment scenarios

            cloud_type = self.__get_config_setting_value(Constants.ConfigSettings.CLOUD_TYPE)
            if cloud_type not in [Constants.CloudType.AZURE, Constants.CloudType.ARC]:
                raise Exception("Unknown cloud type. [CloudType={0}]".format(str(cloud_type)))
            return Constants.CloudType.AZURE if cloud_type != Constants.CloudType.ARC else Constants.CloudType.ARC
        except Exception as error:  # this should not happen if services are configured correctly
            print('[BS] Unable to read cloud type. Reverting to instance metadata service check. [Error={0}]'.format(repr(error)))
            return self.__get_cloud_type_using_imds()

    def __get_config_setting_value(self, key):
        """ This is only to be used for highly critical settings in bootstrapper to reduce failure probability. Lazy loading is for improving runtime safety. """
        if self.config_settings is None:
            self.config_settings = self.__get_decoded_json_from_argv(Constants.ARG_CONFIG_SETTINGS)
        return self.config_settings[key]

    def __get_decoded_json_from_argv(self, key):
        """ Discovers and decodes the JSON body of a specific base64 encoded JSON object in input arguments. """
        value = self.__get_value_from_argv(key)
        try:
            decoded_json = json.loads(base64.b64decode(value.replace("b\'", "")).decode())
        except Exception as error:
            raise Exception('Unable to process JSON in core arguments. [Key={0}][Error={1}]'.format(str(key), repr(error)))
        return decoded_json

    @staticmethod
    def __get_cloud_type_using_imds():
        """ Detects cloud type of the VM, in auto-assessment scenarios where the AzGPS Linux Patch Extension runs in Azure Arc.
            Logic taken from Hybrid Compute RP code: https://github.com/PowerShell/DesiredStateConfiguration/blob/dev/src/dsc/dsc_service/service_main.cpp#L115 """
        request = urlreq.Request(Constants.Config.IMDS_END_POINT)
        request.add_header('Metadata', "True")
        request.add_header('UserAgent', "ArcAgent")
        for i in range(0, Constants.MAX_IMDS_CONNECTION_RETRY_COUNT):
            try:
                print("INFO: Bootstrapper: Trying to connect to the IMDS endpoint. [URL={0}][Attempt={1}]".format(str(Constants.Config.IMDS_END_POINT), str(i + 1)))
                res = urlreq.urlopen(request, timeout=2)
                if res.getcode() != 200:
                    raise Exception("Unexpected return code: {0}.".format(str(res.getcode())))
                else:
                    print("- Return code: 200. [CloudType=Azure]\n")
                    return Constants.CloudType.AZURE
            except Exception as error:
                # Failed to connect to Azure IMDS endpoint. This is expected on Arc machine - but not expected on Azure machine.
                print('- IMDS connection attempt failed. [Error={0}]'.format(repr(error)))
                if i < Constants.MAX_IMDS_CONNECTION_RETRY_COUNT - 1:
                    time.sleep(i + 1)  # linear back-off between attempts
                else:
                    print("INFO: Bootstrapper: Failed to connect to the IMDS endpoint after {0} retries. [CloudType=Arc]\n".format(Constants.MAX_IMDS_CONNECTION_RETRY_COUNT))
                    return Constants.CloudType.ARC

    def __get_recorder_emulator_flags(self):
        """ Determines if the recorder or emulator flags need to be changed from the defaults """
        recorder_enabled = False
        emulator_enabled = False
        try:
            # NOTE(review): bool() of any non-empty string is True, so any supplied value (even "False")
            # enables the flag; absence of the flag raises and leaves the default - confirm this is intended.
            recorder_enabled = bool(self.__get_value_from_argv(Constants.ARG_INTERNAL_RECORDER_ENABLED))
            emulator_enabled = bool(self.__get_value_from_argv(Constants.ARG_INTERNAL_EMULATOR_ENABLED))
            print("INFO: Bootstrapper: [Recorder={0}][Emulator={1}]".format(recorder_enabled, emulator_enabled))
        except Exception:
            pass  # flags are optional; defaults (False, False) apply when not specified
        return recorder_enabled, emulator_enabled

    def __get_path_to_log_files_and_telemetry_dir(self, argv, auto_assessment_only):
        """ Performs the minimum steps required to determine where to start logging """
        sequence_number = self.__get_value_from_argv(Constants.ARG_SEQUENCE_NUMBER)
        decode_bytes = base64.b64decode(self.__get_value_from_argv(Constants.ARG_ENVIRONMENT_SETTINGS).replace("b\'", ""))
        decode_value = decode_bytes.decode()
        environment_settings = json.loads(decode_value)
        log_folder = environment_settings[Constants.EnvSettings.LOG_FOLDER]  # can throw exception and that's okay (since we can't recover from this)
        exec_demarcator = ".aa" if auto_assessment_only else ""
        log_file_path = os.path.join(log_folder, str(sequence_number) + exec_demarcator + ".core.log")
        real_rec_path = os.path.join(log_folder, str(sequence_number) + exec_demarcator + ".core.rec")
        events_folder = environment_settings[Constants.EnvSettings.EVENTS_FOLDER]  # can throw exception and that's okay (since we can't recover from this)
        telemetry_supported = environment_settings[Constants.EnvSettings.TELEMETRY_SUPPORTED]
        return log_file_path, real_rec_path, events_folder, telemetry_supported

    def __reset_auto_assessment_log_file_if_too_large(self):
        """ Deletes the auto assessment log file when needed to prevent excessive growth """
        try:
            if self.auto_assessment_only and os.path.exists(self.log_file_path) and os.path.getsize(self.log_file_path) > Constants.MAX_AUTO_ASSESSMENT_LOGFILE_SIZE_IN_BYTES:
                os.remove(self.log_file_path)
                self.auto_assessment_log_file_truncated = True
        except Exception as error:
            print("INFO: Bootstrapper: Error while checking/removing auto-assessment log file. [Path={0}][ExistsRecheck={1}]".format(self.log_file_path, str(os.path.exists(self.log_file_path))))

    def __get_activity_id_from_config_settings_for_telemetry(self):
        """ Returns the activity id of the operation for use in telemetry *only* """
        try:
            return self.__get_config_setting_value(Constants.ConfigSettings.ACTIVITY_ID)
        except Exception as error:
            return Constants.DEFAULT_UNSPECIFIED_VALUE  # no logging because the outcome in telemetry will be self-explanatory
    # endregion High-risk Methods - no telemetry
""" from __future__ import print_function import os -import time from core.src.bootstrap.Constants import Constants from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.bootstrap.ExitJanitor import ExitJanitor -from core.src.core_logic.ConfigurePatchingProcessor import ConfigurePatchingProcessor +from core_logic.patch_operators.ConfigurePatchingProcessor import ConfigurePatchingProcessor +from core_logic.CoreExecutionEngine import CoreExecutionEngine from core.src.core_logic.ExecutionConfig import ExecutionConfig from core.src.core_logic.MaintenanceWindow import MaintenanceWindow from core.src.core_logic.PackageFilter import PackageFilter from core.src.core_logic.RebootManager import RebootManager -from core.src.core_logic.PatchAssessor import PatchAssessor -from core.src.core_logic.PatchInstaller import PatchInstaller +from core_logic.patch_operators.PatchAssessor import PatchAssessor +from core_logic.patch_operators.PatchInstaller import PatchInstaller from core.src.core_logic.ServiceManager import ServiceManager from core.src.core_logic.ServiceManager import ServiceInfo @@ -36,56 +37,59 @@ from core.src.local_loggers.FileLogger import FileLogger from core.src.local_loggers.CompositeLogger import CompositeLogger -from core.src.package_managers.AptitudePackageManager import AptitudePackageManager -from core.src.package_managers.YumPackageManager import YumPackageManager -from core.src.package_managers.ZypperPackageManager import ZypperPackageManager +from package_managers.apt.AptPackageManager import AptPackageManager +from package_managers.apt.AptPatchModeManager import AptPatchModeManager +from package_managers.apt.AptSourcesManager import AptSourcesManager +from package_managers.apt.AptHealthManager import AptHealthManager -from core.src.service_interfaces.LifecycleManager import LifecycleManager -from core.src.service_interfaces.LifecycleManagerAzure import LifecycleManagerAzure -from core.src.service_interfaces.LifecycleManagerArc import 
LifecycleManagerArc +from package_managers.yum.YumPackageManager import YumPackageManager +from package_managers.yum.YumPatchModeManager import YumPatchModeManager +from package_managers.yum.YumSourcesManager import YumSourcesManager +from package_managers.yum.YumHealthManager import YumHealthManager + +from package_managers.zypper.ZypperPackageManager import ZypperPackageManager +from package_managers.zypper.ZypperPatchModeManager import ZypperPatchModeManager +from package_managers.zypper.ZypperSourcesManager import ZypperSourcesManager +from package_managers.zypper.ZypperHealthManager import ZypperHealthManager + +from service_interfaces.lifecycle_managers.LifecycleManagerAzure import LifecycleManagerAzure +from service_interfaces.lifecycle_managers.LifecycleManagerArc import LifecycleManagerArc from core.src.service_interfaces.StatusHandler import StatusHandler from core.src.service_interfaces.TelemetryWriter import TelemetryWriter -# Todo: find a different way to import these -try: - import urllib2 as urlreq #Python 2.x -except: - import urllib.request as urlreq #Python 3.x - class ConfigurationFactory(object): """ Class for generating module definitions. Configuration is list of key value pairs. Please DON'T change key name. DI container relies on the key name to find and resolve dependencies. If you do need change it, please make sure to update the key name in all places that reference it. 
""" - def __init__(self, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported): - self.vm_cloud_type = self.get_vm_cloud_type() - self.lifecycle_manager_component = self.get_lifecycle_manager_component(self.vm_cloud_type) + def __init__(self, cloud_type, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported): + self.cloud_type = cloud_type self.bootstrap_configurations = { - 'prod_config': self.new_bootstrap_configuration(Constants.PROD, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported), - 'dev_config': self.new_bootstrap_configuration(Constants.DEV, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported), - 'test_config': self.new_bootstrap_configuration(Constants.TEST, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported) + 'prod_config': self.__new_bootstrap_configuration(Constants.ExecEnv.PROD, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported), + 'dev_config': self.__new_bootstrap_configuration(Constants.ExecEnv.DEV, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported), + 'test_config': self.__new_bootstrap_configuration(Constants.ExecEnv.TEST, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported) } self.configurations = { - 'apt_prod_config': self.new_prod_configuration(Constants.APT, AptitudePackageManager), - 'yum_prod_config': self.new_prod_configuration(Constants.YUM, YumPackageManager), - 'zypper_prod_config': self.new_prod_configuration(Constants.ZYPPER, ZypperPackageManager), + 'apt_prod_config': self.__new_prod_configuration(Constants.APT, AptPackageManager, AptPatchModeManager, AptSourcesManager, AptHealthManager), + 'yum_prod_config': 
self.__new_prod_configuration(Constants.YUM, YumPackageManager, YumPatchModeManager, YumSourcesManager, YumHealthManager), + 'zypper_prod_config': self.__new_prod_configuration(Constants.ZYPPER, ZypperPackageManager, ZypperPatchModeManager, ZypperSourcesManager, ZypperHealthManager), - 'apt_dev_config': self.new_dev_configuration(Constants.APT, AptitudePackageManager), - 'yum_dev_config': self.new_dev_configuration(Constants.YUM, YumPackageManager), - 'zypper_dev_config': self.new_dev_configuration(Constants.ZYPPER, ZypperPackageManager), + 'apt_dev_config': self.__new_dev_configuration(Constants.APT, AptPackageManager, AptPatchModeManager, AptSourcesManager, AptHealthManager), + 'yum_dev_config': self.__new_dev_configuration(Constants.YUM, YumPackageManager, YumPatchModeManager, YumSourcesManager, YumHealthManager), + 'zypper_dev_config': self.__new_dev_configuration(Constants.ZYPPER, ZypperPackageManager, ZypperPatchModeManager, ZypperSourcesManager, ZypperHealthManager), - 'apt_test_config': self.new_test_configuration(Constants.APT, AptitudePackageManager), - 'yum_test_config': self.new_test_configuration(Constants.YUM, YumPackageManager), - 'zypper_test_config': self.new_test_configuration(Constants.ZYPPER, ZypperPackageManager) + 'apt_test_config': self.__new_test_configuration(Constants.APT, AptPackageManager, AptPatchModeManager, AptSourcesManager, AptHealthManager), + 'yum_test_config': self.__new_test_configuration(Constants.YUM, YumPackageManager, YumPatchModeManager, YumSourcesManager, YumHealthManager), + 'zypper_test_config': self.__new_test_configuration(Constants.ZYPPER, ZypperPackageManager, ZypperPatchModeManager, ZypperSourcesManager, ZypperHealthManager) } # region - Configuration Getters def get_bootstrap_configuration(self, env): """ Get core configuration for bootstrapping the application. 
""" - if str(env) not in [Constants.DEV, Constants.TEST, Constants.PROD]: - print ("Error: Environment configuration not supported - " + str(env)) + if str(env) not in [Constants.ExecEnv.DEV, Constants.ExecEnv.TEST, Constants.ExecEnv.PROD]: + print ("ERROR: Environment configuration not supported. [Environment={0}]".format(str(env))) return None configuration_key = str.lower('{0}_config'.format(str(env))) @@ -108,13 +112,11 @@ def get_arguments_configuration(argv): def get_configuration(self, env, package_manager_name): """ Gets the final configuration for a given env and package manager. """ - if str(env) not in [Constants.DEV, Constants.TEST, Constants.PROD]: - print ("Error: Environment configuration not supported - " + str(env)) - return None + if str(env) not in [Constants.ExecEnv.DEV, Constants.ExecEnv.TEST, Constants.ExecEnv.PROD]: + raise Exception("ERROR: Environment configuration not supported. [Env={0}]".format(str(env))) if str(package_manager_name) not in [Constants.APT, Constants.YUM, Constants.ZYPPER]: - print ("Error: Package manager configuration not supported - " + str(package_manager_name)) - return None + raise Exception("ERROR: Package manager configuration not supported. [PackageManagerName={0}]".format(str(package_manager_name))) configuration_key = str.lower('{0}_{1}_config'.format(str(package_manager_name), str(env))) selected_configuration = self.configurations[configuration_key] @@ -123,7 +125,7 @@ def get_configuration(self, env, package_manager_name): # region - Configuration Builders @staticmethod - def new_bootstrap_configuration(config_env, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported): + def __new_bootstrap_configuration(config_env, log_file_path, real_record_path, recorder_enabled, emulator_enabled, events_folder, telemetry_supported): """ Core configuration definition. 
""" configuration = { 'config_env': config_env, @@ -158,35 +160,54 @@ def new_bootstrap_configuration(config_env, log_file_path, real_record_path, rec 'events_folder_path': events_folder, 'telemetry_supported': telemetry_supported } - }, + } } - if config_env is Constants.DEV or config_env is Constants.TEST: + if config_env is Constants.ExecEnv.DEV or config_env is Constants.ExecEnv.TEST: pass # modify config as desired return configuration - def new_prod_configuration(self, package_manager_name, package_manager_component): - """ Base configuration for Prod V2. """ - + def __new_prod_configuration(self, package_manager_name, package_manager_component, patch_mode_manager_component, sources_manager_component, health_manager_component): + """ Base configuration for production environments. """ configuration = { - 'config_env': Constants.PROD, + 'config_env': Constants.ExecEnv.PROD, 'package_manager_name': package_manager_name, + 'exit_janitor': { + 'component': ExitJanitor, + 'component_args': ['env_layer', 'execution_config', 'composite_logger'], + 'component_kwargs': {} + }, 'status_handler': { 'component': StatusHandler, - 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer'], + 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'package_manager_name'], 'component_kwargs': { - 'vm_cloud_type': self.vm_cloud_type + 'cloud_type': self.cloud_type } }, 'lifecycle_manager': { - 'component': self.lifecycle_manager_component, + 'component': LifecycleManagerAzure if self.cloud_type == Constants.CloudType.AZURE else LifecycleManagerArc, 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler'], 'component_kwargs': {} }, + 'patch_mode_manager': { + 'component': patch_mode_manager_component, + 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler', 'package_manager_name'], + 'component_kwargs': 
{} + }, + 'sources_manager': { + 'component': sources_manager_component, + 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler', 'package_manager_name'], + 'component_kwargs': {} + }, + 'health_manager': { + 'component': health_manager_component, + 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler', 'package_manager_name'], + 'component_kwargs': {} + }, 'package_manager': { 'component': package_manager_component, - 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler'], + 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'telemetry_writer', 'status_handler', 'patch_mode_manager', 'sources_manager', 'health_manager', 'package_manager_name'], 'component_kwargs': {} }, 'reboot_manager': { @@ -239,63 +260,27 @@ def new_prod_configuration(self, package_manager_name, package_manager_component 'component': MaintenanceWindow, 'component_args': ['env_layer', 'execution_config', 'composite_logger', 'status_handler'], 'component_kwargs': {} + }, + 'core_execution_engine': { + 'component': CoreExecutionEngine, + 'component_args': ['env_layer', 'execution_config', 'file_logger', 'composite_logger', 'telemetry_writer', 'lifecycle_manager', 'status_handler', 'package_manager', 'configure_patching_processor', 'patch_assessor', 'patch_installer'], + 'component_kwargs': {} } } return configuration - def new_dev_configuration(self, package_manager_name, package_manager_component): + def __new_dev_configuration(self, package_manager_name, package_manager_component, patch_mode_manager_component, sources_manager_component, health_manager_component): """ Base configuration definition for dev. It derives from the production configuration. 
""" - configuration = self.new_prod_configuration(package_manager_name, package_manager_component) - configuration['config_env'] = Constants.DEV + configuration = self.__new_prod_configuration(package_manager_name, package_manager_component, patch_mode_manager_component, sources_manager_component, health_manager_component) + configuration['config_env'] = Constants.ExecEnv.DEV # perform desired modifications to configuration return configuration - def new_test_configuration(self, package_manager_name, package_manager_component): + def __new_test_configuration(self, package_manager_name, package_manager_component, patch_mode_manager_component, sources_manager_component, health_manager_component): """ Base configuration definition for test. It derives from the production configuration. """ - configuration = self.new_prod_configuration(package_manager_name, package_manager_component) - configuration['config_env'] = Constants.TEST + configuration = self.__new_prod_configuration(package_manager_name, package_manager_component, patch_mode_manager_component, sources_manager_component, health_manager_component) + configuration['config_env'] = Constants.ExecEnv.TEST # perform desired modifications to configuration return configuration - - @staticmethod - def get_lifecycle_manager_component(vm_cloud_type): - """ finding life cycle manager based on vm and returning component name added in the prod configuration """ - azure_lifecycle_manager_component = LifecycleManagerAzure - arc_lifecycle_manager_component = LifecycleManagerArc - if vm_cloud_type == Constants.VMCloudType.AZURE: - return azure_lifecycle_manager_component - elif vm_cloud_type == Constants.VMCloudType.ARC: - return arc_lifecycle_manager_component - - return azure_lifecycle_manager_component - - @staticmethod - def get_vm_cloud_type(): - """ detects vm type. 
logic taken from HCRP code: https://github.com/PowerShell/DesiredStateConfiguration/blob/dev/src/dsc/dsc_service/service_main.cpp#L115 - Todo: how to check this only when it is Auto Assessment operation??? """ - metadata_value = "True" - user_agent_value = "ArcAgent" - request = urlreq.Request(Constants.IMDS_END_POINT) - request.add_header('Metadata', metadata_value) - request.add_header('UserAgent', user_agent_value) - print("\nTrying to connect IMDS end point. URL:{0}.".format(str(Constants.IMDS_END_POINT))) - for i in range(0, Constants.MAX_IMDS_CONNECTION_RETRY_COUNT): - try: - print("Connecting to IMDS endpoint...") - res = urlreq.urlopen(request, timeout=2) - print("- Return code from IMDS connection http request: {0}.".format(str(res.getcode()))) - if res.getcode() == 200: - print("- Connection to IMDS end point successfully established. VMCloudType is Azure.\n") - return Constants.VMCloudType.AZURE - else: - raise - except Exception as error: - """ Failed to connect to Azure IMDS endpoint. This is expected on Arc machine - but not expected on Azure machine.""" - print('- Exception from IMDS connection http request: ' + repr(error)) - print("- Failed to connect to IMDS end point. [Trial={0}].".format(str(i+1))) - if i < Constants.MAX_IMDS_CONNECTION_RETRY_COUNT - 1: - time.sleep(i+1) - else: - print("Failed to connect IMDS end point after 5 retries. This is expected in Arc VMs. VMCloudType is set to Arc.\n") - return Constants.VMCloudType.ARC # endregion + diff --git a/src/core/src/bootstrap/Constants.py b/src/core/src/bootstrap/Constants.py index 88a8be544..ce8704b43 100644 --- a/src/core/src/bootstrap/Constants.py +++ b/src/core/src/bootstrap/Constants.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -25,18 +25,27 @@ def __iter__(self): if item == self.__dict__[item]: yield item - DEFAULT_UNSPECIFIED_VALUE = '7d12c6abb5f74eecec4b94e19ac3d418' # non-colliding default to distinguish between user selection and true default where used - GLOBAL_EXCLUSION_LIST = "" # if a package needs to be blocked across all of Azure - UNKNOWN = "Unknown" + class ExecEnv(EnumBackport): + DEV = 'Dev' + TEST = 'Test' + PROD = 'Prod' + + class ExitCode(EnumBackport): + Okay = 0 + CriticalError = 1 + CriticalError_NoLog = 2 + CriticalError_NoStatus = 3 + CriticalError_Reported = 4 - # Extension version (todo: move to a different file) - EXT_VERSION = "1.6.48" + DEFAULT_UNSPECIFIED_VALUE = '7d12c6abb5f74eecec4b94e19ac3d418' # non-colliding default to distinguish between user selection and true default where used + + AZGPS_LPE_VERSION = "[%exec_ver%]" + AZGPS_LPE_ENVIRONMENT_VAR = "AZGPS_LPE_ENV" # Overrides environment setting - # Runtime environments - TEST = 'Test' - DEV = 'Dev' - PROD = 'Prod' - LPE_ENV_VARIABLE = "LPE_ENV" # Overrides environment setting + class BufferMessage(EnumBackport): + TRUE = 0 + FALSE = 1 + FLUSH = 2 # Execution Arguments ARG_SEQUENCE_NUMBER = '-sequenceNumber' @@ -50,6 +59,17 @@ def __iter__(self): # Max values MAX_AUTO_ASSESSMENT_LOGFILE_SIZE_IN_BYTES = 5*1024*1024 MAX_AUTO_ASSESSMENT_WAIT_FOR_MAIN_CORE_EXEC_IN_MINUTES = 3 * 60 + UTC_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + + class Config(EnumBackport): + AZGPS_PACKAGE_EXCLUSION_LIST = "" # if a package needs to be blocked across all of Azure + IMDS_END_POINT = "http://169.254.169.254/metadata/instance/compute?api-version=2019-06-01" + PACKAGE_INSTALL_EXPECTED_MAX_TIME_IN_MINUTES = 5 + REBOOT_BUFFER_IN_MINUTES = 15 +
REBOOT_WAIT_TIMEOUT_IN_MINUTES = 5 + STATUS_ERROR_MSG_SIZE_LIMIT_IN_CHARACTERS = 128 + STATUS_ERROR_LIMIT = 5 + LIFECYCLE_MANAGER_STATUS_CHECK_WAIT_IN_SECS = 30 class SystemPaths(EnumBackport): SYSTEMD_ROOT = "/etc/systemd/system/" @@ -66,6 +86,7 @@ class EnvSettings(EnumBackport): TELEMETRY_SUPPORTED = "telemetrySupported" class ConfigSettings(EnumBackport): + CLOUD_TYPE = 'cloudType' OPERATION = 'operation' ACTIVITY_ID = 'activityId' START_TIME = 'startTime' @@ -88,30 +109,31 @@ class EulaSettings(EnumBackport): TEMP_FOLDER_DIR_NAME = "tmp" TEMP_FOLDER_CLEANUP_ARTIFACT_LIST = ["*.list"] - # File to save default settings for auto OS updates - IMAGE_DEFAULT_PATCH_CONFIGURATION_BACKUP_PATH = "ImageDefaultPatchConfiguration.bak" - # Auto assessment shell script name - CORE_AUTO_ASSESS_SH_FILE_NAME = "MsftLinuxPatchAutoAssess.sh" - AUTO_ASSESSMENT_SERVICE_NAME = "MsftLinuxPatchAutoAssess" - AUTO_ASSESSMENT_SERVICE_DESC = "Microsoft Azure Linux Patch Extension - Auto Assessment" + CORE_AUTO_ASSESS_SH_FILE_NAME = "AzGPSLinuxPatchAutoAssess.sh" + AUTO_ASSESSMENT_SERVICE_NAME = "AzGPSLinuxPatchAutoAssess" + AUTO_ASSESSMENT_SERVICE_DESC = "Azure Guest Patching Service - Auto Assessment" # Operations - AUTO_ASSESSMENT = 'AutoAssessment' - ASSESSMENT = "Assessment" - INSTALLATION = "Installation" - CONFIGURE_PATCHING = "ConfigurePatching" - CONFIGURE_PATCHING_AUTO_ASSESSMENT = "ConfigurePatching_AutoAssessment" # only used internally - PATCH_ASSESSMENT_SUMMARY = "PatchAssessmentSummary" - PATCH_INSTALLATION_SUMMARY = "PatchInstallationSummary" - PATCH_METADATA_FOR_HEALTHSTORE = "PatchMetadataForHealthStore" - CONFIGURE_PATCHING_SUMMARY = "ConfigurePatchingSummary" + class Op(EnumBackport): + # NO_OPERATION = "NoOperation" # not used in Core + CONFIGURE_PATCHING = "ConfigurePatching" + CONFIGURE_PATCHING_AUTO_ASSESSMENT = "ConfigurePatching_AutoAssessment" # only used internally + ASSESSMENT = "Assessment" + INSTALLATION = "Installation" + + class OpSummary(EnumBackport): + # 
NO_OPERATION = "PatchNoOperationSummary" # not used in Core + CONFIGURE_PATCHING = "ConfigurePatchingSummary" + ASSESSMENT = "PatchAssessmentSummary" + INSTALLATION = "PatchInstallationSummary" + PATCH_METADATA_FOR_HEALTHSTORE = "PatchMetadataForHealthStore" # patch versions for healthstore when there is no maintenance run id PATCH_VERSION_UNKNOWN = "UNKNOWN" # Strings used in perf logs - class PerfLogTrackerParams: + class PerfLogTrackerParams(EnumBackport): TASK = "Task" TASK_STATUS = "TaskStatus" PACKAGE_MANAGER = "PackageManager" @@ -132,7 +154,7 @@ class TaskStatus(EnumBackport): SUCCEEDED = "succeeded" FAILED = "failed" - # Patch Modes for Configure Patching + # region - Configure Patching class PatchModes(EnumBackport): IMAGE_DEFAULT = "ImageDefault" AUTOMATIC_BY_PLATFORM = "AutomaticByPlatform" @@ -147,22 +169,23 @@ class AutomaticOSPatchStates(EnumBackport): DISABLED = "Disabled" ENABLED = "Enabled" - # List of auto OS update services in Yum - # todo: move to yumpackagemanager - class YumAutoOSUpdateServices(EnumBackport): - YUM_CRON = "yum-cron" - DNF_AUTOMATIC = "dnf-automatic" - PACKAGEKIT = "packagekit" - - # auto assessment states class AutoAssessmentStates(EnumBackport): UNKNOWN = "Unknown" ERROR = "Error" DISABLED = "Disabled" ENABLED = "Enabled" + # File to save default settings for auto OS updates + IMAGE_DEFAULT_PATCH_CONFIGURATION_BACKUP_PATH = "ImageDefaultPatchConfiguration.bak" + + class YumAutoOSUpdateServices(EnumBackport): + YUM_CRON = "yum-cron" + DNF_AUTOMATIC = "dnf-automatic" + PACKAGEKIT = "packagekit" + # endregion - Configure Patching + # To separately preserve assessment + auto-assessment state information - ASSESSMENT_STATE_FILE = "AssessmentState.json" + AUTO_ASSESSMENT_MAXIMUM_DURATION = "PT1H" # maximum time assessment is expected to take AUTO_ASSESSMENT_CRON_INTERVAL = "PT1H" # wake up to check for persistent assessment information this frequently AUTO_ASSESSMENT_INTERVAL_BUFFER = "PT1H" # allow for an hour's buffer from max 
interval passed down (PT6H) to keep within "max" SLA @@ -170,40 +193,46 @@ class AutoAssessmentStates(EnumBackport): # wait time after status updates WAIT_TIME_AFTER_HEALTHSTORE_STATUS_UPDATE_IN_SECS = 20 - # Status file states - STATUS_TRANSITIONING = "Transitioning" - STATUS_ERROR = "Error" - STATUS_SUCCESS = "Success" - STATUS_WARNING = "Warning" - # Wrapper-core handshake files - EXT_STATE_FILE = 'ExtState.json' - CORE_STATE_FILE = 'CoreState.json' - - # Operating System distributions - UBUNTU = 'Ubuntu' - RED_HAT = 'Red Hat' - SUSE = 'SUSE' - CENTOS = 'CentOS' + class StateFiles(EnumBackport): + EXT = 'ExtState.json' + CORE = 'CoreState.json' + ASSESSMENT = "AssessmentState.json" + HEARTBEAT = "Heartbeat.json" # Package Managers APT = 'apt' YUM = 'yum' ZYPPER = 'zypper' + class Status(EnumBackport): + TRANSITIONING = "Transitioning" + SUCCESS = "Success" + ERROR = "Error" + WARNING = "Warning" + # Package Statuses - INSTALLED = 'Installed' - FAILED = 'Failed' - EXCLUDED = 'Excluded' # explicitly excluded - PENDING = 'Pending' - NOT_SELECTED = 'NotSelected' # implicitly not installed as it wasn't explicitly included - AVAILABLE = 'Available' # assessment only + class PackageStatus(EnumBackport): + INSTALLED = "Installed" + FAILED = "Failed" + EXCLUDED = "Excluded" # explicitly excluded + PENDING = "Pending" + NOT_SELECTED = "NotSelected" # implicitly not installed as it wasn't explicitly included + AVAILABLE = "Available" # assessment only, but unused as it's implicit when we don't do inventory + + class PackageClassification(EnumBackport): + UNCLASSIFIED = 'Unclassified' + CRITICAL = 'Critical' + SECURITY = 'Security' + SECURITY_ESM = 'Security-ESM' + OTHER = 'Other' UA_ESM_REQUIRED = "UA_ESM_Required" UNKNOWN_PACKAGE_SIZE = "Unknown" PACKAGE_STATUS_REFRESH_RATE_IN_SECONDS = 10 MAX_FILE_OPERATION_RETRY_COUNT = 5 + MAX_PATCH_OPERATION_RETRY_COUNT = 5 MAX_ASSESSMENT_RETRY_COUNT = 5 MAX_INSTALLATION_RETRY_COUNT = 3 MAX_IMDS_CONNECTION_RETRY_COUNT = 5 @@ -211,112 
+240,99 @@ class AutoAssessmentStates(EnumBackport): MAX_BATCH_SIZE_FOR_PACKAGES = 3 MAX_COMPLETE_STATUS_FILES_TO_RETAIN = 10 - class PackageClassification(EnumBackport): - UNCLASSIFIED = 'Unclassified' - CRITICAL = 'Critical' - SECURITY = 'Security' - SECURITY_ESM = 'Security-ESM' - OTHER = 'Other' + # region Telemetry related + class TelemetryConfig(EnumBackport): + """ Telemetry limits that are imposed by the Azure Linux Agent """ + MSG_SIZE_LIMIT_IN_CHARS = 3072 + EVENT_SIZE_LIMIT_IN_CHARS = 6144 + EVENT_FILE_SIZE_LIMIT_IN_CHARS = 4194304 + DIR_SIZE_LIMIT_IN_CHARS = 41943040 + BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS = 25 # buffer for the chars dropped text added at the end of the truncated telemetry message + EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS = 15 # buffer for telemetry event counter text added at the end of every message sent to telemetry + MAX_EVENT_COUNT_THROTTLE = 72 # increased by Agent team for AzGPS in 2023 (up from 60) + MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = 60 - PKG_MGR_SETTING_FILTER_CRITSEC_ONLY = 'FilterCritSecOnly' - PKG_MGR_SETTING_IDENTITY = 'PackageManagerIdentity' - PKG_MGR_SETTING_IGNORE_PKG_FILTER = 'IgnorePackageFilter' - - # Reboot Manager - REBOOT_NEVER = 'Never reboot' - REBOOT_IF_REQUIRED = 'Reboot if required' - REBOOT_ALWAYS = 'Always reboot' - REBOOT_SETTINGS = { # API to exec-code mapping (+incl. 
validation) - 'Never': REBOOT_NEVER, - 'IfRequired': REBOOT_IF_REQUIRED, - 'Always': REBOOT_ALWAYS - } - REBOOT_BUFFER_IN_MINUTES = 15 - REBOOT_WAIT_TIMEOUT_IN_MINUTES = 5 + class TelemetryTaskName(EnumBackport): + UNKNOWN = "Core.Unknown" # function parameter default + STARTUP = "Core.Startup" # initial value until execution mode is determined + EXEC = "Core.Exec" # mainline execution triggered from handler + AUTO_ASSESSMENT = "Core.AutoAssessment" # auto-assessment triggered from scheduler - # Installation Reboot Statuses - class RebootStatus(EnumBackport): - NOT_NEEDED = "NotNeeded" - REQUIRED = "Required" - STARTED = "Started" - COMPLETED = "Completed" - FAILED = "Failed" - - # Enum for VM Cloud Type - class VMCloudType(EnumBackport): + class EventLevel(EnumBackport): + # Critical = "Critical" # unused by AzGPS + Error = "Error" + Warning = "Warning" + Info = "Informational" + Debug = "Debug" + Verbose = "Verbose" # do not log to telemetry - AzGPS override + # LogAlways = "LogAlways" # unused by AzGPS + # endregion Telemetry related + + class RebootSettings(EnumBackport): + NEVER = "Never" # Never reboot + IF_REQUIRED = "IfRequired" # Reboot if required + ALWAYS = "Always" # Reboot at least once + + # region Internal constants + class CloudType(EnumBackport): UNKNOWN = "Unknown" AZURE = "Azure" ARC = "Arc" - IMDS_END_POINT = "http://169.254.169.254/metadata/instance/compute?api-version=2019-06-01" - - # StartedBy Patch Assessment Summary Status Values - class PatchAssessmentSummaryStartedBy(EnumBackport): - USER = "User" - PLATFORM = "Platform" - - # Maintenance Window - PACKAGE_INSTALL_EXPECTED_MAX_TIME_IN_MINUTES = 5 - # Package Manager Setting + PKG_MGR_SETTING_IDENTITY = 'PackageManagerIdentity' PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION = "RepeatUpdateRun" + ERROR_ADDED_TO_STATUS = "Error_added_to_status" - # Settings for Error Objects logged in status file - STATUS_ERROR_MSG_SIZE_LIMIT_IN_CHARACTERS = 128 - STATUS_ERROR_LIMIT = 5 + # EnvLayer Constants + 
class EnvLayer(EnumBackport): + PRIVILEGED_OP_MARKER = "Privileged_Op_e6df678d-d09b-436a-a08a-65f2f70a6798" + PRIVILEGED_OP_REBOOT = PRIVILEGED_OP_MARKER + "Reboot_Exception" + PRIVILEGED_OP_EXIT = PRIVILEGED_OP_MARKER + "Exit_" + # endregion Internal constants + # region Status Handler constants class PatchOperationTopLevelErrorCode(EnumBackport): SUCCESS = 0 ERROR = 1 class PatchOperationErrorCodes(EnumBackport): + """ Error codes for significant errors. CL_ = Client error, SV_ = Service error. Others = specialized errors. """ + INFO = "INFO" # informational message; no error DEFAULT_ERROR = "ERROR" # default error code OPERATION_FAILED = "OPERATION_FAILED" - PACKAGE_MANAGER_FAILURE = "PACKAGE_MANAGER_FAILURE" - NEWER_OPERATION_SUPERSEDED = "NEWER_OPERATION_SUPERSEDED" + CL_PYTHON_TOO_OLD = "CL_PYTHON_TOO_OLD" + CL_SUDO_CHECK_FAILED = "CL_SUDO_CHECK_FAILED" + CL_AGENT_TOO_OLD = "CL_AGENT_TOO_OLD" + CL_PACKAGE_MANAGER_FAILURE = "CL_PACKAGE_MANAGER_FAILURE" + CL_NEWER_OPERATION_SUPERSEDED = "CL_NEWER_OPERATION_SUPERSEDED" + CL_SYSTEMD_NOT_PRESENT = "CL_SYSTEMD_NOT_PRESENT" + SV_MAINTENANCE_WINDOW_ERROR = "SV_MAINTENANCE_WINDOW_ERROR" + PATCH_MODE_SET_FAILURE = "PATCH_MODE_SET_FAILURE" UA_ESM_REQUIRED = "UA_ESM_REQUIRED" - ERROR_ADDED_TO_STATUS = "Error_added_to_status" - - TELEMETRY_ENABLED_AT_EXTENSION = True - - # Telemetry Settings - # Note: these limits are based on number of characters as confirmed with agent team - TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS = 3072 - TELEMETRY_EVENT_SIZE_LIMIT_IN_CHARS = 6144 - TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 4194304 - TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 41943040 - TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS = 25 # buffer for the chars dropped text added at the end of the truncated telemetry message - TELEMETRY_EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS = 15 # buffer for telemetry event counter text added at the end of every message sent to telemetry - TELEMETRY_MAX_EVENT_COUNT_THROTTLE = 60 - 
TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = 60 - - # Telemetry Event Level - class TelemetryEventLevel(EnumBackport): - Critical = "Critical" - Error = "Error" - Warning = "Warning" - Verbose = "Verbose" - Informational = "Informational" - LogAlways = "LogAlways" + class Errors(EnumBackport): + UNHANDLED_EXCEPTION = "Severe unhandled exception. [Error={0}]" + PYTHON_NOT_COMPATIBLE = "Unsupported older Python version. Minimum Python version required is 2.7. [DetectedPythonVersion={0}]" + SUDO_FAILURE = "Sudo status check failed. Please ensure the computer is configured correctly for sudo invocation." + NO_TELEMETRY_SUPPORT_AT_AGENT = "Unsupported older Azure Linux Agent version. To resolve: https://aka.ms/UpdateLinuxAgent" + MINIMUM_REQUIREMENTS_NOT_MET = "Minimum requirements for patch operation execution were not met. [PythonNotCompatible={0}][SudoFailure={1}][OldAgentVersion={2}]" + INVALID_REBOOT_SETTING = "Invalid reboot setting. Resetting to default. [RequestedRebootSetting={0}][DefaultRebootSetting={1}]" + SYSTEMD_NOT_PRESENT = "Systemd is not available on this system, and platform-based auto-assessment cannot be configured." + INSTALLATION_FAILED_DUE_TO_ASSESSMENT_FAILURE = "Patch installation failed due to assessment failure. Please refer to the error details in the assessment substatus." - # Telemetry Task Names for disambiguation - class TelemetryTaskName(EnumBackport): - UNKNOWN = "Core.Unknown" # function parameter default - STARTUP = "Core.Startup" # initial value until execution mode is determined - EXEC = "Core.Exec" # mainline execution triggered from handler - AUTO_ASSESSMENT = "Core.AutoAssessment" # auto-assessment triggered from scheduler - - TELEMETRY_NOT_COMPATIBLE_ERROR_MSG = "Unsupported older Azure Linux Agent version. To resolve: http://aka.ms/UpdateLinuxAgent" - TELEMETRY_COMPATIBLE_MSG = "Minimum Azure Linux Agent version prerequisite met" - PYTHON_NOT_COMPATIBLE_ERROR_MSG = "Unsupported older Python version. 
Minimum Python version required is 2.7. [DetectedPythonVersion={0}]" - UTC_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + # Installation Reboot Statuses + class RebootStatus(EnumBackport): + NOT_NEEDED = "NotNeeded" + REQUIRED = "Required" + STARTED = "Started" + COMPLETED = "Completed" + FAILED = "Failed" - # EnvLayer Constants - class EnvLayer(EnumBackport): - PRIVILEGED_OP_MARKER = "Privileged_Op_e6df678d-d09b-436a-a08a-65f2f70a6798" - PRIVILEGED_OP_REBOOT = PRIVILEGED_OP_MARKER + "Reboot_Exception" - PRIVILEGED_OP_EXIT = PRIVILEGED_OP_MARKER + "Exit_" + # StartedBy Patch Assessment Summary Status Values + class PatchAssessmentSummaryStartedBy(EnumBackport): + USER = "User" + PLATFORM = "Platform" # Package / Patch State Ordering Constants # This ordering ensures that the most important information is preserved in the case of patch object truncation @@ -329,13 +345,16 @@ class EnvLayer(EnumBackport): } PatchStateOrderInStatusReporting = { - FAILED: 1, - INSTALLED: 2, - AVAILABLE: 3, - PENDING: 4, - EXCLUDED: 5, - NOT_SELECTED: 6 + PackageStatus.FAILED: 1, + PackageStatus.INSTALLED: 2, + PackageStatus.AVAILABLE: 3, + PackageStatus.PENDING: 4, + PackageStatus.EXCLUDED: 5, + PackageStatus.NOT_SELECTED: 6 } + # endregion Status Handler constants + + # Ubuntu Pro Client constants. class UbuntuProClientSettings(EnumBackport): @@ -344,8 +363,3 @@ class UbuntuProClientSettings(EnumBackport): MAX_OS_MAJOR_VERSION_SUPPORTED = 18 MINIMUM_CLIENT_VERSION = "27.14.4" - class BufferMessage(EnumBackport): - TRUE = 0 - FALSE = 1 - FLUSH = 2 - diff --git a/src/core/src/bootstrap/Container.py b/src/core/src/bootstrap/Container.py index 351e40221..feede15ba 100644 --- a/src/core/src/bootstrap/Container.py +++ b/src/core/src/bootstrap/Container.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/bootstrap/EnvLayer.py b/src/core/src/bootstrap/EnvLayer.py index ab2e1f6a9..75cbedb45 100644 --- a/src/core/src/bootstrap/EnvLayer.py +++ b/src/core/src/bootstrap/EnvLayer.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -89,7 +89,7 @@ def set_env_var(self, var_name, var_value=None, raise_if_not_success=False): try: environment_vars = self.file_system.read_with_retry(self.etc_environment_file_path) if environment_vars is None: - print("Error occurred while setting environment variable: File not found. [Variable={0}] [Value={1}] [Path={2}]".format(str(var_name), str(var_value), self.etc_environment_file_path)) + print("Error occurred while setting environment variable: File not found. 
[Variable={0}][Value={1}][Path={2}]".format(str(var_name), str(var_value), self.etc_environment_file_path)) return environment_vars_lines = environment_vars.strip().split("\n") @@ -121,7 +121,7 @@ def set_env_var(self, var_name, var_value=None, raise_if_not_success=False): self.file_system.write_with_retry(self.etc_environment_file_path, environment_vars, 'w') except Exception as error: - print("Error occurred while setting environment variable [Variable={0}] [Value={1}] [Exception={2}]".format(str(var_name), str(var_value), repr(error))) + print("Error occurred while setting environment variable [Variable={0}][Value={1}][Exception={2}]".format(str(var_name), str(var_value), repr(error))) if raise_if_not_success: raise @@ -130,7 +130,7 @@ def get_env_var(self, var_name, raise_if_not_success=False): try: environment_vars = self.file_system.read_with_retry(self.etc_environment_file_path) if environment_vars is None: - print("Error occurred while getting environment variable: File not found. [Variable={0}] [Path={1}]".format(str(var_name), self.etc_environment_file_path)) + print("Error occurred while getting environment variable: File not found. 
[Variable={0}][Path={1}]".format(str(var_name), self.etc_environment_file_path)) return None # get specific environment variable value @@ -143,7 +143,7 @@ def get_env_var(self, var_name, raise_if_not_success=False): return group[group.index("=")+1:] except Exception as error: - print("Error occurred while getting environment variable [Variable={0}] [Exception={1}]".format(str(var_name), repr(error))) + print("Error occurred while getting environment variable [Variable={0}][Exception={1}]".format(str(var_name), repr(error))) if raise_if_not_success: raise @@ -333,7 +333,7 @@ def __init__(self, recorder_enabled=True, emulator_enabled=False, write_record_d self.__emulator_root_path = emulator_root_path # file-names of files that other processes may changes the contents of - self.__non_exclusive_files = [Constants.EXT_STATE_FILE] + self.__non_exclusive_files = [Constants.StateFiles.EXT] def resolve_path(self, requested_path): """ Resolves any paths used with desired file system paths """ @@ -594,7 +594,7 @@ def __record_reader_init(self): # region - Legacy mode extensions def set_legacy_test_mode(self): - print("Switching env layer to legacy test mode...") + print("[EL] Switching env layer to legacy test mode.\n") self.datetime = self.DateTime(False, False, self.__write_record, self.__read_record) self.file_system = self.FileSystem(False, False, self.__write_record, self.__read_record, emulator_root_path=os.path.dirname(self.__real_record_path)) # endregion - Legacy mode extensions diff --git a/src/core/src/bootstrap/ExitJanitor.py b/src/core/src/bootstrap/ExitJanitor.py new file mode 100644 index 000000000..fa3896b38 --- /dev/null +++ b/src/core/src/bootstrap/ExitJanitor.py @@ -0,0 +1,76 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +""" ExitJanitor - responsible for orchestrating all cleanup activities at managed execution termination """ +import os +from core.src.bootstrap.Constants import Constants + +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger + + +class ExitJanitor(object): + def __init__(self, env_layer, execution_config, composite_logger): + # type: (EnvLayer, ExecutionConfig, CompositeLogger) -> None + + # All the entities below are guaranteed to be working at initialization + self.env_layer = env_layer + self.execution_config = execution_config + self.composite_logger = composite_logger + + # region - Rudimentary clean up with minimal dependencies + @staticmethod + def final_exit(exit_code=Constants.ExitCode.Okay, stdout_file_mirror=None, file_logger=None, lifecycle_manager=None, telemetry_writer=None, config_env=Constants.ExecEnv.PROD): + """ Common code for exit in almost all cases (some exceptions apply) """ + if telemetry_writer is not None: + telemetry_writer.write_event("[EJ][EXIT] Completed Linux Patch Core execution.", Constants.EventLevel.Info) + if lifecycle_manager is not None: + lifecycle_manager.update_core_sequence(completed=True) + if stdout_file_mirror is not None: + stdout_file_mirror.stop() + if file_logger is not None: + file_logger.close(message_at_close="\n[EJ][EXIT] End of all output. 
Execution complete.") + + if config_env != Constants.ExecEnv.DEV: + exit(exit_code) + #raise Exception("[EJ][DEV] Intercepted exit. [ExitCode={0}]".format(str(exit_code)), Constants.EnvLayer.PRIVILEGED_OP_MARKER) + + @staticmethod + def safely_handle_extreme_failure(stdout_file_mirror, file_logger, lifecycle_manager, telemetry_writer, exception, config_env=Constants.ExecEnv.PROD): + """ Encapsulates the most basic failure management without instantiation of even ExitJanitor """ + if Constants.EnvLayer.PRIVILEGED_OP_MARKER in repr(exception): + raise # Privileged operation handling for non-production use + print(Constants.Errors.UNHANDLED_EXCEPTION.format(repr(exception))) # should be captured by handler + ExitJanitor.final_exit(Constants.ExitCode.CriticalError, stdout_file_mirror, file_logger, lifecycle_manager, telemetry_writer) + # endregion - Rudimentary clean up with minimal dependencies + + # region - Post-operational housekeeping + def perform_housekeeping_tasks(self): + # type: () -> None + """ Performs environment maintenance tasks that need to happen after core business logic execution. 
""" + if os.path.exists(self.execution_config.temp_folder): + self.composite_logger.log_debug("[EJ] Deleting all files of certain format from temp folder [FileFormat={0}][TempFolderLocation={1}]".format(Constants.TEMP_FOLDER_CLEANUP_ARTIFACT_LIST, str(self.execution_config.temp_folder))) + self.env_layer.file_system.delete_files_from_dir(self.execution_config.temp_folder, Constants.TEMP_FOLDER_CLEANUP_ARTIFACT_LIST) + # endregion - Post-operational housekeeping + + def handle_terminal_exception(self, exception, log_file_path): + # type: (Exception, str) -> None + """ Highest-level exception handling for core operations """ + self.composite_logger.log_error("TERMINAL EXCEPTION: {0}.\nLOGS FOR SUPPORT: {1}".format(str(exception.args[0] if len(exception.args) > 1 else repr(exception)), log_file_path)) + self.composite_logger.log_debug("[EJ] Terminal exception details for debugging: {0}".format(repr(exception))) + diff --git a/src/core/src/bootstrap/__init__.py b/src/core/src/bootstrap/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/bootstrap/__init__.py +++ b/src/core/src/bootstrap/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/core_logic/ConfigurePatchingProcessor.py b/src/core/src/core_logic/ConfigurePatchingProcessor.py deleted file mode 100644 index a6d700045..000000000 --- a/src/core/src/core_logic/ConfigurePatchingProcessor.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright 2020 Microsoft Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Requires Python 2.7+ - -""" Configure Patching """ -from core.src.bootstrap.Constants import Constants - - -class ConfigurePatchingProcessor(object): - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, auto_assess_service_manager, auto_assess_timer_manager, lifecycle_manager): - self.env_layer = env_layer - self.execution_config = execution_config - - self.composite_logger = composite_logger - self.telemetry_writer = telemetry_writer - self.status_handler = status_handler - - self.package_manager = package_manager - self.auto_assess_service_manager = auto_assess_service_manager - self.auto_assess_timer_manager = auto_assess_timer_manager - self.lifecycle_manager = lifecycle_manager - - self.current_auto_os_patch_state = Constants.AutomaticOSPatchStates.UNKNOWN - self.current_auto_assessment_state = Constants.AutoAssessmentStates.UNKNOWN - self.configure_patching_successful = True - self.configure_patching_exception_error = None - - def start_configure_patching(self): - """ Start configure patching """ - try: - self.composite_logger.log("\nStarting configure patching... 
[MachineId: " + self.env_layer.platform.node() +"][ActivityId: " + self.execution_config.activity_id +"][StartTime: " + self.execution_config.start_time +"]") - self.status_handler.set_current_operation(Constants.CONFIGURE_PATCHING) - self.__raise_if_telemetry_unsupported() - - self.__report_consolidated_configure_patch_status(status=Constants.STATUS_TRANSITIONING) - self.__try_set_patch_mode() - self.__try_set_auto_assessment_mode() - - # If the tracked operation is Configure patching, we cannot write a final status until assessment has also written a final status (mitigation for a CRP bug) - if self.execution_config.operation.lower() != Constants.CONFIGURE_PATCHING.lower(): - self.set_configure_patching_final_overall_status() - except Exception as error: - self.current_auto_assessment_state = Constants.AutoAssessmentStates.ERROR - self.configure_patching_exception_error = error - # If the tracked operation is Configure patching, we cannot write a final status until assessment has also written a final status (mitigation for a CRP bug) - if self.execution_config.operation != Constants.CONFIGURE_PATCHING.lower(): - self.__report_consolidated_configure_patch_status(status=Constants.STATUS_ERROR, error=self.configure_patching_exception_error) - self.configure_patching_successful &= False - - self.composite_logger.log("\nConfigure patching completed.\n") - return self.configure_patching_successful - - def set_configure_patching_final_overall_status(self): - """ Writes the final overall status after any pre-requisite operation is also in a terminal state - currently this is only assessment """ - overall_status = Constants.STATUS_SUCCESS if self.configure_patching_successful else Constants.STATUS_ERROR - if self.configure_patching_exception_error is None: - self.__report_consolidated_configure_patch_status(status=overall_status) - else: - self.__report_consolidated_configure_patch_status(status=overall_status, error=self.configure_patching_exception_error) - - def 
__try_set_patch_mode(self): - """ Set the patch mode for the VM """ - try: - self.status_handler.set_current_operation(Constants.CONFIGURE_PATCHING) - self.current_auto_os_patch_state = self.package_manager.get_current_auto_os_patch_state() - - # disable auto OS updates if VM is configured for platform updates only. - # NOTE: this condition will be false for Assessment operations, since patchMode is not sent in the API request - if self.current_auto_os_patch_state != Constants.AutomaticOSPatchStates.DISABLED and self.execution_config.patch_mode == Constants.PatchModes.AUTOMATIC_BY_PLATFORM: - self.package_manager.disable_auto_os_update() - - self.current_auto_os_patch_state = self.package_manager.get_current_auto_os_patch_state() - - if self.execution_config.patch_mode == Constants.PatchModes.AUTOMATIC_BY_PLATFORM and self.current_auto_os_patch_state == Constants.AutomaticOSPatchStates.UNKNOWN: - # NOTE: only sending details in error objects for customer visibility on why patch state is unknown, overall configurepatching status will remain successful - self.configure_patching_exception_error = "Could not disable one or more automatic OS update services. Please check if they are configured correctly" - - self.composite_logger.log_debug("Completed processing patch mode configuration.") - except Exception as error: - self.composite_logger.log_error("Error while processing patch mode configuration. 
[Error={0}]".format(repr(error))) - self.configure_patching_exception_error = error - self.configure_patching_successful &= False - - def __try_set_auto_assessment_mode(self): - """ Sets the preferred auto-assessment mode for the VM """ - try: - self.status_handler.set_current_operation(Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT) - self.composite_logger.log_debug("Systemd information: {0}".format(str(self.auto_assess_service_manager.get_version()))) # proactive support telemetry - - if self.execution_config.assessment_mode is None: - self.composite_logger.log_debug("No assessment mode config was present. No configuration changes will occur.") - elif self.execution_config.assessment_mode == Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM: - self.composite_logger.log_debug("Enabling platform-based automatic assessment.") - if not self.auto_assess_service_manager.systemd_exists(): - raise Exception("Systemd is not available on this system, and platform-based auto-assessment cannot be configured.") - self.auto_assess_service_manager.create_and_set_service_idem() - self.auto_assess_timer_manager.create_and_set_timer_idem() - self.current_auto_assessment_state = Constants.AutoAssessmentStates.ENABLED - elif self.execution_config.assessment_mode == Constants.AssessmentModes.IMAGE_DEFAULT: - self.composite_logger.log_debug("Disabling platform-based automatic assessment.") - self.auto_assess_timer_manager.remove_timer() - self.auto_assess_service_manager.remove_service() - self.current_auto_assessment_state = Constants.AutoAssessmentStates.DISABLED - else: - raise Exception("Unknown assessment mode specified. 
[AssessmentMode={0}]".format(self.execution_config.assessment_mode)) - - self.__report_consolidated_configure_patch_status() - self.composite_logger.log_debug("Completed processing automatic assessment mode configuration.") - except Exception as error: - # deliberately not setting self.configure_patching_exception_error here as it does not feed into the parent object. Not a bug, if you're thinking about it. - self.composite_logger.log_error("Error while processing automatic assessment mode configuration. [Error={0}]".format(repr(error))) - self.__report_consolidated_configure_patch_status(status=Constants.STATUS_TRANSITIONING, error=error) - self.configure_patching_successful &= False - - # revert operation back to parent - self.composite_logger.log_debug("Restoring status handler operation to {0}.".format(Constants.CONFIGURE_PATCHING)) - self.status_handler.set_current_operation(Constants.CONFIGURE_PATCHING) - - def __report_consolidated_configure_patch_status(self, status=Constants.STATUS_TRANSITIONING, error=Constants.DEFAULT_UNSPECIFIED_VALUE): - """ Reports """ - self.composite_logger.log_debug("Reporting consolidated current configure patch status. 
[OSPatchState={0}][AssessmentState={1}]".format(self.current_auto_os_patch_state, self.current_auto_assessment_state)) - - # report error if specified - if error != Constants.DEFAULT_UNSPECIFIED_VALUE: - error_msg = 'Error: ' + repr(error) - self.composite_logger.log_error(error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR, current_operation_override_for_error=Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT) - - # write consolidated status - self.status_handler.set_configure_patching_substatus_json(status=status, - automatic_os_patch_state=self.current_auto_os_patch_state, - auto_assessment_state=self.current_auto_assessment_state) - - def __raise_if_telemetry_unsupported(self): - if self.lifecycle_manager.get_vm_cloud_type() == Constants.VMCloudType.ARC and self.execution_config.operation not in [Constants.ASSESSMENT, Constants.INSTALLATION]: - self.composite_logger.log("Skipping telemetry compatibility check for Arc cloud type when operation is not manual") - return - if not self.telemetry_writer.is_telemetry_supported(): - error_msg = "{0}".format(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG) - raise Exception(error_msg) - - self.composite_logger.log("{0}".format(Constants.TELEMETRY_COMPATIBLE_MSG)) diff --git a/src/core/src/core_logic/CoreExecutionEngine.py b/src/core/src/core_logic/CoreExecutionEngine.py new file mode 100644 index 000000000..a210c049c --- /dev/null +++ b/src/core/src/core_logic/CoreExecutionEngine.py @@ -0,0 +1,190 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +""" Core Execution Engine """ +import os +import sys +from core.src.bootstrap.Constants import Constants + +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.FileLogger import FileLogger +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PackageManager import PackageManager +from core.src.core_logic.patch_operators.ConfigurePatchingProcessor import ConfigurePatchingProcessor +from core.src.core_logic.patch_operators.PatchAssessor import PatchAssessor +from core.src.core_logic.patch_operators.PatchInstaller import PatchInstaller +from core.src.service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager + + +class CoreExecutionEngine(object): + def __init__(self, env_layer, execution_config, file_logger, composite_logger, telemetry_writer,lifecycle_manager, status_handler, package_manager, configure_patching_processor, patch_assessor, patch_installer): + # type: (EnvLayer, ExecutionConfig, FileLogger, CompositeLogger, TelemetryWriter, LifecycleManager, StatusHandler, PackageManager, ConfigurePatchingProcessor, PatchAssessor, PatchInstaller) -> None + + # All the entities below are guaranteed to be working at initialization + self.env_layer = env_layer + 
self.execution_config = execution_config + self.file_logger = file_logger + self.composite_logger = composite_logger + self.telemetry_writer = telemetry_writer + self.lifecycle_manager = lifecycle_manager + self.status_handler = status_handler + + # Frequently referred fields + self.patch_operation_requested = self.execution_config.operation.lower() + self.package_manager = package_manager + self.configure_patching_processor = configure_patching_processor + self.patch_assessor = patch_assessor + self.patch_installer = patch_installer + + def execute(self): + # type: () -> None + """ Execution orchestrator for patch operations (business logic). + Each operation is expected to be self-contained and no longer raise exceptions -by design- (but failures can be tolerated for investigation).""" + + # Verify minimum environment requirements (reports to status handler and raises if not met) + self.check_minimum_environment_requirements_and_report(self.patch_operation_requested) + + # Auto-assessment only + if self.execution_config.exec_auto_assess_only: + self.patch_assessor.start_operation_with_retries() # auto-assessment only + return + + # Configure-patching and Assessment always happen together for both operations - historical note: this is due to a CRP bug whose mitigation is baked into lower layers + self.configure_patching_processor.start_operation_with_retries() + self.patch_assessor.start_operation_with_retries() + self.configure_patching_processor.set_final_operation_status() # configure patching can only be closed after assessment - required for CRP operation tracking + + # Installation + if self.patch_operation_requested == Constants.Op.INSTALLATION.lower(): + self.patch_installer.start_operation_with_retries() + + # second assessment after patch installation + self.patch_assessor.reset_operation_internal_state() + self.patch_assessor.start_operation_with_retries() + + if not self.patch_assessor.operation_successful: + self.patch_installer.operation_successful_incl_assessment = False + + 
self.patch_installer.set_final_operation_status() # installation is only closed after the final assessment - required for CRP operation tracking + + return + + def try_set_final_status_handler_statuses(self): + """ Non-throwing call-path for caller safety """ + try: + self.set_final_status_handler_statuses() + except Exception: + pass + + def set_final_status_handler_statuses(self): + """ Writes error-level final statuses to the status handler for any operations that did not complete successfully. """ + debug_log = "[CEE] Writing final status handler statuses. " + + if not self.configure_patching_processor.operation_successful: + self.composite_logger.log_verbose("[CEE] Persisting final configure patching status.") + self.status_handler.set_configure_patching_substatus_json(status=Constants.Status.ERROR, automatic_os_patch_state=self.configure_patching_processor.current_auto_os_patch_state, auto_assessment_state=self.configure_patching_processor.current_auto_assessment_state) + debug_log += "[CP=Error]" + + if not self.patch_assessor.operation_successful: + if self.patch_operation_requested == Constants.Op.INSTALLATION.lower(): + self.composite_logger.log_verbose("[CEE] Noting installation failed due to an assessment failure.") + self.status_handler.add_error_to_status(message=Constants.Errors.INSTALLATION_FAILED_DUE_TO_ASSESSMENT_FAILURE, error_code=Constants.PatchOperationErrorCodes.OPERATION_FAILED, current_operation_override_for_error=Constants.Op.INSTALLATION) + debug_log += "[IP_AF=StatusAdd]" + if self.patch_operation_requested != Constants.Op.CONFIGURE_PATCHING.lower(): + self.composite_logger.log_verbose("[CEE] Persisting final assess patches status.") + self.status_handler.set_assessment_substatus_json(status=Constants.Status.ERROR) + debug_log += "[AP=Error]" + + if self.patch_operation_requested == Constants.Op.INSTALLATION.lower() and not (self.patch_installer.operation_successful and self.patch_assessor.operation_successful): + self.composite_logger.log_verbose("[CEE] Persisting final install patches status.") + 
self.status_handler.set_installation_substatus_json(status=Constants.Status.ERROR) + debug_log += "[IP=Error]" + + self.composite_logger.log_debug(debug_log + "[#]") + + # region - Pre-operational housekeeping + def perform_housekeeping_tasks(self): + # type: () -> None + """ Performs environment maintenance tasks that need to happen before core business logic execution. """ + if os.path.exists(self.execution_config.temp_folder): + self.composite_logger.log_debug("[CEE] Deleting all files of certain format from temp folder [FileFormat={0}][TempFolderLocation={1}]".format(Constants.TEMP_FOLDER_CLEANUP_ARTIFACT_LIST, str(self.execution_config.temp_folder))) + self.env_layer.file_system.delete_files_from_dir(self.execution_config.temp_folder, Constants.TEMP_FOLDER_CLEANUP_ARTIFACT_LIST) + # endregion - Pre-operational housekeeping + + # region - Minimum environment requirements + def check_minimum_environment_requirements_and_report(self, patch_operation_requested): + # type: (Constants.Op) -> None + """ Checks all minimum environment requirements and reports to status_handler if needed """ + status_py, error_py = self.__check_if_min_python_version_met() + status_sudo, error_sudo = self.__check_sudo_status() + status_tel, error_tel = self.__check_telemetry_support_at_agent() + + for patch_operation in [Constants.Op.CONFIGURE_PATCHING, Constants.Op.ASSESSMENT, Constants.Op.INSTALLATION]: + if patch_operation_requested != Constants.Op.INSTALLATION.lower() and patch_operation == Constants.Op.INSTALLATION: + continue + self.status_handler.set_current_operation(patch_operation) + if not status_py: + self.status_handler.add_error_to_status(error_py, error_code=Constants.PatchOperationErrorCodes.CL_PYTHON_TOO_OLD) + if not status_sudo: + self.status_handler.add_error_to_status(error_sudo, error_code=Constants.PatchOperationErrorCodes.CL_SUDO_CHECK_FAILED) + if not status_tel: + self.status_handler.add_error_to_status(error_tel, 
error_code=Constants.PatchOperationErrorCodes.CL_AGENT_TOO_OLD) + if status_py & status_sudo & status_tel is not True: + self.status_handler.set_operation_substatus_json(operation_name=patch_operation, status=Constants.Status.ERROR) + + if status_py & status_sudo & status_tel is not True: + raise Exception(Constants.Errors.MINIMUM_REQUIREMENTS_NOT_MET.format(str(status_py),str(status_sudo),str(status_tel))) + + @staticmethod + def __check_if_min_python_version_met(): + # type: () -> (bool, str) + if sys.version_info < (2, 7): + error_msg = Constants.Errors.PYTHON_NOT_COMPATIBLE.format(sys.version_info) + return False, error_msg + else: + return True, None + + def __check_sudo_status(self): + # type: () -> (bool, str) + """ Checks if we can invoke sudo successfully. + Reference output: tools/references/cmd_output_references/sudo_output_expected.txt """ + try: + self.composite_logger.log_debug("Performing sudo status check... This should complete within 10 seconds.") + return_code, output = self.env_layer.run_command_output("sudo timeout 10 id && echo True || echo False", False, False) + + output_lines = output.splitlines() + if len(output_lines) >= 2 and output_lines[1] == "True": + return True, None + else: + error_msg = Constants.Errors.SUDO_FAILURE + " [Output={0}]".format(output) + except Exception as exception: + error_msg = Constants.Errors.SUDO_FAILURE + " [Error={0}]".format(str(exception)) + + return False, error_msg + + def __check_telemetry_support_at_agent(self): + # type: () -> (bool, str) + """ Checks if telemetry is supported by the Azure Linux Agent. Mocks a response if Arc. 
""" + if self.telemetry_writer.is_telemetry_supported() or self.lifecycle_manager.get_cloud_type() == Constants.CloudType.ARC: + return True, None + else: + return False, Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT + # endregion - Minimum environment requirements + diff --git a/src/core/src/core_logic/ExecutionConfig.py b/src/core/src/core_logic/ExecutionConfig.py index 48044e67e..6e5467c4f 100644 --- a/src/core/src/core_logic/ExecutionConfig.py +++ b/src/core/src/core_logic/ExecutionConfig.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -28,17 +28,17 @@ def __init__(self, env_layer, composite_logger, execution_parameters): self.composite_logger = composite_logger self.execution_parameters = eval(execution_parameters) # Environment details - self.global_exclusion_list = str(Constants.GLOBAL_EXCLUSION_LIST) if Constants.GLOBAL_EXCLUSION_LIST else None + self.global_exclusion_list = str(Constants.Config.AZGPS_PACKAGE_EXCLUSION_LIST) if Constants.Config.AZGPS_PACKAGE_EXCLUSION_LIST else None # Decoded input parameters - self.composite_logger.log_debug(" - Decoding input parameters...[InputParameters={0}]".format(str(execution_parameters))) + self.composite_logger.log_debug("[EC] Decoding input parameters...[InputParameters={0}]".format(str(execution_parameters))) self.sequence_number = self.__get_value_from_argv(self.execution_parameters, Constants.ARG_SEQUENCE_NUMBER) self.environment_settings = self.__get_decoded_json_from_argv(self.execution_parameters, Constants.ARG_ENVIRONMENT_SETTINGS) self.config_settings = self.__get_decoded_json_from_argv(self.execution_parameters, Constants.ARG_CONFIG_SETTINGS) self.exec_auto_assess_only = 
(self.__get_value_from_argv(self.execution_parameters, Constants.ARG_AUTO_ASSESS_ONLY, False)).lower() == 'true' # Environment Settings - self.composite_logger.log_debug(" - Parsing environment settings...") + self.composite_logger.log_debug("[EC] Parsing environment settings... [EnvironmentSettings={0}]".format(str(self.environment_settings))) self.log_folder = self.environment_settings[Constants.EnvSettings.LOG_FOLDER] self.config_folder = self.environment_settings[Constants.EnvSettings.CONFIG_FOLDER] self.status_folder = self.environment_settings[Constants.EnvSettings.STATUS_FOLDER] @@ -49,7 +49,7 @@ def __init__(self, env_layer, composite_logger, execution_parameters): self.telemetry_supported = self.environment_settings[Constants.EnvSettings.TELEMETRY_SUPPORTED] # Config Settings - self.composite_logger.log_debug(" - Parsing configuration settings... [ConfigSettings={0}]".format(str(self.config_settings))) + self.composite_logger.log_debug("[EC] Parsing configuration settings... [ConfigSettings={0}]".format(str(self.config_settings))) self.operation = self.config_settings[Constants.ConfigSettings.OPERATION] self.activity_id = self.config_settings[Constants.ConfigSettings.ACTIVITY_ID] self.start_time = self.config_settings[Constants.ConfigSettings.START_TIME] @@ -59,32 +59,31 @@ def __init__(self, env_layer, composite_logger, execution_parameters): self.excluded_package_name_mask_list = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.PATCHES_TO_EXCLUDE, []) self.maintenance_run_id = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.MAINTENANCE_RUN_ID) self.health_store_id = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.HEALTH_STORE_ID) - if self.operation == Constants.INSTALLATION: + if self.operation == Constants.Op.INSTALLATION: self.reboot_setting = self.config_settings[Constants.ConfigSettings.REBOOT_SETTING] # 
expected to throw if not present else: - self.reboot_setting = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.REBOOT_SETTING, Constants.REBOOT_NEVER) # safe extension-level default + self.reboot_setting = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.REBOOT_SETTING, Constants.RebootSettings.NEVER) # safe extension-level default self.patch_mode = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.PATCH_MODE) self.assessment_mode = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.ASSESSMENT_MODE) self.maximum_assessment_interval = self.__get_execution_configuration_value_safely(self.config_settings, Constants.ConfigSettings.MAXIMUM_ASSESSMENT_INTERVAL) - # Accommodation for bugs in higher-level components where 'Security' is being selected without selecting 'Critical' - should be rolled back no later than Jan 2022 + # Accommodation for bugs in higher-level components where 'Security' is being selected without selecting 'Critical' - left in place for long-term regression safety if self.included_classifications_list is not None and ('Security' in self.included_classifications_list and 'Critical' not in self.included_classifications_list): - self.composite_logger.log_debug("The included_classifications_list was corrected to include 'Critical' when 'Security' was specified.") + self.composite_logger.log_debug("[EC] The included_classifications_list was corrected to include 'Critical' when 'Security' was specified.", buffer_msg=Constants.BufferMessage.TRUE) self.included_classifications_list = ['Critical'] + self.included_classifications_list # Derived Settings self.log_file_path = os.path.join(self.log_folder, str(self.sequence_number) + ".core.log") self.complete_status_file_path = os.path.join(self.status_folder, str(self.sequence_number) + ".complete" + ".status") 
self.status_file_path = os.path.join(self.status_folder, str(self.sequence_number) + ".status") - self.include_assessment_with_configure_patching = (self.operation == Constants.CONFIGURE_PATCHING and self.assessment_mode == Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM) - self.composite_logger.log_debug(" - Derived execution-config settings. [CoreLog={0}][CompleteStatusFile={1}][StatusFile={2}][IncludeAssessmentWithConfigurePatching={3}]" - .format(str(self.log_file_path), str(self.complete_status_file_path), str(self.status_file_path), self.include_assessment_with_configure_patching)) + self.include_assessment_with_configure_patching = (self.operation == Constants.Op.CONFIGURE_PATCHING and self.assessment_mode == Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM) + self.composite_logger.log_debug("[EC] Derived execution-config settings: [CoreLog={0}][CompleteStatusFile={1}][StatusFile={2}][IncludeAssessmentWithConfigurePatching={3}]" + .format(str(self.log_file_path), str(self.complete_status_file_path), str(self.status_file_path), self.include_assessment_with_configure_patching), + buffer_msg=Constants.BufferMessage.TRUE) # Auto assessment overrides if self.exec_auto_assess_only: self.__transform_execution_config_for_auto_assessment() - else: - self.composite_logger.log_debug("Not executing in auto-assessment mode.") # EULA config self.accept_package_eula = self.__is_eula_accepted_for_all_patches() @@ -95,9 +94,9 @@ def __transform_execution_config_for_auto_assessment(self): self.maintenance_run_id = None self.start_time = self.env_layer.datetime.standard_datetime_to_utc(datetime.datetime.utcnow()) self.duration = Constants.AUTO_ASSESSMENT_MAXIMUM_DURATION - self.reboot_setting = Constants.REBOOT_NEVER + self.reboot_setting = Constants.RebootSettings.NEVER self.patch_mode = None - self.composite_logger.log_debug("Setting execution configuration values for auto assessment. 
[GeneratedActivityId={0}][StartTime={1}]".format(self.activity_id, str(self.start_time))) + self.composite_logger.log_debug("- [EC] Setting execution configuration values for auto assessment. [GeneratedActivityId={0}][StartTime={1}]".format(self.activity_id, str(self.start_time)), buffer_msg=Constants.BufferMessage.TRUE) @staticmethod def __get_value_from_argv(argv, key, default_value=Constants.DEFAULT_UNSPECIFIED_VALUE): @@ -175,7 +174,7 @@ def __check_and_create_temp_folder_if_not_exists(self): if self.temp_folder is None: par_dir = os.path.dirname(self.config_folder) if not os.path.exists(par_dir): - raise Exception("Parent directory for all extension artifacts such as config folder, status folder, etc. not found at [{0}].".format(repr(par_dir))) + raise Exception("Parent directory for all extension artifacts such as config folder, status folder, etc. not found. [Directory={0}].".format(repr(par_dir))) self.temp_folder = os.path.join(par_dir, Constants.TEMP_FOLDER_DIR_NAME) if not os.path.exists(self.temp_folder): @@ -194,12 +193,12 @@ def __is_eula_accepted_for_all_patches(self): last_modified = self.__fetch_specific_eula_setting(eula_settings, Constants.EulaSettings.LAST_MODIFIED) if accept_eula_for_all_patches is not None and accept_eula_for_all_patches in [True, 'True', 'true', '1', 1]: is_eula_accepted = True - self.composite_logger.log_debug("EULA config values from disk: [AcceptEULAForAllPatches={0}] [AcceptedBy={1}] [LastModified={2}]. Computed value of [IsEULAAccepted={3}]" + self.composite_logger.log_debug("[EC][PREVIEW] EULA config values from disk: [AcceptEULAForAllPatches={0}][AcceptedBy={1}][LastModified={2}][IsEULAAccepted={3}]" .format(str(accept_eula_for_all_patches), str(accepted_by), str(last_modified), str(is_eula_accepted))) else: - self.composite_logger.log_debug("No EULA Settings found on the VM. 
Computed value of [IsEULAAccepted={0}]".format(str(is_eula_accepted))) + self.composite_logger.log_debug("[EC][PREVIEW] No EULA Settings found on the VM. [IsEULAAccepted={0}]".format(str(is_eula_accepted))) except Exception as error: - self.composite_logger.log_debug("Error occurred while reading and parsing EULA settings. Not accepting EULA for any patch. Error=[{0}]".format(repr(error))) + self.composite_logger.log_debug("[EC][PREVIEW] ERROR occurred while processing EULA settings. [IsEULAAccepted={0}][Error={0}]".format(str(is_eula_accepted), repr(error))) return is_eula_accepted diff --git a/src/core/src/core_logic/MaintenanceWindow.py b/src/core/src/core_logic/MaintenanceWindow.py index 732e41928..528c186eb 100644 --- a/src/core/src/core_logic/MaintenanceWindow.py +++ b/src/core/src/core_logic/MaintenanceWindow.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,11 +19,18 @@ from datetime import timedelta from core.src.bootstrap.Constants import Constants +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.StatusHandler import StatusHandler + class MaintenanceWindow(object): """Implements the maintenance window logic""" def __init__(self, env_layer, execution_config, composite_logger, status_handler): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, StatusHandler) -> None self.execution_config = execution_config self.duration = self.execution_config.duration self.start_time = 
self.execution_config.start_time @@ -31,71 +38,60 @@ def __init__(self, env_layer, execution_config, composite_logger, status_handler self.env_layer = env_layer self.status_handler = status_handler - def get_remaining_time_in_minutes(self, current_time=None, log_to_stdout=False): - """Calculate time remaining base on the given job start time""" + def get_remaining_time_in_minutes(self, current_time=None): + # type: (str) -> int + """ Calculate time remaining base on the given job start time """ try: - if current_time is None: - current_time = self.env_layer.datetime.datetime_utcnow() - start_time = self.env_layer.datetime.utc_to_standard_datetime(self.start_time) - dur = datetime.datetime.strptime(self.duration, "%H:%M:%S") - dura = timedelta(hours=dur.hour, minutes=dur.minute, seconds=dur.second) - total_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(dura) - elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - start_time) - remaining_time_in_minutes = max((total_time_in_minutes - elapsed_time_in_minutes), 0) - - log_line = "Maintenance Window Utilization: " + str(timedelta(seconds=int(elapsed_time_in_minutes*60))) + " / " + self.duration + "\ - [Job start: " + str(start_time) + ", Current time: " + str(current_time.strftime("%Y-%m-%d %H:%M:%S")) + "]" - if log_to_stdout: - self.composite_logger.log(log_line) - else: - self.composite_logger.log_debug(log_line) + current_time = self.env_layer.datetime.datetime_utcnow() if current_time is None else current_time + local_start_time, elapsed_time_in_minutes, remaining_time_in_minutes, total_time_in_minutes = self.__get_start_elapsed_remaining_and_total_time_in_minutes(current_time) + self.composite_logger.log_verbose("[MW] Maintenance Window utilization. 
[ElapsedTime={0}][MaxDuration={1}][LocalStartTime={2}][CurrentTime={3}]".format(str(timedelta(seconds=int(elapsed_time_in_minutes*60))), self.duration, str(local_start_time), str(current_time.strftime("%Y-%m-%d %H:%M:%S")))) except ValueError as error: - error_msg = "Error calculating time remaining. Check patch operation input parameters." - self.composite_logger.log_error("\n" + error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - if Constants.ERROR_ADDED_TO_STATUS not in repr(error): - error.args = (error.args, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - raise + message = "Error calculating maintenance window time remaining. Check patch operation input parameters. [Error={0}]".format(repr(error)) + self.status_handler.add_error_to_status_and_log_error(message, raise_exception=True, error_code=Constants.PatchOperationErrorCodes.SV_MAINTENANCE_WINDOW_ERROR) + raise # redundant for IDE hinting return remaining_time_in_minutes + def get_maintenance_window_used_as_percentage(self): + # type: () -> int + """ Calculate percentage of maintenance window used. Not customer facing. """ + percent_maintenance_window_used = -1 + try: + local_start_time, elapsed_time_in_minutes, remaining_time_in_minutes, total_time_in_minutes = self.__get_start_elapsed_remaining_and_total_time_in_minutes() + percent_maintenance_window_used = (elapsed_time_in_minutes / total_time_in_minutes) * 100 + except Exception as error: + self.composite_logger.log_warning("[MW] Error calculating percentage of maintenance window used. 
[Error={0}]".format(repr(error))) + + return int(percent_maintenance_window_used) + def is_package_install_time_available(self, remaining_time_in_minutes=None, number_of_packages_in_batch=1): - """Check if time still available for package installation""" - cutoff_time_in_minutes = Constants.PACKAGE_INSTALL_EXPECTED_MAX_TIME_IN_MINUTES * number_of_packages_in_batch + # type: (int, int) -> bool + """ Check if time still available for package installation """ + cutoff_time_in_minutes = Constants.Config.PACKAGE_INSTALL_EXPECTED_MAX_TIME_IN_MINUTES * number_of_packages_in_batch - if Constants.REBOOT_SETTINGS[self.execution_config.reboot_setting] != Constants.REBOOT_NEVER: - cutoff_time_in_minutes = cutoff_time_in_minutes + Constants.REBOOT_BUFFER_IN_MINUTES + if self.execution_config.reboot_setting != Constants.RebootSettings.NEVER: + cutoff_time_in_minutes = cutoff_time_in_minutes + Constants.Config.REBOOT_BUFFER_IN_MINUTES if remaining_time_in_minutes is None: remaining_time_in_minutes = self.get_remaining_time_in_minutes() if remaining_time_in_minutes > cutoff_time_in_minutes: - self.composite_logger.log_debug("Time Remaining: " + str(timedelta(seconds=int(remaining_time_in_minutes * 60))) + ", Cutoff time: " + str(timedelta(minutes=cutoff_time_in_minutes))) + self.composite_logger.log_verbose("[MW] Sufficient package install time available. [TimeRemaining={0}][CutoffTime={1}][PackagesInBatch={2}]".format(str(timedelta(seconds=int(remaining_time_in_minutes * 60))), str(timedelta(minutes=cutoff_time_in_minutes)), str(number_of_packages_in_batch))) return True else: - self.composite_logger.log_warning("Time Remaining: " + str(timedelta(seconds=int(remaining_time_in_minutes * 60))) + ", Cutoff time: " + str(timedelta(minutes=cutoff_time_in_minutes)) + " [Out of time!]") + self.composite_logger.log_warning("[MW] Insufficient time to install additional packages. 
[TimeRemaining={0}][CutoffTime={1}][PackagesInBatch={2}]".format(str(timedelta(seconds=int(remaining_time_in_minutes * 60))), str(timedelta(minutes=cutoff_time_in_minutes)), str(number_of_packages_in_batch))) return False - def get_percentage_maintenance_window_used(self): - """Calculate percentage of maintenance window used""" - try: - current_time = self.env_layer.datetime.datetime_utcnow() - start_time = self.env_layer.datetime.utc_to_standard_datetime(self.start_time) - if current_time < start_time: - raise Exception("Start time {0} is greater than current time {1}".format(str(start_time), str(current_time))) - dur = datetime.datetime.strptime(self.duration, "%H:%M:%S") - dura = timedelta(hours=dur.hour, minutes=dur.minute, seconds=dur.second) - total_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(dura) - elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - start_time) - percent_maintenance_window_used = (elapsed_time_in_minutes / total_time_in_minutes) * 100 - except Exception as error: - error_msg = "Error calculating percentage of maintenance window used." - self.composite_logger.log_error("\n" + error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - if Constants.ERROR_ADDED_TO_STATUS not in repr(error): - error.args = (error.args, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - raise - - # Rounding off to one digit after decimal e.g. 14.514372666666667 will become 14.5 - percent_maintenance_window_used = round(percent_maintenance_window_used, 1) - return percent_maintenance_window_used \ No newline at end of file + def __get_start_elapsed_remaining_and_total_time_in_minutes(self, current_time=None): + # type: (str) -> (str, int, int , int) + """ Core maintenance window calculations. 
Current time format: "%Y-%m-%d %H:%M:%S.%f" """ + current_time = self.env_layer.datetime.datetime_utcnow() if current_time is None else current_time + local_start_time = self.env_layer.datetime.utc_to_standard_datetime(self.start_time) + dur = datetime.datetime.strptime(self.duration, "%H:%M:%S") + dura = timedelta(hours=dur.hour, minutes=dur.minute, seconds=dur.second) + total_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(dura) + elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - local_start_time) + remaining_time_in_minutes = max((total_time_in_minutes - elapsed_time_in_minutes), 0) + + return local_start_time, elapsed_time_in_minutes, remaining_time_in_minutes, total_time_in_minutes + diff --git a/src/core/src/core_logic/PackageFilter.py b/src/core/src/core_logic/PackageFilter.py index efed752a9..cb48a7a3a 100644 --- a/src/core/src/core_logic/PackageFilter.py +++ b/src/core/src/core_logic/PackageFilter.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -46,10 +46,10 @@ def __init__(self, execution_config, composite_logger): self.global_excluded_packages = [x for x in self.global_excluded_packages if x not in packages_to_clear_from_global] # Logging - self.composite_logger.log("\nAzure globally-excluded packages: " + str(self.global_excluded_packages)) - self.composite_logger.log("Included package classifications: " + ', '.join(self.installation_included_classifications)) - self.composite_logger.log("Included packages: " + str(self.installation_included_package_masks)) - self.composite_logger.log("Excluded packages: " + str(self.installation_excluded_packages)) + self.composite_logger.log_verbose("\n[PF] AzGPS globally-excluded packages: " + str(self.global_excluded_packages)) + self.composite_logger.log_verbose("[PF] Included package classifications: " + ', '.join(self.installation_included_classifications)) + self.composite_logger.log_verbose("[PF] Included packages: " + str(self.installation_included_package_masks)) + self.composite_logger.log_verbose("[PF] Excluded packages: " + str(self.installation_excluded_packages)) if '=' in str(self.installation_excluded_package_masks): self.composite_logger.log_error("\n /!\\ Package exclusions do not support version matching in the filter today. 
" "Due to this, more packages than expected may be excluded from this update deployment.") diff --git a/src/core/src/core_logic/PatchAssessor.py b/src/core/src/core_logic/PatchAssessor.py deleted file mode 100644 index 50d495842..000000000 --- a/src/core/src/core_logic/PatchAssessor.py +++ /dev/null @@ -1,260 +0,0 @@ -# Copyright 2020 Microsoft Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Requires Python 2.7+ - -""" A patch assessment """ -import datetime -import json -import os -import shutil -import sys -import time -from core.src.bootstrap.Constants import Constants -from core.src.core_logic.Stopwatch import Stopwatch - - -class PatchAssessor(object): - """ Wrapper class of a single patch assessment """ - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager): - self.env_layer = env_layer - self.execution_config = execution_config - - self.composite_logger = composite_logger - self.telemetry_writer = telemetry_writer - self.status_handler = status_handler - self.lifecycle_manager = lifecycle_manager - self.package_manager = package_manager - self.package_manager_name = self.package_manager.get_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY) - self.assessment_state_file_path = os.path.join(self.execution_config.config_folder, Constants.ASSESSMENT_STATE_FILE) - self.stopwatch = Stopwatch(self.env_layer, self.telemetry_writer, self.composite_logger) - - def 
start_assessment(self): - """ Start a patch assessment """ - self.status_handler.set_current_operation(Constants.ASSESSMENT) - self.raise_if_telemetry_unsupported() - self.raise_if_min_python_version_not_met() - - if self.execution_config.exec_auto_assess_only and not self.should_auto_assessment_run(): - self.composite_logger.log("\nSkipping automatic patch assessment... [ShouldAutoAssessmentRun=False]\n") - self.lifecycle_manager.lifecycle_status_check() - return True - - self.composite_logger.log("\nStarting patch assessment... [MachineId: " + self.env_layer.platform.node() +"][ActivityId: " + self.execution_config.activity_id +"][StartTime: " + self.execution_config.start_time +"]") - self.write_assessment_state() # success / failure does not matter, only that an attempt started - - self.stopwatch.start() - self.status_handler.set_assessment_substatus_json(status=Constants.STATUS_TRANSITIONING) - retry_count = 0 - - for i in range(0, Constants.MAX_ASSESSMENT_RETRY_COUNT): - try: - self.composite_logger.log("\n\nGetting available patches...") - self.package_manager.refresh_repo() - self.status_handler.reset_assessment_data() - - if self.lifecycle_manager is not None: - self.lifecycle_manager.lifecycle_status_check() # may terminate the code abruptly, as designed - - # All updates - retry_count = retry_count + 1 - - # All updates - packages, package_versions = self.package_manager.get_all_updates() - self.telemetry_writer.write_event("Full assessment: " + str(packages), Constants.TelemetryEventLevel.Verbose) - self.status_handler.set_package_assessment_status(packages, package_versions) - if self.lifecycle_manager is not None: - self.lifecycle_manager.lifecycle_status_check() # may terminate the code abruptly, as designed - sec_packages, sec_package_versions = self.package_manager.get_security_updates() - - # Tag security updates - self.telemetry_writer.write_event("Security assessment: " + str(sec_packages), Constants.TelemetryEventLevel.Verbose) - 
self.status_handler.set_package_assessment_status(sec_packages, sec_package_versions, Constants.PackageClassification.SECURITY) - - # Set the security-esm packages in status. - self.package_manager.set_security_esm_package_status(Constants.ASSESSMENT, packages=[]) - - # ensure reboot status is set - reboot_pending = self.package_manager.is_reboot_pending() - self.status_handler.set_reboot_pending(reboot_pending) - - self.status_handler.set_assessment_substatus_json(status=Constants.STATUS_SUCCESS) - break # avoid retries for success - - except Exception as error: - if i < Constants.MAX_ASSESSMENT_RETRY_COUNT - 1: - error_msg = 'Retriable error retrieving available patches: ' + repr(error) - self.composite_logger.log_warning(error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - time.sleep(2*(i + 1)) - else: - error_msg = 'Error retrieving available patches: ' + repr(error) - self.composite_logger.log_error(error_msg) - self.write_assessment_perf_logs(retry_count, Constants.TaskStatus.FAILED, error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - if Constants.ERROR_ADDED_TO_STATUS not in repr(error): - error.args = (error.args, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - self.status_handler.set_assessment_substatus_json(status=Constants.STATUS_ERROR) - raise - - self.write_assessment_perf_logs(retry_count, Constants.TaskStatus.SUCCEEDED, "") - self.composite_logger.log("\nPatch assessment completed.\n") - return True - - def write_assessment_perf_logs(self, retry_count, task_status, error_msg): - assessment_perf_log = "[{0}={1}][{2}={3}][{4}={5}][{6}={7}][{8}={9}][{10}={11}]".format( - Constants.PerfLogTrackerParams.TASK, Constants.ASSESSMENT, Constants.PerfLogTrackerParams.TASK_STATUS, str(task_status), - Constants.PerfLogTrackerParams.ERROR_MSG, error_msg, Constants.PerfLogTrackerParams.PACKAGE_MANAGER, self.package_manager_name, - 
Constants.PerfLogTrackerParams.RETRY_COUNT, str(retry_count), Constants.PerfLogTrackerParams.MACHINE_INFO, self.telemetry_writer.machine_info) - self.stopwatch.stop_and_write_telemetry(assessment_perf_log) - - def raise_if_telemetry_unsupported(self): - if self.lifecycle_manager.get_vm_cloud_type() == Constants.VMCloudType.ARC and self.execution_config.operation not in [Constants.ASSESSMENT, Constants.INSTALLATION]: - self.composite_logger.log("Skipping telemetry compatibility check for Arc cloud type when operation is not manual") - return - if not self.telemetry_writer.is_telemetry_supported(): - error_msg = "{0}".format(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG) - self.composite_logger.log_error(error_msg) - self.status_handler.set_assessment_substatus_json(status=Constants.STATUS_ERROR) - raise Exception(error_msg) - - self.composite_logger.log("{0}".format(Constants.TELEMETRY_COMPATIBLE_MSG)) - - def raise_if_min_python_version_not_met(self): - if sys.version_info < (2, 7): - error_msg = Constants.PYTHON_NOT_COMPATIBLE_ERROR_MSG.format(sys.version_info) - self.composite_logger.log_error(error_msg) - self.status_handler.set_assessment_substatus_json(status=Constants.STATUS_ERROR) - raise Exception(error_msg) - - # region - Auto-assessment extensions - def should_auto_assessment_run(self): - # get last start time - try: - assessment_state = self.read_assessment_state() - last_start_in_seconds_since_epoch = assessment_state['lastStartInSecondsSinceEpoch'] - except Exception as error: - self.composite_logger.log_warning("No valid last start information available for auto-assessment.") - return True - - # get minimum elapsed time required - difference between max allowed (passed down) and a safe buffer to prevent exceeding that - maximum_assessment_interval_in_seconds = self.convert_iso8601_duration_to_total_seconds(self.execution_config.maximum_assessment_interval) - maximum_assessment_interval_buffer_in_seconds = 
self.convert_iso8601_duration_to_total_seconds(Constants.AUTO_ASSESSMENT_INTERVAL_BUFFER) - minimum_elapsed_time_required_in_seconds = maximum_assessment_interval_in_seconds - maximum_assessment_interval_buffer_in_seconds - - # check if required duration has passed - elapsed_time_in_seconds = self.__get_seconds_since_epoch() - last_start_in_seconds_since_epoch - if elapsed_time_in_seconds < 0: - self.composite_logger.log_warning("Anomaly detected in system time now or during the last assessment run. Assessment will run anyway.") - return True - else: - return elapsed_time_in_seconds >= minimum_elapsed_time_required_in_seconds - - def read_assessment_state(self): - """ Reads the assessment state file. """ - self.composite_logger.log_debug("Reading assessment state...") - if not os.path.exists(self.assessment_state_file_path) or not os.path.isfile(self.assessment_state_file_path): - # Neutralizes directories - if os.path.isdir(self.assessment_state_file_path): - self.composite_logger.log_error("Assessment state file path returned a directory. Attempting to reset.") - shutil.rmtree(self.assessment_state_file_path) - # Writes a vanilla assessment statefile - self.write_assessment_state(first_write=True) - - # Read (with retries for only IO Errors) - for i in range(0, Constants.MAX_FILE_OPERATION_RETRY_COUNT): - try: - with self.env_layer.file_system.open(self.assessment_state_file_path, mode="r") as file_handle: - return json.load(file_handle)['assessmentState'] - except Exception as error: - if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on assessment state read. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) - time.sleep(i + 1) - else: - self.composite_logger.log_error("Unable to read assessment state file (retries exhausted). 
[Exception={0}]".format(repr(error))) - raise - - def write_assessment_state(self, first_write=False): - """ - AssessmentState.json sample structure: - { - "number": "", - "lastStartInSecondsSinceEpoch": "", - "lastHeartbeat": "", - "processIds": ["", ...], - "autoAssessment": "" - } - """ - self.composite_logger.log_debug("Updating assessment state... ") - - # lastHeartbeat below is redundant, but is present for ease of debuggability - assessment_state = {'number': self.execution_config.sequence_number, - # Set lastStartInSecondsSinceEpoch to 0 if file did not exist before (first write) to ensure it can run assessment when first created - 'lastStartInSecondsSinceEpoch': self.__get_seconds_since_epoch() if not first_write else 0, - 'lastHeartbeat': str(self.env_layer.datetime.timestamp()), - 'processIds': [os.getpid()], - 'autoAssessment': str(self.execution_config.exec_auto_assess_only)} - assessment_state_payload = json.dumps({"assessmentState": assessment_state}) - - if os.path.isdir(self.assessment_state_file_path): - self.composite_logger.log_error("Assessment state file path returned a directory. Attempting to reset.") - shutil.rmtree(self.assessment_state_file_path) - - for i in range(0, Constants.MAX_FILE_OPERATION_RETRY_COUNT): - try: - with self.env_layer.file_system.open(self.assessment_state_file_path, 'w+') as file_handle: - file_handle.write(assessment_state_payload) - break - except Exception as error: - if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on assessment state update. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) - time.sleep(i + 1) - else: - self.composite_logger.log_error("Unable to write to assessment state file (retries exhausted). 
[Exception={0}]".format(repr(error))) - raise - - self.composite_logger.log_debug("Completed updating assessment state.") - - @staticmethod - def __get_seconds_since_epoch(): - return int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds()) - - def convert_iso8601_duration_to_total_seconds(self, duration): - """ - No non-default period (Y,M,W,D) is supported. Time is supported (H,M,S). - """ - remaining = str(duration) - if 'PT' not in remaining: - raise Exception("Unexpected duration format. [Duration={0}]".format(duration)) - - discard, remaining = self.__extract_most_significant_unit_from_duration(remaining, 'PT') - hours, remaining = self.__extract_most_significant_unit_from_duration(remaining, 'H') - minutes, remaining = self.__extract_most_significant_unit_from_duration(remaining, 'M') - seconds, remaining = self.__extract_most_significant_unit_from_duration(remaining, 'S') - - return datetime.timedelta(hours=int(hours), minutes=int(minutes), seconds=int(seconds)).total_seconds() - - @staticmethod - def __extract_most_significant_unit_from_duration(duration_portion, unit_delimiter): - """ Internal helper function """ - duration_split = duration_portion.split(unit_delimiter) - most_significant_unit = 0 - remaining_duration_portion = '' - if len(duration_split) == 2: # found and extracted - most_significant_unit = duration_split[0] - remaining_duration_portion = duration_split[1] - elif len(duration_split) == 1: # not found - remaining_duration_portion = duration_split[0] - - return most_significant_unit, remaining_duration_portion diff --git a/src/core/src/core_logic/RebootManager.py b/src/core/src/core_logic/RebootManager.py index fc41de60f..d1436b2de 100644 --- a/src/core/src/core_logic/RebootManager.py +++ b/src/core/src/core_logic/RebootManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,13 @@ # Requires Python 2.7+ """Reboot management""" -import datetime -import subprocess import time from core.src.bootstrap.Constants import Constants class RebootManager(object): """Implements the reboot management logic""" - def __init__(self, env_layer, execution_config, composite_logger, status_handler, package_manager, default_reboot_setting='IfRequired'): + def __init__(self, env_layer, execution_config, composite_logger, status_handler, package_manager, default_reboot_setting=Constants.RebootSettings.IF_REQUIRED): self.execution_config = execution_config self.composite_logger = composite_logger @@ -31,7 +29,7 @@ def __init__(self, env_layer, execution_config, composite_logger, status_handler self.status_handler = status_handler self.env_layer = env_layer - self.minutes_to_shutdown = str((Constants.REBOOT_BUFFER_IN_MINUTES - 5) if (Constants.REBOOT_BUFFER_IN_MINUTES > 5) else Constants.REBOOT_BUFFER_IN_MINUTES) # give at least 5 minutes for a reboot unless the buffer is configured to be lower than that + self.minutes_to_shutdown = str((Constants.Config.REBOOT_BUFFER_IN_MINUTES - 5) if (Constants.Config.REBOOT_BUFFER_IN_MINUTES > 5) else Constants.Config.REBOOT_BUFFER_IN_MINUTES) # give at least 5 minutes for a reboot unless the buffer is configured to be lower than that self.reboot_cmd = 'sudo shutdown -r ' self.maintenance_window_exceeded_flag = False @@ -40,30 +38,27 @@ def __init__(self, env_layer, execution_config, composite_logger, status_handler @staticmethod def is_reboot_time_available(current_time_available): """ Check if time still available for system reboot """ - return current_time_available >= Constants.REBOOT_BUFFER_IN_MINUTES + return current_time_available 
>= Constants.Config.REBOOT_BUFFER_IN_MINUTES # REBOOT SETTING # ============== - def sanitize_reboot_setting(self, reboot_setting_key, default_reboot_setting): + def sanitize_reboot_setting(self, reboot_setting_selected, default_reboot_setting): """ Ensures that the value obtained is one we know what to do with. """ - reboot_setting = Constants.REBOOT_SETTINGS[default_reboot_setting] - - try: - reboot_setting = Constants.REBOOT_SETTINGS[reboot_setting_key] - except KeyError: - error_msg = 'Invalid reboot setting detected in update configuration: ' + str(reboot_setting_key) - self.composite_logger.log_error(error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - self.composite_logger.log_warning('Defaulting reboot setting to: ' + str(default_reboot_setting)) - finally: - return reboot_setting + reboot_setting_selected = reboot_setting_selected.lower() + + for setting in (Constants.RebootSettings.NEVER, Constants.RebootSettings.IF_REQUIRED, Constants.RebootSettings.ALWAYS): + if reboot_setting_selected == setting.lower(): + return setting + + self.status_handler.add_error_to_status_and_log_error(Constants.Errors.INVALID_REBOOT_SETTING.format(str(reboot_setting_selected), str(default_reboot_setting))) + return default_reboot_setting def is_setting(self, setting_to_check): return self.reboot_setting == setting_to_check # REBOOT ACTION # ============= - def start_reboot(self, message="Azure Patch Management initiated a reboot after a patch installation run."): + def start_reboot(self, message="Azure Guest Patching Service initiated a reboot after a patch installation run."): """ Perform a system reboot """ self.composite_logger.log("\nThe machine is set to reboot in " + self.minutes_to_shutdown + " minutes.") @@ -72,7 +67,7 @@ def start_reboot(self, message="Azure Patch Management initiated a reboot after self.env_layer.reboot_machine(self.reboot_cmd + self.minutes_to_shutdown + ' ' + message) # Wait for 
timeout - max_allowable_time_to_reboot_in_minutes = int(self.minutes_to_shutdown) + Constants.REBOOT_WAIT_TIMEOUT_IN_MINUTES + max_allowable_time_to_reboot_in_minutes = int(self.minutes_to_shutdown) + Constants.Config.REBOOT_WAIT_TIMEOUT_IN_MINUTES while 1: current_time = self.env_layer.datetime.datetime_utcnow() elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - reboot_init_time) @@ -83,38 +78,33 @@ def start_reboot(self, message="Azure Patch Management initiated a reboot after raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) else: self.composite_logger.file_logger.flush() - self.composite_logger.log("Waiting for machine reboot. [ElapsedTimeInMinutes={0}] [MaxTimeInMinutes={1}]".format(str(elapsed_time_in_minutes), str(max_allowable_time_to_reboot_in_minutes))) + self.composite_logger.log_verbose("Waiting for machine reboot. [ElapsedTimeInMinutes={0}][MaxTimeInMinutes={1}]".format(str(elapsed_time_in_minutes), str(max_allowable_time_to_reboot_in_minutes))) self.composite_logger.file_logger.flush() time.sleep(60) def start_reboot_if_required_and_time_available(self, current_time_available): """ Starts a reboot if required. Happens only at the end of the run if required. """ - self.composite_logger.log("\nReboot Management") + self.composite_logger.log_verbose("[RM] Starting reboot if required and time available.") reboot_pending = self.is_reboot_pending() - if self.package_manager.force_reboot: - self.composite_logger.log("A reboot is pending as the package manager required it.") - # return false if never - if self.reboot_setting == Constants.REBOOT_NEVER: + if self.reboot_setting == Constants.RebootSettings.NEVER: if reboot_pending: - self.composite_logger.log_warning(' - There is a reboot pending, but reboot is blocked, as per patch installation configuration. 
(' + str(Constants.REBOOT_NEVER) + ')') - else: - self.composite_logger.log_warning(' - There is no reboot pending, and reboot is blocked regardless, as per patch installation configuration (' + str(Constants.REBOOT_NEVER) + ').') + self.status_handler.add_error_to_status_and_log_warning(message="Required reboot blocked by customer configuration. [RebootPending={0}][RebootSetting={1}]".format(str(reboot_pending), Constants.RebootSettings.NEVER)) return False # return if system doesn't require it (and only reboot if it does) - if self.reboot_setting == Constants.REBOOT_IF_REQUIRED and not reboot_pending: - self.composite_logger.log(" - There was no reboot pending detected. Reboot is being skipped as it's not required, as per patch installation configuration (" + str(Constants.REBOOT_IF_REQUIRED) + ").") + if self.reboot_setting == Constants.RebootSettings.IF_REQUIRED and not reboot_pending: + self.composite_logger.log(" - There was no reboot pending detected. Reboot is being skipped as it's not required, as per patch installation configuration (" + str(Constants.RebootSettings.IF_REQUIRED) + ").") return False # prevent repeated reboots - if self.reboot_setting == Constants.REBOOT_ALWAYS and not reboot_pending and self.status_handler.get_installation_reboot_status() == Constants.RebootStatus.COMPLETED: + if self.reboot_setting == Constants.RebootSettings.ALWAYS and not reboot_pending and self.status_handler.get_installation_reboot_status() == Constants.RebootStatus.COMPLETED: self.composite_logger.log(" - At least one reboot has occurred, and there's no reboot pending, so the conditions for the 'Reboot Always' setting is fulfilled and reboot won't be repeated.") return False # attempt to reboot is enough time is available - if self.reboot_setting == Constants.REBOOT_ALWAYS or (self.reboot_setting == Constants.REBOOT_IF_REQUIRED and reboot_pending): + if self.reboot_setting == Constants.RebootSettings.ALWAYS or (self.reboot_setting == 
Constants.RebootSettings.IF_REQUIRED and reboot_pending): if self.is_reboot_time_available(current_time_available): self.composite_logger.log(' - Reboot is being scheduled, as per patch installation configuration (' + str(self.reboot_setting) + ').') self.composite_logger.log(" - Reboot-pending status: " + str(reboot_pending)) @@ -128,4 +118,5 @@ def start_reboot_if_required_and_time_available(self, current_time_available): return False def is_reboot_pending(self): - return self.package_manager.force_reboot or (self.status_handler and self.status_handler.is_reboot_pending) + return self.package_manager.is_reboot_pending() or self.package_manager.force_reboot or (self.status_handler and self.status_handler.is_reboot_pending) + diff --git a/src/core/src/core_logic/ServiceManager.py b/src/core/src/core_logic/ServiceManager.py index 6e865a6e9..9794af978 100644 --- a/src/core/src/core_logic/ServiceManager.py +++ b/src/core/src/core_logic/ServiceManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/core_logic/Stopwatch.py b/src/core/src/core_logic/Stopwatch.py index 0fc52452d..2855fe27f 100644 --- a/src/core/src/core_logic/Stopwatch.py +++ b/src/core/src/core_logic/Stopwatch.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,15 +16,16 @@ from core.src.bootstrap.Constants import Constants + class Stopwatch(object): """Implements the stopwatch logic""" class StopwatchException(Constants.EnumBackport): # Stopwatch exception strings - STARTED_ALREADY = "Stopwatch is already started" - NOT_STARTED = "Stopwatch is not started" - NOT_STOPPED = "Stopwatch is not stoppped" - STOPPED_ALREADY = "Stopwatch is already stoppped" + STARTED_ALREADY = "[SW] Start attempted on already-started stopwatch." # bug in call-stack if logged + NOT_STARTED = "[SW] Stop attempted on non-started stopwatch." # bug in call-stack if logged + NOT_STOPPED = "[SW] Stopwatch is not stopped." + STOPPED_ALREADY = "[SW] Stop attempted on already-stopped stopwatch." # bug in call-stack if logged def __init__(self, env_layer, telemetry_writer, composite_logger): self.env_layer = env_layer @@ -40,10 +41,11 @@ def __del__(self): # call stop only if end_time is None otherwise stop() is already called. if self.start_time is not None and self.end_time is None: self.stop() - self.set_task_details("") - self.composite_logger.log("Stopwatch details before instance is destroyed: " + self.task_details) + self.__set_task_details("") + self.composite_logger.log_debug("[SW] Stopwatch destroyed in unexpected state. " + self.task_details) # bug or some other issue in call-stack def start(self): + """ Start the stopwatch and sets start_time. Resets other fields. 
""" if self.start_time is not None: self.composite_logger.log_debug(str(Stopwatch.StopwatchException.STARTED_ALREADY)) self.start_time = self.env_layer.datetime.datetime_utcnow() @@ -51,8 +53,8 @@ def start(self): self.time_taken_in_secs = None self.task_details = None - # Stop the stopwatch and set end_time. Create new end_time even if end_time is already set def stop(self): + """ Stop the stopwatch and set end_time. Create new end_time even if end_time is already set """ if self.end_time is not None: self.composite_logger.log_debug(str(Stopwatch.StopwatchException.STOPPED_ALREADY)) self.end_time = self.env_layer.datetime.datetime_utcnow() @@ -62,24 +64,25 @@ def stop(self): self.time_taken_in_secs = self.env_layer.datetime.total_seconds_from_time_delta_round_to_one_decimal_digit(self.end_time - self.start_time) - # Stop the stopwatch, set end_time and write details in telemetry. Create new end_time even if end_time is already set def stop_and_write_telemetry(self, message): + """ Stop the stopwatch, set end_time and write details in telemetry. Create new end_time even if end_time is already set """ self.stop() - self.set_task_details(message) - self.composite_logger.log("Stopwatch details: " + self.task_details) + self.__set_task_details(message) + self.composite_logger.log("Stopwatch details: " + self.task_details) # needs to change to "[SW] Stopwatch terminal log. " + self.task_details || not changing yet to avoid disruption to querying - # Write stopwatch details in telemetry. Use the existing end_time if it is already set otherwise set new end_time def write_telemetry_for_stopwatch(self, message): + """ Write stopwatch details in telemetry. 
Use the existing end_time if it is already set otherwise set new end_time """ if self.end_time is None: - self.composite_logger.log_debug(str(Stopwatch.StopwatchException.NOT_STOPPED)) + self.composite_logger.log_verbose(str(Stopwatch.StopwatchException.NOT_STOPPED)) self.end_time = self.env_layer.datetime.datetime_utcnow() if self.start_time is None: self.composite_logger.log_debug(str(Stopwatch.StopwatchException.NOT_STARTED)) self.start_time = self.end_time self.time_taken_in_secs = self.env_layer.datetime.total_seconds_from_time_delta_round_to_one_decimal_digit(self.end_time - self.start_time) - self.set_task_details(message) - self.composite_logger.log("Stopwatch details: " + str(self.task_details)) + self.__set_task_details(message) + self.composite_logger.log("Stopwatch details: " + str(self.task_details)) # needs to change to "[SW] Stopwatch intermediate log. " + self.task_details || not changing yet to avoid disruption to querying - def set_task_details(self, message): + def __set_task_details(self, message): self.task_details = "[{0}={1}][{2}={3}][{4}={5}][{6}={7}]".format(Constants.PerfLogTrackerParams.MESSAGE, str(message), Constants.PerfLogTrackerParams.TIME_TAKEN_IN_SECS, str(self.time_taken_in_secs), - Constants.PerfLogTrackerParams.START_TIME, str(self.start_time), Constants.PerfLogTrackerParams.END_TIME, str(self.end_time)) + Constants.PerfLogTrackerParams.START_TIME, str(self.start_time), Constants.PerfLogTrackerParams.END_TIME, str(self.end_time)) + diff --git a/src/core/src/core_logic/SystemctlManager.py b/src/core/src/core_logic/SystemctlManager.py index 73e125d50..daff3d565 100644 --- a/src/core/src/core_logic/SystemctlManager.py +++ b/src/core/src/core_logic/SystemctlManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -49,10 +49,9 @@ def systemctl_daemon_reload(self): def invoke_systemctl(self, command, action_description=None): """ Invokes systemctl with the specified command and standardized logging """ - self.composite_logger.log('[Invoking systemctl] Action: ' + str(action_description) + ' Command: ' + command) + self.composite_logger.log_debug("[SCM] Invoking systemctl. [Action={0}][Cmd={1}]".format(str(action_description), command)) self.composite_logger.file_logger.flush() code, out = self.env_layer.run_command_output(command, False, False) - out = ("\n|\t" + "\n|\t".join(out.splitlines())) if out.strip() != "" else "None" - self.composite_logger.log_debug(" - Return code: " + str(code) + ". Output: " + out) + self.composite_logger.log_debug("[SCM] |- Systemctl invocation results. [Code={0}][Output={1}]".format(str(code), out)) self.composite_logger.file_logger.flush() return code, out diff --git a/src/core/src/core_logic/TimerManager.py b/src/core/src/core_logic/TimerManager.py index e10fa0cab..67366ec1b 100644 --- a/src/core/src/core_logic/TimerManager.py +++ b/src/core/src/core_logic/TimerManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/core_logic/__init__.py b/src/core/src/core_logic/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/core_logic/__init__.py +++ b/src/core/src/core_logic/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/core_logic/patch_operators/ConfigurePatchingProcessor.py b/src/core/src/core_logic/patch_operators/ConfigurePatchingProcessor.py new file mode 100644 index 000000000..645e73313 --- /dev/null +++ b/src/core/src/core_logic/patch_operators/ConfigurePatchingProcessor.py @@ -0,0 +1,147 @@ +# Copyright 2020 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +""" Configure Patching """ +from core.src.bootstrap.Constants import Constants +from core_logic.patch_operators.PatchOperator import PatchOperator + +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PackageManager import PackageManager +from core.src.core_logic.ServiceManager import ServiceManager +from core.src.core_logic.TimerManager import TimerManager +from core.src.service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager + + +class ConfigurePatchingProcessor(PatchOperator): + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, auto_assess_service_manager, auto_assess_timer_manager, lifecycle_manager): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PackageManager, ServiceManager, TimerManager, LifecycleManager) -> None + super(ConfigurePatchingProcessor, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager, operation_name=Constants.Op.CONFIGURE_PATCHING) + self.current_auto_os_patch_state = Constants.AutomaticOSPatchStates.UNKNOWN + self.current_auto_assessment_state = Constants.AutoAssessmentStates.UNKNOWN + + # Auto-assessment bits + self.auto_assess_service_manager = auto_assess_service_manager + self.auto_assess_timer_manager = auto_assess_timer_manager + + # region - PatchOperator interface implementations + def should_operation_run(self): + """ [Interface implementation] Performs evaluation of if the specific operation should be running at all 
""" + self.lifecycle_manager.lifecycle_status_check() + return True + + def start_retryable_operation_unit(self): + """ [Interface implementation] The core retryable actions for patch operation """ + self.operation_successful = True + self.operation_exception_error = None + self.__try_set_patch_mode() + self.__try_set_auto_assessment_mode() + + if self.execution_config.operation.lower() != Constants.Op.CONFIGURE_PATCHING.lower(): + # Mitigation for CRP bug ---- Final status for configure patching CANNOT be written until assessment is complete. Okay to write for other operations. + self.set_final_operation_status() + + def process_operation_terminal_exception(self, error): + """ [Interface implementation] Exception handling post all retries for the patch operation """ + self.current_auto_assessment_state = Constants.AutoAssessmentStates.ERROR if self.current_auto_assessment_state not in (Constants.AutoAssessmentStates.ENABLED, Constants.AutoAssessmentStates.DISABLED) else self.current_auto_assessment_state + self.operation_exception_error = error + + if self.execution_config.operation != Constants.Op.CONFIGURE_PATCHING.lower(): + # Mitigation for CRP bug ---- Final status for configure patching CANNOT be written until assessment is complete. Okay to write for other operations. 
+ self.set_operation_status(status=Constants.Status.ERROR, error=self.operation_exception_error) + self.operation_successful &= False + + def set_final_operation_status(self): + """ [Interface implementation] Business logic to write the final status (implicitly covering external dependencies from external callers) """ + """ Writes the final overall status after any pre-requisite operation is also in a terminal state - currently this is only assessment """ + overall_status = Constants.Status.SUCCESS if self.operation_successful else Constants.Status.ERROR + self.set_operation_status(status=overall_status, error=self.operation_exception_error) + + def set_operation_status(self, status=Constants.Status.TRANSITIONING, error=None): + """ [Interface implementation] Generic operation status setter """ + self.operation_status = status + self.composite_logger.log_debug("[CPP] Reporting consolidated current configure patch status. [OSPatchState={0}][AssessmentState={1}]".format(self.current_auto_os_patch_state, self.current_auto_assessment_state)) + if error is not None: + self.status_handler.add_error_to_status_and_log_error(message="Error in configure patching operation. 
[Error={0}] ".format(repr(error)), raise_exception=False) + self.status_handler.set_configure_patching_substatus_json(status=status, automatic_os_patch_state=self.current_auto_os_patch_state, auto_assessment_state=self.current_auto_assessment_state) + # endregion - PatchOperator interface implementations + + # region - Retryable operation support + def __try_set_patch_mode(self): + """ Set the patch mode for the VM """ + try: + self.status_handler.set_current_operation(Constants.Op.CONFIGURE_PATCHING) + self.current_auto_os_patch_state = self.package_manager.patch_mode_manager.get_current_auto_os_patch_state() + + if self.execution_config.patch_mode == Constants.PatchModes.AUTOMATIC_BY_PLATFORM and self.current_auto_os_patch_state != Constants.AutomaticOSPatchStates.DISABLED: + # disable auto OS updates if VM is configured for platform updates only. + # NOTE: this condition will be false for Assessment operations, since patchMode is not sent in the API request + self.package_manager.patch_mode_manager.disable_auto_os_update() + self.current_auto_os_patch_state = self.package_manager.patch_mode_manager.get_current_auto_os_patch_state() + elif self.execution_config.patch_mode == Constants.PatchModes.IMAGE_DEFAULT and self.current_auto_os_patch_state == Constants.PatchModes.AUTOMATIC_BY_PLATFORM and self.package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists(): + raise Exception("PatchMode transition to ImageDefault is currently not supported.") # This was excluded in the original PatchMode implementation, and not caught later in the backlog. 
+ + if self.execution_config.patch_mode == Constants.PatchModes.AUTOMATIC_BY_PLATFORM and self.current_auto_os_patch_state == Constants.AutomaticOSPatchStates.UNKNOWN: + # NOTE: only sending details in error objects for customer visibility on why patch state is unknown, overall ConfigurePatching status will remain successful + self.operation_successful &= False + self.operation_exception_error = "Could not disable one or more automatic OS update services. Please check if they are configured correctly." + + self.composite_logger.log_verbose("[CPP] Completed processing patch mode configuration.") + except Exception as error: + self.composite_logger.log_error("Error while processing patch mode configuration. [Error={0}]".format(repr(error))) + self.operation_exception_error = error + self.operation_successful &= False + + def __try_set_auto_assessment_mode(self): + """ Sets the preferred auto-assessment mode for the VM """ + try: + self.status_handler.set_current_operation(Constants.Op.CONFIGURE_PATCHING_AUTO_ASSESSMENT) + self.composite_logger.log_debug("[CPP] Systemd information: {0}".format(str(self.auto_assess_service_manager.get_version()))) # proactive support telemetry + + if self.execution_config.assessment_mode is None: + self.composite_logger.log_warning("[CPP] No assessment mode config was present. 
Treating as disabled.") + elif self.execution_config.assessment_mode == Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM: + self.composite_logger.log_debug("[CPP] Enabling platform-based automatic assessment.") + if not self.auto_assess_service_manager.systemd_exists(): + self.status_handler.add_error_to_status_and_log_error(message=Constants.Errors.SYSTEMD_NOT_PRESENT, raise_exception=True, error_code=Constants.PatchOperationErrorCodes.CL_SYSTEMD_NOT_PRESENT) + self.auto_assess_service_manager.create_and_set_service_idem() + self.auto_assess_timer_manager.create_and_set_timer_idem() + self.current_auto_assessment_state = Constants.AutoAssessmentStates.ENABLED + elif self.execution_config.assessment_mode in Constants.AssessmentModes.IMAGE_DEFAULT: + self.composite_logger.log_debug("[CPP] Disabling platform-based automatic assessment.") + self.auto_assess_timer_manager.remove_timer() + self.auto_assess_service_manager.remove_service() + self.current_auto_assessment_state = Constants.AutoAssessmentStates.DISABLED + else: + raise Exception("Unknown AssessmentMode specified. [AssessmentMode={0}]".format(self.execution_config.assessment_mode)) + + self.set_operation_status() + self.composite_logger.log_verbose("[CPP] Completed processing automatic assessment mode configuration.") + except Exception as error: + # deliberately not setting self.operation_exception_error here as it does not feed into the parent object. Not a bug, if you're thinking about it. + self.composite_logger.log_error("Error while processing automatic assessment mode configuration. 
[Error={0}]".format(repr(error))) + self.set_operation_status(status=Constants.Status.TRANSITIONING, error=repr(error)) + self.operation_successful &= False + + # revert operation back to parent + self.composite_logger.log_verbose("[CPP] Restoring status handler operation to {0}.".format(Constants.Op.CONFIGURE_PATCHING)) + self.status_handler.set_current_operation(Constants.Op.CONFIGURE_PATCHING) + # endregion - Retryable operation support + diff --git a/src/core/src/core_logic/patch_operators/PatchAssessor.py b/src/core/src/core_logic/patch_operators/PatchAssessor.py new file mode 100644 index 000000000..95dd7d031 --- /dev/null +++ b/src/core/src/core_logic/patch_operators/PatchAssessor.py @@ -0,0 +1,223 @@ +# Copyright 2020 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +""" A patch assessment """ +import datetime +import json +import os +import shutil +import time +from core.src.bootstrap.Constants import Constants +from core_logic.patch_operators.PatchOperator import PatchOperator +from core.src.core_logic.Stopwatch import Stopwatch + +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PackageManager import PackageManager +from core.src.service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager + + +class PatchAssessor(PatchOperator): + """ Wrapper class of a single patch assessment """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PackageManager, LifecycleManager) -> None + super(PatchAssessor, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager, operation_name=Constants.Op.ASSESSMENT) + self.package_manager_name = self.package_manager.package_manager_name + self.assessment_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.ASSESSMENT) + self.stopwatch = Stopwatch(self.env_layer, self.telemetry_writer, self.composite_logger) + + # region - PatchOperator interface implementations + def should_operation_run(self): + """ [Interface implementation] Performs evaluation of if the specific operation should be running at all """ + if (not self.execution_config.exec_auto_assess_only) or 
(self.execution_config.exec_auto_assess_only and self.should_auto_assessment_run()): + return True # regular assessment or (auto-assessment that is eligible to run) + + self.composite_logger.log_debug("[PA] Skipping automatic patch assessment... [ShouldAutoAssessmentRun=False]\n") + self.lifecycle_manager.lifecycle_status_check() + return False + + def start_retryable_operation_unit(self): + """ [Interface implementation] The core retryable actions for patch operation """ + self.operation_successful = True + self.operation_exception_error = None + + self.write_assessment_state() # success / failure does not matter, only that an attempt started + self.package_manager.refresh_repo() + self.status_handler.reset_assessment_data() + + if self.lifecycle_manager is not None: + self.lifecycle_manager.lifecycle_status_check() # may terminate the code abruptly, as designed + + # All updates + packages, package_versions = self.package_manager.get_all_updates() + self.telemetry_writer.write_event("Full assessment: " + str(packages), Constants.EventLevel.Verbose) + self.status_handler.set_package_assessment_status(packages, package_versions) + if self.lifecycle_manager is not None: + self.lifecycle_manager.lifecycle_status_check() # may terminate the code abruptly, as designed + sec_packages, sec_package_versions = self.package_manager.get_security_updates() + + # Tag security updates + self.telemetry_writer.write_event("Security assessment: " + str(sec_packages), Constants.EventLevel.Verbose) + self.status_handler.set_package_assessment_status(sec_packages, sec_package_versions, Constants.PackageClassification.SECURITY) + + # Set the security-esm packages in status. 
+ self.package_manager.set_security_esm_package_status(Constants.Op.ASSESSMENT, packages=[]) + + # ensure reboot status is set + reboot_pending = self.package_manager.is_reboot_pending() + self.status_handler.set_reboot_pending(reboot_pending) + + self.status_handler.set_assessment_substatus_json(status=Constants.Status.SUCCESS) + self.set_final_operation_status() + + def process_operation_terminal_exception(self, error): + """ [Interface implementation] Exception handling post all retries for the patch operation """ + error_msg = "Error completing patch assessment. [Error={0}]".format(repr(error)) + self.operation_successful = False + self.operation_exception_error = error_msg + self.status_handler.set_assessment_substatus_json(status=Constants.Status.ERROR) + self.status_handler.add_error_to_status_and_log_error(message=error_msg, raise_exception=False) + + def set_final_operation_status(self): + """ [Interface implementation] Business logic to write the final status (implicitly covering external dependencies from external callers) """ + """ Writes the final overall status after any pre-requisite operation is also in a terminal state - currently this is only assessment """ + overall_status = Constants.Status.SUCCESS if self.operation_successful else Constants.Status.ERROR + self.set_operation_status(status=overall_status, error=self.operation_exception_error) + + def set_operation_status(self, status=Constants.Status.TRANSITIONING, error=None): + """ [Interface implementation] Generic operation status setter """ + self.operation_status = status + if error is not None: + self.status_handler.add_error_to_status_and_log_error(message="Error in patch assessment operation. 
[Error={0}]".format(repr(error)), raise_exception=False) + self.status_handler.set_assessment_substatus_json(status=status) + # endregion - PatchOperator interface implementations + + # region - Auto-assessment extensions + def should_auto_assessment_run(self): + """ Checks if enough time has passed since the last run """ + try: + assessment_state = self.read_assessment_state() + last_start_in_seconds_since_epoch = assessment_state['lastStartInSecondsSinceEpoch'] # get last start time + except Exception as error: + self.composite_logger.log_warning("[PA] No valid last start information available for auto-assessment.") + return True + + # get minimum elapsed time required - difference between max allowed (passed down) and a safe buffer to prevent exceeding that + maximum_assessment_interval_in_seconds = self.convert_iso8601_duration_to_total_seconds(self.execution_config.maximum_assessment_interval) + maximum_assessment_interval_buffer_in_seconds = self.convert_iso8601_duration_to_total_seconds(Constants.AUTO_ASSESSMENT_INTERVAL_BUFFER) + minimum_elapsed_time_required_in_seconds = maximum_assessment_interval_in_seconds - maximum_assessment_interval_buffer_in_seconds + + # check if required duration has passed + elapsed_time_in_seconds = self.__get_seconds_since_epoch() - last_start_in_seconds_since_epoch + if elapsed_time_in_seconds < 0: + self.composite_logger.log_warning("[PA] Anomaly detected in system time now or during the last assessment run. Assessment will run anyway.") + return True + else: + return elapsed_time_in_seconds >= minimum_elapsed_time_required_in_seconds + + def read_assessment_state(self): + """ Reads the assessment state file. 
""" + self.composite_logger.log_verbose("[PA] Reading assessment state...") + if not os.path.exists(self.assessment_state_file_path) or not os.path.isfile(self.assessment_state_file_path): + # Neutralizes directories + if os.path.isdir(self.assessment_state_file_path): + self.composite_logger.log_debug("[PA] Assessment state file path returned a directory. Attempting to reset.") + shutil.rmtree(self.assessment_state_file_path) + # Writes a vanilla assessment state file + self.write_assessment_state(first_write=True) + + # Read (with retries for only IO Errors) + for i in range(0, Constants.MAX_FILE_OPERATION_RETRY_COUNT): + try: + with self.env_layer.file_system.open(self.assessment_state_file_path, mode="r") as file_handle: + return json.load(file_handle)['assessmentState'] + except Exception as error: + if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: + self.composite_logger.log_verbose("[PA] Exception on assessment state read. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) + time.sleep(i + 1) + else: + self.composite_logger.log_error("[PA] Unable to read assessment state file (retries exhausted). [Exception={0}]".format(repr(error))) + raise + + def write_assessment_state(self, first_write=False): + """ + AssessmentState.json sample structure: + { + "number": "", + "lastStartInSecondsSinceEpoch": "", + "lastHeartbeat": "", + "processIds": ["", ...], + "autoAssessment": "" + } + """ + self.composite_logger.log_verbose("[PA] Updating assessment state... 
") + + # lastHeartbeat below is redundant, but is present for ease of debuggability + assessment_state = {'number': self.execution_config.sequence_number, + 'lastStartInSecondsSinceEpoch': self.__get_seconds_since_epoch() if not first_write else 0, # Set lastStartInSecondsSinceEpoch to 0 if file did not exist before (first write) to ensure it can run assessment when first created + 'lastHeartbeat': str(self.env_layer.datetime.timestamp()), + 'processIds': [os.getpid()], + 'autoAssessment': str(self.execution_config.exec_auto_assess_only)} + assessment_state_payload = json.dumps({"assessmentState": assessment_state}) + + if os.path.isdir(self.assessment_state_file_path): + self.composite_logger.log_debug("[PA] Assessment state file path returned a directory. Attempting to reset.") + shutil.rmtree(self.assessment_state_file_path) + + for i in range(0, Constants.MAX_FILE_OPERATION_RETRY_COUNT): + try: + with self.env_layer.file_system.open(self.assessment_state_file_path, 'w+') as file_handle: + file_handle.write(assessment_state_payload) + break + except Exception as error: + if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: + self.composite_logger.log_verbose("[PA] Exception on assessment state update. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) + time.sleep(i + 1) + else: + self.composite_logger.log_error("[PA] Unable to write to assessment state file (retries exhausted). [Exception={0}]".format(repr(error))) + raise + + self.composite_logger.log_verbose("[PA] Completed updating assessment state.") + + @staticmethod + def __get_seconds_since_epoch(): + return int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds()) + + @staticmethod + def convert_iso8601_duration_to_total_seconds(duration): + """ No non-default period (Y,M,W,D) is supported. Time is supported (H,M,S). """ + remaining = str(duration) + if 'PT' not in remaining: + raise Exception("Unexpected duration format. 
[Duration={0}]".format(duration)) + + def __extract_most_significant_unit_from_duration(duration_portion, unit_delimiter): + duration_split = duration_portion.split(unit_delimiter) + duration_split_len = len(duration_split) + most_significant_unit = 0 if duration_split_len != 2 else duration_split[0] + remaining_duration_portion = '' if duration_split_len == 0 else duration_split[duration_split_len - 1] + return most_significant_unit, remaining_duration_portion + + discard, remaining = __extract_most_significant_unit_from_duration(remaining, 'PT') + hours, remaining = __extract_most_significant_unit_from_duration(remaining, 'H') + minutes, remaining = __extract_most_significant_unit_from_duration(remaining, 'M') + seconds, remaining = __extract_most_significant_unit_from_duration(remaining, 'S') + + return datetime.timedelta(hours=int(hours), minutes=int(minutes), seconds=int(seconds)).total_seconds() + diff --git a/src/core/src/core_logic/PatchInstaller.py b/src/core/src/core_logic/patch_operators/PatchInstaller.py similarity index 82% rename from src/core/src/core_logic/PatchInstaller.py rename to src/core/src/core_logic/patch_operators/PatchInstaller.py index a80c1436c..51dc03bca 100644 --- a/src/core/src/core_logic/PatchInstaller.py +++ b/src/core/src/core_logic/patch_operators/PatchInstaller.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,28 +17,36 @@ """ The patch install orchestrator """ import datetime import math -import sys import time from core.src.bootstrap.Constants import Constants +from core_logic.patch_operators.PatchOperator import PatchOperator from core.src.core_logic.Stopwatch import Stopwatch -class PatchInstaller(object): - """" Wrapper class for a single patch installation operation """ - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, lifecycle_manager, package_manager, package_filter, maintenance_window, reboot_manager): - self.env_layer = env_layer - self.execution_config = execution_config +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager +from core.src.package_managers.PackageManager import PackageManager +from core.src.core_logic.PackageFilter import PackageFilter +from core.src.core_logic.MaintenanceWindow import MaintenanceWindow +from core.src.core_logic.RebootManager import RebootManager - self.composite_logger = composite_logger - self.telemetry_writer = telemetry_writer - self.status_handler = status_handler - self.lifecycle_manager = lifecycle_manager - self.package_manager = package_manager - self.package_manager_name = 
self.package_manager.get_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY) +class PatchInstaller(PatchOperator): + """" Wrapper class for a single patch installation operation """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, lifecycle_manager, package_manager, package_filter, maintenance_window, reboot_manager): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, LifecycleManager, PackageManager, PackageFilter, MaintenanceWindow, RebootManager) -> None + super(PatchInstaller, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager, operation_name=Constants.Op.INSTALLATION) + self.package_manager_name = self.package_manager.package_manager_name self.package_filter = package_filter self.maintenance_window = maintenance_window self.reboot_manager = reboot_manager + self.operation_successful_incl_assessment = False + self.last_still_needed_packages = None # Used for 'Installed' status records self.last_still_needed_package_versions = None self.progress_template = "[Time available: {0} | A: {1}, S: {2}, F: {3} | D: {4}]\t {5}" @@ -52,77 +60,112 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.stopwatch = Stopwatch(self.env_layer, self.telemetry_writer, self.composite_logger) - def start_installation(self, simulate=False): - """ Kick off a patch installation run """ - self.status_handler.set_current_operation(Constants.INSTALLATION) - self.raise_if_telemetry_unsupported() - self.raise_if_min_python_version_not_met() - - self.composite_logger.log('\nStarting patch installation...') - - self.stopwatch.start() - - self.composite_logger.log("\nMachine Id: " + self.env_layer.platform.node()) - self.composite_logger.log("Activity Id: " + self.execution_config.activity_id) - self.composite_logger.log("Operation request time: " + self.execution_config.start_time + 
", Maintenance Window Duration: " + self.execution_config.duration) - - maintenance_window = self.maintenance_window - package_manager = self.package_manager - reboot_manager = self.reboot_manager + # region - PatchOperator interface implementations + def should_operation_run(self): + """ [Interface implementation] Performs evaluation of if the specific operation should be running at all """ + self.lifecycle_manager.lifecycle_status_check() + return True - # Early reboot if reboot is allowed by settings and required by the machine - reboot_pending = self.package_manager.is_reboot_pending() + def start_retryable_operation_unit(self): + """ [Interface implementation] The core retryable actions for patch operation """ + self.operation_successful = True + self.operation_exception_error = None + + self.start_installation() + + self.status_handler.set_installation_substatus_json(status=Constants.Status.SUCCESS) + self.set_final_operation_status() + + def process_operation_terminal_exception(self, error): + """ [Interface implementation] Exception handling post all retries for the patch operation """ + error_msg = "Error completing patch assessment. 
[Error={0}]".format(repr(error)) + self.operation_successful = False + self.operation_exception_error = error_msg + self.status_handler.set_installation_substatus_json()(status=Constants.Status.ERROR) + self.status_handler.add_error_to_status_and_log_error(message=error_msg, raise_exception=False) + + def set_final_operation_status(self): + """ [Interface implementation] Business logic to write the final status (implicitly covering external dependencies from external callers) """ + """ Writes the final overall status after any pre-requisite operation is also in a terminal state - currently this is only assessment """ + overall_status = Constants.Status.SUCCESS if self.operation_successful else Constants.Status.ERROR + self.set_operation_status(status=overall_status, error=self.operation_exception_error) + + def set_operation_status(self, status=Constants.Status.TRANSITIONING, error=None): + """ [Interface implementation] Generic operation status setter """ + self.operation_status = status + if error is not None: + self.status_handler.add_error_to_status_and_log_error(message="Error in patch assessment operation. 
[Error={0}]".format(repr(error)), raise_exception=False) + self.status_handler.set_assessment_substatus_json(status=status) + # endregion - PatchOperator interface implementations + + def __process_pre_installation_reboot_state(self): + # type: () -> None + """ Write reboot status information & reboots the machine if needed and allowed """ + reboot_pending = self.reboot_manager.is_reboot_pending() self.status_handler.set_reboot_pending(reboot_pending) if reboot_pending: - if reboot_manager.is_setting(Constants.REBOOT_NEVER): - self.composite_logger.log_warning("/!\\ There was a pending reboot on the machine before any package installations started.\n" + - " Consider re-running the patch installation after a reboot if any packages fail to install due to this.") + if self.reboot_manager.is_setting(Constants.RebootSettings.NEVER): + self.status_handler.add_error_to_status_and_log_warning(message="Required reboot blocked by customer configuration - higher failure probability! [RebootPending={0}][RebootSetting={1}]".format(str(reboot_pending),Constants.RebootSettings.NEVER)) else: - self.composite_logger.log_debug("Attempting to reboot the machine prior to patch installation as there is a reboot pending...") - reboot_manager.start_reboot_if_required_and_time_available(maintenance_window.get_remaining_time_in_minutes(None, False)) + self.reboot_manager.start_reboot_if_required_and_time_available(self.maintenance_window.get_remaining_time_in_minutes()) + + def start_installation(self, simulate=False): + """ Kick off a patch installation run """ + self.__process_pre_installation_reboot_state() # Install Updates - installed_update_count, update_run_successful, maintenance_window_exceeded = self.install_updates(maintenance_window, package_manager, simulate) + installed_update_count, update_run_successful, maintenance_window_exceeded = self.install_updates(self.maintenance_window, self.package_manager, simulate) retry_count = 1 # Repeat patch installation if flagged as 
required and time is available - if not maintenance_window_exceeded and package_manager.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False): + if not maintenance_window_exceeded and self.package_manager.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False): self.composite_logger.log("\nInstalled update count (first round): " + str(installed_update_count)) self.composite_logger.log("\nPatch installation run will be repeated as the package manager recommended it --------------------------------------------->") - package_manager.set_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False) # Resetting - new_installed_update_count, update_run_successful, maintenance_window_exceeded = self.install_updates(maintenance_window, package_manager, simulate) + self.package_manager.set_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False) # Resetting + new_installed_update_count, update_run_successful, maintenance_window_exceeded = self.install_updates(self.maintenance_window, self.package_manager, simulate) installed_update_count += new_installed_update_count retry_count = retry_count + 1 - if package_manager.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False): # We should not see this again + if self.package_manager.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False): # We should not see this again error_msg = "Unexpected repeated package manager update occurred. Please re-run the update deployment." 
- self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - self.write_installer_perf_logs(update_run_successful, installed_update_count, retry_count, maintenance_window, maintenance_window_exceeded, Constants.TaskStatus.FAILED, error_msg) + self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) + self.write_installer_perf_logs(update_run_successful, installed_update_count, retry_count, self.maintenance_window, maintenance_window_exceeded, Constants.TaskStatus.FAILED, error_msg) raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) self.composite_logger.log("\nInstalled update count: " + str(installed_update_count) + " (including dependencies)") - self.write_installer_perf_logs(update_run_successful, installed_update_count, retry_count, maintenance_window, maintenance_window_exceeded, Constants.TaskStatus.SUCCEEDED, "") + self.write_installer_perf_logs(update_run_successful, installed_update_count, retry_count, self.maintenance_window, maintenance_window_exceeded, Constants.TaskStatus.SUCCEEDED, "") # Reboot as per setting and environment state - reboot_manager.start_reboot_if_required_and_time_available(maintenance_window.get_remaining_time_in_minutes(None, False)) - maintenance_window_exceeded = maintenance_window_exceeded or reboot_manager.maintenance_window_exceeded_flag + self.reboot_manager.start_reboot_if_required_and_time_available(self.maintenance_window.get_remaining_time_in_minutes()) + maintenance_window_exceeded = maintenance_window_exceeded or self.reboot_manager.maintenance_window_exceeded_flag # Combining maintenance overall_patch_installation_successful = bool(update_run_successful and not maintenance_window_exceeded) # NOTE: Not updating installation substatus at this point because we need to wait for the implicit/second assessment to complete first, as per CRP's instructions + 
self.composite_logger.log("|\nCOMPLETED PATCH INSTALLATION.") + return overall_patch_installation_successful + def set_additional_operation_specific_perf_logs(self, installed_patch_count, maintenance_window_exceeded): + # type: (int, bool) -> None + """ Sets internal state for additional operation specific performance logs. This will be appended when logged. """ + self.additional_operation_specific_perf_logs = "[{0}={1}][{2}={3}][{4}={5}][{6}={7}]".format( + Constants.PerfLogTrackerParams.INSTALLED_PATCH_COUNT, str(installed_patch_count), + Constants.PerfLogTrackerParams.MAINTENANCE_WINDOW, str(self.maintenance_window.duration), + Constants.PerfLogTrackerParams.MAINTENANCE_WINDOW_USED_PERCENT, str(self.maintenance_window.get_maintenance_window_used_as_percentage()), + Constants.PerfLogTrackerParams.MAINTENANCE_WINDOW_EXCEEDED, str(maintenance_window_exceeded)) + def write_installer_perf_logs(self, patch_operation_successful, installed_patch_count, retry_count, maintenance_window, maintenance_window_exceeded, task_status, error_msg): perc_maintenance_window_used = -1 try: - perc_maintenance_window_used = maintenance_window.get_percentage_maintenance_window_used() + perc_maintenance_window_used = maintenance_window.get_maintenance_window_used_as_percentage() except Exception as error: self.composite_logger.log_debug("Error in writing patch installation performance logs. 
Error is: " + repr(error)) patch_installation_perf_log = "[{0}={1}][{2}={3}][{4}={5}][{6}={7}][{8}={9}][{10}={11}][{12}={13}][{14}={15}][{16}={17}][{18}={19}][{20}={21}]".format( - Constants.PerfLogTrackerParams.TASK, Constants.INSTALLATION, Constants.PerfLogTrackerParams.TASK_STATUS, str(task_status), Constants.PerfLogTrackerParams.ERROR_MSG, error_msg, + Constants.PerfLogTrackerParams.TASK, Constants.Op.INSTALLATION, Constants.PerfLogTrackerParams.TASK_STATUS, str(task_status), Constants.PerfLogTrackerParams.ERROR_MSG, error_msg, Constants.PerfLogTrackerParams.PACKAGE_MANAGER, self.package_manager_name, Constants.PerfLogTrackerParams.PATCH_OPERATION_SUCCESSFUL, str(patch_operation_successful), Constants.PerfLogTrackerParams.INSTALLED_PATCH_COUNT, str(installed_patch_count), Constants.PerfLogTrackerParams.RETRY_COUNT, str(retry_count), Constants.PerfLogTrackerParams.MAINTENANCE_WINDOW, str(maintenance_window.duration), Constants.PerfLogTrackerParams.MAINTENANCE_WINDOW_USED_PERCENT, str(perc_maintenance_window_used), @@ -130,37 +173,19 @@ def write_installer_perf_logs(self, patch_operation_successful, installed_patch_ self.stopwatch.stop_and_write_telemetry(patch_installation_perf_log) return True - def raise_if_telemetry_unsupported(self): - if self.lifecycle_manager.get_vm_cloud_type() == Constants.VMCloudType.ARC and self.execution_config.operation not in [Constants.ASSESSMENT, Constants.INSTALLATION]: - self.composite_logger.log("Skipping telemetry compatibility check for Arc cloud type when operation is not manual") - return - if not self.telemetry_writer.is_telemetry_supported(): - error_msg = "{0}".format(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG) - self.composite_logger.log_error(error_msg) - raise Exception(error_msg) - - self.composite_logger.log("{0}".format(Constants.TELEMETRY_COMPATIBLE_MSG)) - - def raise_if_min_python_version_not_met(self): - if sys.version_info < (2, 7): - error_msg = 
Constants.PYTHON_NOT_COMPATIBLE_ERROR_MSG.format(sys.version_info) - self.composite_logger.log_error(error_msg) - self.status_handler.set_installation_substatus_json(status=Constants.STATUS_ERROR) - raise Exception(error_msg) - def install_updates(self, maintenance_window, package_manager, simulate=False): """wrapper function of installing updates""" self.composite_logger.log("\n\nGetting available updates...") package_manager.refresh_repo() packages, package_versions = package_manager.get_available_updates(self.package_filter) # Initial, ignoring exclusions - self.telemetry_writer.write_event("Initial package list: " + str(packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Initial package list: " + str(packages), Constants.EventLevel.Verbose) not_included_packages, not_included_package_versions = self.get_not_included_updates(package_manager, packages) - self.telemetry_writer.write_event("Not Included package list: " + str(not_included_packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Not Included package list: " + str(not_included_packages), Constants.EventLevel.Verbose) excluded_packages, excluded_package_versions = self.get_excluded_updates(package_manager, packages, package_versions) - self.telemetry_writer.write_event("Excluded package list: " + str(excluded_packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Excluded package list: " + str(excluded_packages), Constants.EventLevel.Verbose) packages, package_versions = self.filter_out_excluded_updates(packages, package_versions, excluded_packages) # honoring exclusions @@ -170,22 +195,22 @@ def install_updates(self, maintenance_window, package_manager, simulate=False): # Adding this after filtering excluded packages, so we don`t un-intentionally mark excluded esm-package status as failed. 
packages, package_versions, self.skipped_esm_packages, self.skipped_esm_package_versions, self.esm_packages_found_without_attach = package_manager.separate_out_esm_packages(packages, package_versions) - self.telemetry_writer.write_event("Final package list: " + str(packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Final package list: " + str(packages), Constants.EventLevel.Verbose) # Set initial statuses if not package_manager.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, False): # 'Not included' list is not accurate when a repeat is required - self.status_handler.set_package_install_status(not_included_packages, not_included_package_versions, Constants.NOT_SELECTED) - self.status_handler.set_package_install_status(excluded_packages, excluded_package_versions, Constants.EXCLUDED) - self.status_handler.set_package_install_status(packages, package_versions, Constants.PENDING) - self.status_handler.set_package_install_status(self.skipped_esm_packages, self.skipped_esm_package_versions, Constants.FAILED) + self.status_handler.set_package_install_status(not_included_packages, not_included_package_versions, Constants.PackageStatus.NOT_SELECTED) + self.status_handler.set_package_install_status(excluded_packages, excluded_package_versions, Constants.PackageStatus.EXCLUDED) + self.status_handler.set_package_install_status(packages, package_versions, Constants.PackageStatus.PENDING) + self.status_handler.set_package_install_status(self.skipped_esm_packages, self.skipped_esm_package_versions, Constants.PackageStatus.FAILED) self.composite_logger.log("\nList of packages to be updated: \n" + str(packages)) sec_packages, sec_package_versions = self.package_manager.get_security_updates() - self.telemetry_writer.write_event("Security packages out of the final package list: " + str(sec_packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Security packages out of the final 
package list: " + str(sec_packages), Constants.EventLevel.Verbose) self.status_handler.set_package_install_status_classification(sec_packages, sec_package_versions, classification="Security") # Set the security-esm package status. - package_manager.set_security_esm_package_status(Constants.INSTALLATION, packages) + package_manager.set_security_esm_package_status(Constants.Op.INSTALLATION, packages) self.composite_logger.log("\nNote: Packages that are neither included nor excluded may still be installed if an included package has a dependency on it.") # We will see this as packages going from NotSelected --> Installed. We could remove them preemptively from not_included_packages, but we're explicitly choosing not to. @@ -196,7 +221,7 @@ def install_updates(self, maintenance_window, package_manager, simulate=False): patch_installation_successful = True maintenance_window_exceeded = False all_packages, all_package_versions = package_manager.get_all_updates(True) # cached is fine - self.telemetry_writer.write_event("All available packages list: " + str(all_packages), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("All available packages list: " + str(all_packages), Constants.EventLevel.Verbose) self.last_still_needed_packages = list(all_packages) self.last_still_needed_package_versions = list(all_package_versions) @@ -271,11 +296,11 @@ def install_updates(self, maintenance_window, package_manager, simulate=False): self.include_dependencies(package_manager, [package], all_packages, all_package_versions, packages, package_versions, package_and_dependencies, package_and_dependency_versions) # parent package install (+ dependencies) and parent package result management - install_result = Constants.FAILED + install_result = Constants.PackageStatus.FAILED for i in range(0, Constants.MAX_INSTALLATION_RETRY_COUNT): install_result = package_manager.install_update_and_dependencies_and_get_status(package_and_dependencies, 
package_and_dependency_versions, simulate) - if install_result != Constants.INSTALLED: + if install_result != Constants.PackageStatus.INSTALLED: if i < Constants.MAX_INSTALLATION_RETRY_COUNT - 1: time.sleep(i + 1) self.composite_logger.log_warning("Retrying installation of package. [Package={0}]".format(package_manager.get_product_name(package_and_dependencies[0]))) @@ -285,12 +310,12 @@ def install_updates(self, maintenance_window, package_manager, simulate=False): # Update reboot pending status in status_handler self.status_handler.set_reboot_pending(self.package_manager.is_reboot_pending()) - if install_result == Constants.FAILED: - self.status_handler.set_package_install_status(package_manager.get_product_name(str(package_and_dependencies[0])), str(package_and_dependency_versions[0]), Constants.FAILED) + if install_result == Constants.PackageStatus.FAILED: + self.status_handler.set_package_install_status(package_manager.get_product_name(str(package_and_dependencies[0])), str(package_and_dependency_versions[0]), Constants.PackageStatus.FAILED) self.failed_parent_package_install_count += 1 patch_installation_successful = False - elif install_result == Constants.INSTALLED: - self.status_handler.set_package_install_status(package_manager.get_product_name(str(package_and_dependencies[0])), str(package_and_dependency_versions[0]), Constants.INSTALLED) + elif install_result == Constants.PackageStatus.INSTALLED: + self.status_handler.set_package_install_status(package_manager.get_product_name(str(package_and_dependencies[0])), str(package_and_dependency_versions[0]), Constants.PackageStatus.INSTALLED) self.successful_parent_package_install_count += 1 if package in self.last_still_needed_packages: index = self.last_still_needed_packages.index(package) @@ -308,7 +333,7 @@ def install_updates(self, maintenance_window, package_manager, simulate=False): if package_manager.is_package_version_installed(dependency, dependency_version): self.composite_logger.log_debug(" - 
Marking dependency as succeeded: " + str(dependency) + "(" + str(dependency_version) + ")") - self.status_handler.set_package_install_status(package_manager.get_product_name(str(dependency)), str(dependency_version), Constants.INSTALLED) + self.status_handler.set_package_install_status(package_manager.get_product_name(str(dependency)), str(dependency_version), Constants.PackageStatus.INSTALLED) index = self.last_still_needed_packages.index(dependency) self.last_still_needed_packages.pop(index) self.last_still_needed_package_versions.pop(index) @@ -362,11 +387,15 @@ def log_final_metrics(self, maintenance_window, patch_installation_successful, m self.composite_logger.log(progress_status) if not patch_installation_successful or maintenance_window_exceeded: - message = "\n\nOperation status was marked as failed because: " - message += "[X] a failure occurred during the operation " if not patch_installation_successful else "" - message += "[X] maintenance window exceeded " if maintenance_window_exceeded else "" - self.status_handler.add_error_to_status(message, Constants.PatchOperationErrorCodes.OPERATION_FAILED) - self.composite_logger.log_error(message) + message = "\n\nOperation status was set to FAILED because " + if patch_installation_successful and maintenance_window_exceeded: + message += "maintenance window was exceeded." + elif not patch_installation_successful and not maintenance_window_exceeded: + message += "one or more errors occurred." + else: + message += "one or more errors occurred, and maintenance window was exceeded." 
+ + self.status_handler.add_error_to_status_and_log_error(message, raise_exception=False, error_code=Constants.PatchOperationErrorCodes.OPERATION_FAILED) def include_dependencies(self, package_manager, packages_in_batch, all_packages, all_package_versions, packages, package_versions, package_and_dependencies, package_and_dependency_versions): """ @@ -503,10 +532,10 @@ def install_packages_in_batches(self, all_packages, all_package_versions, packag for package,version in zip(package_and_dependencies, package_and_dependency_versions): install_result = package_manager.get_installation_status(code, out, exec_cmd, package, version, simulate) - if install_result == Constants.FAILED: + if install_result == Constants.PackageStatus.FAILED: if package in packages_in_batch: # parent package - self.status_handler.set_package_install_status(package_manager.get_product_name(str(package)), str(version), Constants.FAILED) + self.status_handler.set_package_install_status(package_manager.get_product_name(str(package)), str(version), Constants.PackageStatus.FAILED) self.failed_parent_package_install_count += 1 patch_installation_successful = False parent_packages_failed_in_batch_count += 1 @@ -515,8 +544,8 @@ def install_packages_in_batches(self, all_packages, all_package_versions, packag else: # dependent package number_of_dependencies_failed +=1 - elif install_result == Constants.INSTALLED: - self.status_handler.set_package_install_status(package_manager.get_product_name(str(package)), str(version), Constants.INSTALLED) + elif install_result == Constants.PackageStatus.INSTALLED: + self.status_handler.set_package_install_status(package_manager.get_product_name(str(package)), str(version), Constants.PackageStatus.INSTALLED) if package in packages_in_batch: # parent package self.successful_parent_package_install_count += 1 @@ -561,19 +590,19 @@ def install_packages_in_batches(self, all_packages, all_package_versions, packag def mark_installation_completed(self): """ Marks Installation 
operation as completed by updating the status of PatchInstallationSummary as success and patch metadata to be sent to healthstore. This is set outside of start_installation function to a restriction in CRP, where installation substatus should be marked as completed only after the implicit (2nd) assessment operation """ - self.status_handler.set_current_operation(Constants.INSTALLATION) # Required for status handler to log errors, that occur during marking installation completed, in installation substatus + self.status_handler.set_current_operation(Constants.Op.INSTALLATION) # Required for status handler to log errors, that occur during marking installation completed, in installation substatus # RebootNever is selected and pending, set status warning else success - if self.reboot_manager.reboot_setting == Constants.REBOOT_NEVER and self.reboot_manager.is_reboot_pending(): + if self.reboot_manager.reboot_setting == Constants.RebootSettings.NEVER and self.reboot_manager.is_reboot_pending(): # Set error details inline with windows extension when setting warning status. This message will be shown in portal. self.status_handler.add_error_to_status("Machine is Required to reboot. However, the customer-specified reboot setting doesn't allow reboots.", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - self.status_handler.set_installation_substatus_json(status=Constants.STATUS_WARNING) + self.status_handler.set_installation_substatus_json(status=Constants.Status.WARNING) else: - self.status_handler.set_installation_substatus_json(status=Constants.STATUS_SUCCESS) + self.status_handler.set_installation_substatus_json(status=Constants.Status.SUCCESS) # If esm packages are found, set the status as warning. This will show up in portal along with the error message we already set. 
if self.esm_packages_found_without_attach: - self.status_handler.set_installation_substatus_json(status=Constants.STATUS_WARNING) + self.status_handler.set_installation_substatus_json(status=Constants.Status.WARNING) # Update patch metadata in status for auto patching request, to be reported to healthStore # When available, HealthStoreId always takes precedence over the 'overriden' Maintenance Run Id that is being re-purposed for other reasons @@ -598,7 +627,7 @@ def perform_status_reconciliation_conditionally(self, package_manager, condition """Periodically based on the condition check, writes out success records as required; returns count of detected installs. This is mostly to capture the dependencies that get silently installed recorded. VERY IMPORTANT NOTE: THIS ONLY WORKS IF EACH DEPENDENCY INSTALLED WAS THE VERY LATEST VERSION AVAILABLE. - So it's only here as a fall back method and shouldn't normally be required with newer code - it will be removed in the future.""" + So it's only here as a fallback method and shouldn't normally be required with newer code - it will be removed in the future.""" if not condition: return 0 @@ -612,7 +641,7 @@ def perform_status_reconciliation_conditionally(self, package_manager, condition successful_packages.append(self.last_still_needed_packages[i]) successful_package_versions.append(self.last_still_needed_package_versions[i]) - self.status_handler.set_package_install_status(successful_packages, successful_package_versions, Constants.INSTALLED) + self.status_handler.set_package_install_status(successful_packages, successful_package_versions, Constants.PackageStatus.INSTALLED) self.last_still_needed_packages = still_needed_packages self.last_still_needed_package_versions = still_needed_package_versions self.composite_logger.log_debug("Completed status reconciliation. 
Time taken: " + str(time.time() - start_time) + " seconds.") diff --git a/src/core/src/core_logic/patch_operators/PatchOperator.py b/src/core/src/core_logic/patch_operators/PatchOperator.py new file mode 100644 index 000000000..7abcb9899 --- /dev/null +++ b/src/core/src/core_logic/patch_operators/PatchOperator.py @@ -0,0 +1,153 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ +import time +from core.src.bootstrap.Constants import Constants +from core.src.core_logic.Stopwatch import Stopwatch +from abc import ABCMeta, abstractmethod + +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PackageManager import PackageManager +from core.src.service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager + + +class PatchOperator(object): + """ Base class for all first-class patch operations (ConfigurePatchingProcessor, PatchAssessor, PatchInstaller) """ + + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager, lifecycle_manager, operation_name): + # 
type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PackageManager, LifecycleManager, str) -> None + self.env_layer = env_layer + self.execution_config = execution_config + self.composite_logger = composite_logger + self.telemetry_writer = telemetry_writer + self.lifecycle_manager = lifecycle_manager + self.status_handler = status_handler + self.package_manager = package_manager + + self.__operation_name = operation_name + + # operation state caching + self.operation_successful = True # starts true until negated + self.operation_status = Constants.Status.TRANSITIONING + self.operation_exception_error = None + self.additional_operation_specific_perf_logs = str() + + # operation stopwatch instance + self.stopwatch = Stopwatch(self.env_layer, self.telemetry_writer, self.composite_logger) + + __metaclass__ = ABCMeta # For Python 3.0+, it changes to class Abstract(metaclass=ABCMeta) + + def set_operation_internal_state(self, operation_successful, operation_status, operation_exception_error=str(), additional_operation_specific_perf_logs=str()): + # type: (bool, Constants.Status, str, str) -> None + """ Allows for concise internal operation state caching """ + self.operation_successful = operation_successful + self.operation_status = operation_status + self.operation_exception_error = operation_exception_error if operation_exception_error != str() else None + self.additional_operation_specific_perf_logs = additional_operation_specific_perf_logs + + def reset_operation_internal_state(self): + # type: () -> None + """ Resets the operation state as though it never ran - primarily meant for assessment """ + self.set_operation_internal_state(operation_successful=True, operation_status=Constants.Status.TRANSITIONING, operation_exception_error=str(), additional_operation_specific_perf_logs=str()) + + def start_operation_with_retries(self): + # type: () -> bool + """ [External call] Initiates retry-based execution on core operations """ + if not 
self.should_operation_run(): + return True + + self.composite_logger.log("\nSTARTING PATCH OPERATION... [Operation={0}][ActivityId={1}][StartTime={2}]".format(self.__operation_name, self.execution_config.activity_id,str(self.execution_config.start_time))) + + self.status_handler.set_current_operation(self.__operation_name) + self.set_operation_status(status=Constants.Status.TRANSITIONING) + self.stopwatch.start() + + for i in range(0, Constants.MAX_PATCH_OPERATION_RETRY_COUNT): + try: + self.lifecycle_manager.lifecycle_status_check() # keep checking if operation interrupt needs to happen + self.start_retryable_operation_unit() + self.set_operation_internal_state(operation_successful=True, operation_status=Constants.Status.SUCCESS, operation_exception_error=str()) + self.write_operation_perf_logs(retry_count=i) + break # avoid retries for success + except Exception as error: + if Constants.EnvLayer.PRIVILEGED_OP_MARKER in repr(error): # Privileged operation handling for non-production use + self.composite_logger.log_debug('[PO] Privileged operation request intercepted: ' + repr(error)) + raise + + if i < Constants.MAX_PATCH_OPERATION_RETRY_COUNT - 1: + self.composite_logger.log_verbose("Retryable error in patch operation. [Operation={0}][Error={1}]".format(self.__operation_name, repr(error))) + time.sleep(2 * (i + 1)) + else: + self.set_operation_internal_state(operation_successful=False, operation_status=Constants.Status.ERROR, operation_exception_error=repr(error)) + self.write_operation_perf_logs(retry_count=i) + self.process_operation_terminal_exception(error) + + self.composite_logger.log("COMPLETED PATCH OPERATION. 
[Operation={0}][ActivityId={1}]".format(self.__operation_name, self.execution_config.activity_id)) + return self.operation_successful + + def write_operation_perf_logs(self, retry_count=0): + # type: (int) -> None + """ Generic operation perf logs with expandability - to be only called once per operation """ + operation_perf_logs = "[{0}={1}][{2}={3}][{4}={5}][{6}={7}][{8}={9}][{10}={11}]{12}".format( + # Core operation information + Constants.PerfLogTrackerParams.TASK, self.__operation_name, + Constants.PerfLogTrackerParams.TASK_STATUS, str(self.operation_status), + Constants.PerfLogTrackerParams.RETRY_COUNT, str(retry_count), + Constants.PerfLogTrackerParams.ERROR_MSG, str(self.operation_exception_error), + + # Correlation information + Constants.PerfLogTrackerParams.PACKAGE_MANAGER, self.package_manager.package_manager_name, + Constants.PerfLogTrackerParams.MACHINE_INFO, self.telemetry_writer.machine_info, + + # Unique operation information, if any + self.additional_operation_specific_perf_logs) # non-generic entries that are not common to all operations + + self.stopwatch.stop_and_write_telemetry(operation_perf_logs) + + @abstractmethod + def should_operation_run(self): + # type: () -> bool + """ Performs evaluation of if the specific operation should be running at all """ + pass + + @abstractmethod + def start_retryable_operation_unit(self): + # type: () -> None + """ Idempotent operation unit of execution that can be retried """ + pass + + @abstractmethod + def process_operation_terminal_exception(self, error): + # type: (str) -> None + """ Handling of any exception that occurs in the last retry attempt """ + pass + + @abstractmethod + def set_final_operation_status(self): + # type: () -> None + """ Ensures that the operation status is set to a terminal state """ + pass + + @abstractmethod + def set_operation_status(self, status=Constants.Status.TRANSITIONING, error=Constants.DEFAULT_UNSPECIFIED_VALUE): + # type: (Constants.Status, str) -> None + """ 
Abstracts away specificities in setting operation status """ + pass + diff --git a/src/core/src/core_logic/patch_operators/__init__.py b/src/core/src/core_logic/patch_operators/__init__.py new file mode 100644 index 000000000..e96580122 --- /dev/null +++ b/src/core/src/core_logic/patch_operators/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ \ No newline at end of file diff --git a/src/core/src/external_dependencies/__init__.py b/src/core/src/external_dependencies/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/external_dependencies/__init__.py +++ b/src/core/src/external_dependencies/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/external_dependencies/distro.py b/src/core/src/external_dependencies/distro.py index 0611b62a3..ffa5f208f 100644 --- a/src/core/src/external_dependencies/distro.py +++ b/src/core/src/external_dependencies/distro.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/local_loggers/CompositeLogger.py b/src/core/src/local_loggers/CompositeLogger.py index e51a89027..74bf226e2 100644 --- a/src/core/src/local_loggers/CompositeLogger.py +++ b/src/core/src/local_loggers/CompositeLogger.py @@ -1,10 +1,11 @@ +# coding=utf-8 # Copyright 2020 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,91 +15,109 @@ # # Requires Python 2.7+ +""" Composite Logger - Manages diverting different kinds of output to the right sinks for them with consistent formatting. """ from __future__ import print_function -import os from core.src.bootstrap.Constants import Constants class CompositeLogger(object): - """ Manages diverting different kinds of output to the right sinks for them. """ + class MessageFormatType(Constants.EnumBackport): + """ Keys represent standard formats used. Values are representational only - non-functional. 
""" + PIPED_SINGLE_LINE = "x | y | z", + INDENTED_MULTI_LINE= "\n\tx \n\ty \n\tz", + PIPED_MULTI_LINE = "\n| x\n| y\n| z" def __init__(self, env_layer=None, file_logger=None, current_env=None, telemetry_writer=None): self.env_layer = env_layer self.file_logger = file_logger - self.telemetry_writer = telemetry_writer # Although telemetry_writer is an independent entity, it is used within composite_logger for ease of sending all logs to telemetry - self.ERROR = "ERROR:" - self.WARNING = "WARNING:" - self.DEBUG = "DEBUG:" - self.VERBOSE = "VERBOSE:" - self.TELEMETRY_ERROR = "TELEMETRY_ERROR:" - self.TELEMETRY_LOG = "TELEMETRY_LOG:" + self.telemetry_writer = telemetry_writer self.current_env = current_env - self.NEWLINE_REPLACE_CHAR = " " - - def log(self, message, message_type=Constants.TelemetryEventLevel.Informational, buffer_msg=Constants.BufferMessage.FALSE): - """log output""" - message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) - message = message.strip() - if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None and self.current_env != Constants.DEV: # turned off for dev environment as it severely slows down execution - self.telemetry_writer.write_event_with_buffer(message, message_type, buffer_msg) - if self.current_env in (Constants.DEV, Constants.TEST): - for line in message.splitlines(): # allows the extended file logger to strip unnecessary white space - print(line) - elif self.file_logger is not None: - timestamp = self.env_layer.datetime.timestamp() - self.file_logger.write("\n" + timestamp + "> " + message.strip(), fail_silently=False) + + # region Public Methods + def log(self, message): + """ Log an info message """ + self.__log(message, event_level=Constants.EventLevel.Info, buffer_msg=Constants.BufferMessage.FALSE, prefix=None) def log_error(self, message): - """log errors""" - message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) - message 
= self.ERROR + (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() - self.log(message, message_type=Constants.TelemetryEventLevel.Error) + """ Logs an error """ + self.__log(message, event_level=Constants.EventLevel.Error, buffer_msg=Constants.BufferMessage.FALSE, prefix="ERROR:") def log_warning(self, message): - """log warning""" - message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) - message = self.WARNING + (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() - self.log(message, message_type=Constants.TelemetryEventLevel.Warning) + """ Logs a warning """ + self.__log(message, event_level=Constants.EventLevel.Warning, buffer_msg=Constants.BufferMessage.FALSE, prefix="WARNING:") def log_debug(self, message, buffer_msg=Constants.BufferMessage.FALSE): - """log debug""" - message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) - message = message.strip() - if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None and self.current_env not in (Constants.DEV, Constants.TEST): - self.telemetry_writer.write_event_with_buffer(message, Constants.TelemetryEventLevel.Verbose, buffer_msg) - if self.current_env in (Constants.DEV, Constants.TEST): - self.log(self.current_env + ": " + str(self.env_layer.datetime.datetime_utcnow()) + ": " + message, Constants.TelemetryEventLevel.Verbose) # send to standard output if dev or test env - elif self.file_logger is not None: - self.file_logger.write("\n\t" + self.DEBUG + " " + "\n\t".join(message.splitlines()).strip()) + """ Logs debugging data """ + self.__log(message, event_level=Constants.EventLevel.Debug, buffer_msg=buffer_msg, prefix=None) def log_verbose(self, message): - """log verbose""" - message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) - # Only log verbose events to file, not to telemetry - if self.file_logger is not None: - 
self.file_logger.write("\n\t" + self.VERBOSE + " " + "\n\t".join(message.strip().splitlines()).strip()) + """ Logs optional debugging data (local file only) """ + """ Note: Use this for mature code. Use regular debug for new code before stabilization. """ + self.__file_logger_write(message, prefix="Verbose") - def log_telemetry_module_error(self, message): - """Used exclusively by telemetry writer to log any errors raised within it's operation""" - message = (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() - if self.file_logger is not None: - timestamp = self.env_layer.datetime.timestamp() - self.file_logger.write("\n" + timestamp + "> " + self.TELEMETRY_ERROR + message.strip(), fail_silently=False) + def log_raw(self, message): + """ Logs to file/stdout without any formatting """ + if self.current_env in (Constants.ExecEnv.DEV, Constants.ExecEnv.TEST): + self.__stdout_write(message, prefix=None) else: - print(self.TELEMETRY_ERROR + " " + message) + self.file_logger.write(message) + # endregion Public Methods + + # region TelemetryWriter-only logging + def log_telemetry_module_error(self, message): + """ Used exclusively by telemetry writer to log any errors raised within its operation """ + self.__file_logger_write(message, "TELEMETRY_ERROR:") def log_telemetry_module(self, message): - """Used exclusively by telemetry writer to log messages from it's operation""" - message = (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() + """ Used exclusively by telemetry writer to log messages from its operation """ + self.__file_logger_write(message, "TELEMETRY_LOG:") + # endregion TelemetryWriter-only logging + + # region Private Methods + def __log(self, message, event_level, buffer_msg=Constants.BufferMessage.FALSE, prefix=None): + """ Log an info message, and is also delegated handling error, warning and debug messages. 
""" + if self.current_env != Constants.ExecEnv.DEV: # to avoid dev environment slow downs + self.__telemetry_write(message, event_level, buffer_msg) + + if self.current_env in (Constants.ExecEnv.DEV, Constants.ExecEnv.TEST): + self.__stdout_write(message, prefix) + + self.__file_logger_write(message, prefix, fail_silently=False) + + def __stdout_write(self, message, prefix): + """ Writes logs to standard output """ + """ Format: [Prefix-if-any] Non-indented single line or Indented multi-line string """ + print(self.__message_format(message, include_timestamp=False, prefix=prefix, format_type=self.MessageFormatType.INDENTED_MULTI_LINE)) + + def __file_logger_write(self, message, prefix, fail_silently=True): + """ Writes logs to file when possible """ + """ Format: timestamp> [Prefix-if-any] Non-indented single line or Piped multi-line string """ if self.file_logger is not None: - timestamp = self.env_layer.datetime.timestamp() - self.file_logger.write("\n" + timestamp + "> " + self.TELEMETRY_LOG + message.strip(), fail_silently=False) - else: - print(self.TELEMETRY_LOG + " " + message) + message = self.__message_format(message, include_timestamp=True, prefix=prefix, format_type=self.MessageFormatType.PIPED_MULTI_LINE) + self.file_logger.write("\n" + message, fail_silently) + + def __telemetry_write(self, message, event_level, buffer_msg): + """ Writes telemetry when possible """ + """ Format: Single line or piped | single | line - TelemetryWriter handles the rest """ + if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None and self.current_env != Constants.ExecEnv.DEV: + message = self.__message_format(message, include_timestamp=False, prefix=None, format_type=self.MessageFormatType.PIPED_SINGLE_LINE) # only sanitize and strip + if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None and self.current_env not in (Constants.ExecEnv.DEV, Constants.ExecEnv.TEST): + 
self.telemetry_writer.write_event_with_buffer(message, event_level, buffer_msg) + + def __message_format(self, message, include_timestamp=True, prefix=None, format_type=None): + """" Helps format the message for the desired logging sink """ + substring = Constants.ERROR_ADDED_TO_STATUS # remove internal message descriptor if any + message = message.replace("[{0}]".format(substring), "") if substring in message else message + + if format_type == self.MessageFormatType.PIPED_SINGLE_LINE: + message = (" | ".join(message.strip().splitlines())) if "\n" in message else message.strip() + elif format_type == self.MessageFormatType.INDENTED_MULTI_LINE: + message = ("\n" if message.startswith("\n") or "\n" in message.strip() else "") + ("\t" if ("\n" in message.strip()) else "") + ("\n\t".join(message.strip().splitlines())).strip() + elif format_type == self.MessageFormatType.PIPED_MULTI_LINE: + message = ("\n" if message.startswith("\n") or "\n" in message.strip() else "") + ("| " if ("\n" in message.strip()) else "") + ("\n| ".join(message.strip().splitlines())).strip() - @staticmethod - def __remove_substring_from_message(message, substring=Constants.ERROR_ADDED_TO_STATUS): - """Remove substring from a string""" - if substring in message: - message = message.replace("[{0}]".format(Constants.ERROR_ADDED_TO_STATUS), "") + message = "{0}{1}{2}".format(str(self.env_layer.datetime.timestamp())+"> " if include_timestamp else "", + "["+prefix+"] " if prefix is not None else "", + message) return message + # endregion Private Methods diff --git a/src/core/src/local_loggers/FileLogger.py b/src/core/src/local_loggers/FileLogger.py index cc47a1137..e254c4aa9 100644 --- a/src/core/src/local_loggers/FileLogger.py +++ b/src/core/src/local_loggers/FileLogger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/local_loggers/StdOutFileMirror.py b/src/core/src/local_loggers/StdOutFileMirror.py index 569ba6da3..9eaa6f82c 100644 --- a/src/core/src/local_loggers/StdOutFileMirror.py +++ b/src/core/src/local_loggers/StdOutFileMirror.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,26 +17,29 @@ """Mirrors all terminal output to a local file If the log file language is set to 'Python' in Notepad++, with code as implemented below, useful collapsibility is obtained.""" import sys +import zlib +from bootstrap.Constants import Constants class StdOutFileMirror(object): """Mirrors all terminal output to a local file""" - def __init__(self, env_layer, file_logger): + def __init__(self, env_layer, file_logger, capture_stdout, current_env): + capture_stdout = capture_stdout is True and current_env == Constants.ExecEnv.PROD self.env_layer = env_layer self.terminal = sys.stdout # preserve for recovery self.file_logger = file_logger - splash = "\n,---. ,---. | | ,-.-. | \n|---|,---,. .,---.,---. |---',---.|--- ,---.|---. 
| | |,---.,---.,---.,---.,---.,-.-.,---.,---.|--- \n| | .-' | || |---' | ,---|| | | | | | |,---|| |,---|| ||---'| | ||---'| || \n` ''---'`---'` `---' ` `---^`---'`---'` ' ` ' '`---^` '`---^`---|`---'` ' '`---'` '`---'\n `---' " + splash = 'x\x9c\xc5Tm\x8b\xdb0\x0c\xfe\xde_!\xf2%\x0cRw\x1b\x8c\xed6\x18\x84\x83\xc1F5\n\x07\x85B87\x89\x9d\\X\xe2\\\xd3d\xac\xa3\xdco\x9fd\xe7\xada\xdfOm\\I\xd6\xa3G\x8aD\xd7\xeb\xd7\x96\xd5\n&Q\t,D\xe4\x9f<\xcfKr\x05p\xfc\x88H:\x9e\x04\xb9\xd1yg\xe0/\x88\x9fo\xa1j\xe7[\xe5\x88|"Z\x15E\x80\xdf\xd9\xf6\x0e\xb7L!dugT\xdc\x16\xb5\x89K\x08\xffv\x8d\x86\xfb\xda\x1e\xd5s\xd7j\xd8\x95q\x9b\xd5M\x05u\x96\xe9\xa60\xf9\xc4\x1e\xec\x1f)\xb3\xa5\xe1\x12q\x0f\n\xfd\xfe\xee8\xb2\x07x\xc7\x15$b\xd9%\x0be\x86\xb4\xac;\xb56T\xc3o\xa2\x8b\xdb\xf4\t06q\xae+mZ\xa8\r\xec\xf1\x0c\x1b:\x1f\x1e\x82\x91<@\xdb\x17\xabD\x19\x12\xb7c\x9b\x04\xb1\xaa\x14qs1\x07\xc4\x9f\xa7\xff\x14\xd0>\xc5\xad\x7f\x86\xac+\xcb\x0b\x14\xa6\xd5y\x13\xb7Z\xb1Z\xd3\xa5\x86\xb2\xc8tzIKM\xed\x8f\xdcaRU\xd5\tCg05\xf4\xef\x81\xa7\xf5\xcd\x9f\x95"\xfaY\xdc\x96\xe6\x84\xdb\x8a\x8d\xb2\x8d\x01\xf1\xfd\xd2\xfa\x99I+8\xeb\x94\xe6\x10@\xa3\xcb"N\xca\x8b\x18\xa8C\xbfGn\x07\xe6\x80\x92\xf7\xaf\x16\xa7\x89[\xc5\xad\x95B\x07\x16!\x86\xc2\xfd\x92\x12\xf2\xdb\xa9\xecrx\xdc\x8d\xda\xd1b\x88\x1f\x88[\x97k\xe7\x1d\xc8\xad\xbcE\xc9v:\xb4\x03\xb0-L\xf7\xc7vp.L\xaa\xe1\xfd\xdbw\x1f\x04\xdc\xcf\'9^\xdc\x89\xd9\xbaK9Wdo\x8e^\xe7q\x8f\\\x02\xe4 0: try: @@ -44,10 +47,11 @@ def write(self, message): self.file_logger.write("\n" + timestamp + "> " + message, fail_silently=False) # also write to the file logger file except Exception as error: sys.stdout = self.terminal # suppresses further job output mirror failures - sys.stdout.write("WARNING: StdOutFileMirror - Error writing to log file: " + repr(error)) + sys.stdout.write("[SOFM][Terminal-only] Error writing to log file. 
[Error={0}]".format(repr(error))) def flush(self): pass def stop(self): sys.stdout = self.terminal + diff --git a/src/core/src/local_loggers/__init__.py b/src/core/src/local_loggers/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/local_loggers/__init__.py +++ b/src/core/src/local_loggers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/package_managers/HealthManager.py b/src/core/src/package_managers/HealthManager.py new file mode 100644 index 000000000..905db1848 --- /dev/null +++ b/src/core/src/package_managers/HealthManager.py @@ -0,0 +1,37 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +from abc import ABCMeta, abstractmethod +from core.src.bootstrap.Constants import Constants + + +class HealthManager(object): + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name): + pass + + __metaclass__ = ABCMeta # For Python 3.0+, it changes to class Abstract(metaclass=ABCMeta) + + # region Handling known errors + @abstractmethod + def try_mitigate_issues_if_any(self, command, code, out): + """ Attempt to fix the errors occurred while executing a command. Repeat check until no issues found """ + pass + + @abstractmethod + def check_known_issues_and_attempt_fix(self, output): + """ Checks if issue falls into known issues and attempts to mitigate """ + return True + # endregion \ No newline at end of file diff --git a/src/core/src/package_managers/PackageManager.py b/src/core/src/package_managers/PackageManager.py index 824fa9997..a3a09034f 100644 --- a/src/core/src/package_managers/PackageManager.py +++ b/src/core/src/package_managers/PackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,38 @@ # Requires Python 2.7+ """The is base package manager, which defines the package management relevant operations""" -import json import os +import time from abc import ABCMeta, abstractmethod from core.src.bootstrap.Constants import Constants -import time +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PatchModeManager import PatchModeManager +from core.src.package_managers.SourcesManager import SourcesManager +from core.src.package_managers.HealthManager import HealthManager class PackageManager(object): """Base class of package manager""" - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PatchModeManager, SourcesManager, HealthManager, str) -> None self.env_layer = env_layer + self.execution_config = execution_config self.composite_logger = composite_logger self.telemetry_writer = telemetry_writer self.status_handler = status_handler + self.package_manager_name = package_manager_name + + # Package manager compartmentalization + self.patch_mode_manager = patch_mode_manager # 
expects type of PatchModeManager + self.sources_manager = sources_manager # expects type of SourcesManager + self.health_manager = health_manager # expects type of HealthManager + self.single_package_upgrade_cmd = '' self.single_package_upgrade_simulation_cmd = 'simulate-install' self.package_manager_settings = {} @@ -40,7 +57,7 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.all_update_versions_cached = [] # auto OS updates - self.image_default_patch_configuration_backup_path = os.path.join(execution_config.config_folder, Constants.IMAGE_DEFAULT_PATCH_CONFIGURATION_BACKUP_PATH) + self.image_default_patch_configuration_backup_path = os.path.join(self.execution_config.config_folder, Constants.IMAGE_DEFAULT_PATCH_CONFIGURATION_BACKUP_PATH) # Constants self.STR_NOTHING_TO_DO = "Error: Nothing to do" @@ -83,7 +100,7 @@ def get_updates_for_classification(self, package_filter): if package_filter.is_invalid_classification_combination(): error_msg = "Invalid classification combination selection detected. Please edit the update deployment configuration, " \ "unselect + reselect the desired classifications and save." 
- self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) + self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) if package_filter.is_msft_critsec_classification_only(): @@ -176,7 +193,7 @@ def dedupe_update_packages(packages, package_versions): # region Install Update def get_install_command(self, cmd, packages, package_versions): - """ Composes the install command for one or more packages with versions""" + """ Composes the installation command for one or more packages with versions""" composite_cmd = cmd for index, package in enumerate(packages): if index != 0: @@ -242,13 +259,13 @@ def get_installation_status(self, code, out, exec_cmd, package, version, simulat Returns: install_result (string): Package installation result """ - install_result = Constants.INSTALLED + install_result = Constants.PackageStatus.INSTALLED package_no_longer_required = False code_path = "| Install" start_time = time.time() # special case of package no longer being required (or maybe even present on the system) - if code == 1 and self.get_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY) == Constants.YUM: + if code == 1 and self.package_manager_name == Constants.YUM: self.composite_logger.log_debug(" - Detecting if package is no longer required (as return code is 1):") if self.STR_NOTHING_TO_DO in out: code_path += " > Nothing to do. (succeeded)" @@ -265,23 +282,23 @@ def get_installation_status(self, code, out, exec_cmd, package, version, simulat # It is premature to fail this package. In the *unlikely* case it never gets picked up, it'll remain NotStarted. # The NotStarted status must not be written again in the calling function (it's not at the time of this writing). code_path += " > Package has no prior version. 
(no operation; return 'not started')" - install_result = Constants.PENDING + install_result = Constants.PackageStatus.PENDING self.composite_logger.log_warning(" |- Package " + package + " (" + version + ") needs to already have an older version installed in order to be upgraded. " + "\n |- Another upgradeable package requiring it as a dependency can cause it to get installed later. No action may be required.\n") elif code == 0 and self.STR_OBSOLETED.replace('<PACKAGE>', self.get_composite_package_identifier(package, version)) in out: # Package can be obsoleted by another package installed in the run (via dependencies) code_path += " > Package obsoleted. (succeeded)" - install_result = Constants.INSTALLED # close approximation to obsoleted + install_result = Constants.PackageStatus.INSTALLED # close approximation to obsoleted self.composite_logger.log_debug(" - Package was discovered to be obsoleted.") elif code == 0 and len(out.split(self.STR_REPLACED)) > 1 and package in out.split(self.STR_REPLACED)[1]: code_path += " > Package replaced. (succeeded)" - install_result = Constants.INSTALLED # close approximation to replaced + install_result = Constants.PackageStatus.INSTALLED # close approximation to replaced self.composite_logger.log_debug(" - Package was discovered to be replaced by another during its installation.") else: # actual failure - install_result = Constants.FAILED + install_result = Constants.PackageStatus.FAILED if code != 0: code_path += " > Package NOT installed. (failed)" self.composite_logger.log_error(" |- Package failed to install: " + package + " (" + version + "). 
" + @@ -300,7 +317,7 @@ def get_installation_status(self, code, out, exec_cmd, package, version, simulat if not simulate: package_size = self.get_package_size(out) - if install_result == Constants.FAILED: + if install_result == Constants.PackageStatus.FAILED: error = self.telemetry_writer.write_package_info(package, version, package_size, round(time.time() - start_time, 2), install_result, code_path, exec_cmd, str(out)) else: error = self.telemetry_writer.write_package_info(package, version, package_size, round(time.time() - start_time, 2), install_result, code_path, exec_cmd) @@ -369,7 +386,7 @@ def get_package_manager_setting(self, setting_key, default_value='d5414abb-62f9- return default_value else: error_msg = "Setting key [" + setting_key + "] does not exist in package manager settings." - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) + self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) def set_package_manager_setting(self, setting_key, setting_value=""): @@ -378,53 +395,6 @@ def set_package_manager_setting(self, setting_key, setting_value=""): self.package_manager_settings[setting_key] = setting_value # endregion - # region auto OS updates - @abstractmethod - def get_current_auto_os_patch_state(self): - """ Gets the current auto OS update patch state on the machine """ - pass - - @abstractmethod - def disable_auto_os_update(self): - """ Disables auto OS updates on the machine only if they are enabled and logs the default settings the machine comes with """ - pass - - @abstractmethod - def backup_image_default_patch_configuration_if_not_exists(self): - """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. 
- We only log the default system settings a VM comes with, any subsequent updates will not be recorded""" - pass - - def image_default_patch_configuration_backup_exists(self): - """ Checks whether default auto OS update settings have been recorded earlier within patch extension artifacts """ - self.composite_logger.log_debug("Checking if extension contains a backup for default auto OS update configuration settings...") - - # backup does not exist - if not os.path.exists(self.image_default_patch_configuration_backup_path) or not os.path.isfile(self.image_default_patch_configuration_backup_path): - self.composite_logger.log_debug("Default system configuration settings for auto OS updates aren't recorded in the extension") - return False - - return True - - @abstractmethod - def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): - pass - - @abstractmethod - def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value, patch_configuration_sub_setting_pattern_to_match): - pass - # endregion - - # region Handling known errors - def try_mitigate_issues_if_any(self, command, code, out): - """ Attempt to fix the errors occurred while executing a command. Repeat check until no issues found """ - pass - - def check_known_issues_and_attempt_fix(self, output): - """ Checks if issue falls into known issues and attempts to mitigate """ - return True - # endregion - @abstractmethod def is_reboot_pending(self): """ Checks if there is a pending reboot on the machine. """ diff --git a/src/core/src/package_managers/PatchModeManager.py b/src/core/src/package_managers/PatchModeManager.py new file mode 100644 index 000000000..c005d422e --- /dev/null +++ b/src/core/src/package_managers/PatchModeManager.py @@ -0,0 +1,76 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +import json +import os +from abc import ABCMeta, abstractmethod +from core.src.bootstrap.Constants import Constants +import time + + +class PatchModeManager(object): + """Base class of package manager""" + + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name): + self.env_layer = env_layer + self.execution_config = execution_config + self.composite_logger = composite_logger + self.telemetry_writer = telemetry_writer + self.status_handler = status_handler + self.package_manager_name = package_manager_name + + # auto OS updates + self.image_default_patch_configuration_backup_path = os.path.join(self.execution_config.config_folder, Constants.IMAGE_DEFAULT_PATCH_CONFIGURATION_BACKUP_PATH) + + __metaclass__ = ABCMeta # For Python 3.0+, it changes to class Abstract(metaclass=ABCMeta) + + # region auto OS updates + @abstractmethod + def get_current_auto_os_patch_state(self): + """ Gets the current auto OS update patch state on the machine """ + pass + + @abstractmethod + def disable_auto_os_update(self): + """ Disables auto OS updates on the machine only if they are enabled and logs the default settings the machine comes with """ + pass + + @abstractmethod + def backup_image_default_patch_configuration_if_not_exists(self): + """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. 
+ We only log the default system settings a VM comes with, any subsequent updates will not be recorded """ + pass + + def image_default_patch_configuration_backup_exists(self): + """ Checks whether default auto OS update settings have been recorded earlier within patch extension artifacts """ + self.composite_logger.log_verbose("[PMM] Checking if extension contains a backup for default auto OS update configuration settings...") + + # backup does not exist + if not os.path.exists(self.image_default_patch_configuration_backup_path) or not os.path.isfile(self.image_default_patch_configuration_backup_path): + self.composite_logger.log_verbose("[PMM] Default system configuration settings for auto OS updates aren't recorded in the extension.") + return False + + return True + + @abstractmethod + def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): + pass + + @abstractmethod + def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value, patch_configuration_sub_setting_pattern_to_match): + pass + # endregion + diff --git a/src/core/src/package_managers/SourcesManager.py b/src/core/src/package_managers/SourcesManager.py new file mode 100644 index 000000000..0b73c360a --- /dev/null +++ b/src/core/src/package_managers/SourcesManager.py @@ -0,0 +1,29 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +from abc import ABCMeta, abstractmethod +from core.src.bootstrap.Constants import Constants + + +class SourcesManager(object): + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name): + pass + + __metaclass__ = ABCMeta # For Python 3.0+, it changes to class Abstract(metaclass=ABCMeta) + + @abstractmethod + def function_name(self): + pass \ No newline at end of file diff --git a/src/core/src/package_managers/__init__.py b/src/core/src/package_managers/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/package_managers/__init__.py +++ b/src/core/src/package_managers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/package_managers/apt/AptHealthManager.py b/src/core/src/package_managers/apt/AptHealthManager.py new file mode 100644 index 000000000..cfaf3304c --- /dev/null +++ b/src/core/src/package_managers/apt/AptHealthManager.py @@ -0,0 +1,35 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.HealthManager import HealthManager + + +class AptHealthManager(HealthManager): + """ Helps with attempting automatic environment health restoration for apt/dpkg where feasible to improve operation success rates """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(AptHealthManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.APT) + pass + + # region Handling known errors + def try_mitigate_issues_if_any(self, command, code, out): + """ Attempt to fix the errors occurred while executing a command. Repeat check until no issues found """ + pass + + def check_known_issues_and_attempt_fix(self, output): + """ Checks if issue falls into known issues and attempts to mitigate """ + return True + # endregion \ No newline at end of file diff --git a/src/core/src/package_managers/AptitudePackageManager.py b/src/core/src/package_managers/apt/AptPackageManager.py similarity index 67% rename from src/core/src/package_managers/AptitudePackageManager.py rename to src/core/src/package_managers/apt/AptPackageManager.py index 737de12b1..d89deeaaf 100644 --- a/src/core/src/package_managers/AptitudePackageManager.py +++ b/src/core/src/package_managers/apt/AptPackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -23,15 +23,26 @@ from core.src.package_managers.PackageManager import PackageManager from core.src.bootstrap.Constants import Constants -from core.src.package_managers.UbuntuProClient import UbuntuProClient +from package_managers.apt.UbuntuProClient import UbuntuProClient +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PatchModeManager import PatchModeManager +from core.src.package_managers.SourcesManager import SourcesManager +from core.src.package_managers.HealthManager import HealthManager -class AptitudePackageManager(PackageManager): + +class AptPackageManager(PackageManager): """Implementation of Debian/Ubuntu based package management operations""" # For more details, try `man apt-get` on any Debian/Ubuntu based box. 
- def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): - super(AptitudePackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PatchModeManager, SourcesManager, HealthManager, str) -> None + super(AptPackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name) security_list_guid = str(uuid.uuid4()) @@ -58,13 +69,6 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ # Package manager exit code(s) self.apt_exitcode_ok = 0 - # auto OS updates - self.update_package_list = 'APT::Periodic::Update-Package-Lists' - self.unattended_upgrade = 'APT::Periodic::Unattended-Upgrade' - self.os_patch_configuration_settings_file_path = '/etc/apt/apt.conf.d/20auto-upgrades' - self.update_package_list_value = "" - self.unattended_upgrade_value = "" - # Miscellaneous os.environ['DEBIAN_FRONTEND'] = 'noninteractive' # Avoid a config prompt self.set_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY, Constants.APT) @@ -80,59 +84,39 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.ubuntu_pro_client_all_updates_versions_cached = [] def refresh_repo(self): - self.composite_logger.log("\nRefreshing local repo...") + self.composite_logger.log_verbose("[APM] Refreshing local repo.") self.invoke_package_manager(self.repo_refresh) # region Get Available Updates def invoke_package_manager_advanced(self, command, raise_on_exception=True): """Get missing updates using the command input""" - self.composite_logger.log_debug('\nInvoking 
package manager using: ' + command) + self.composite_logger.log_verbose('[APM] Invoking package manager. [Command={0}]'.format(command)) code, out = self.env_layer.run_command_output(command, False, False) if code != self.apt_exitcode_ok and self.STR_DPKG_WAS_INTERRUPTED in out: - self.composite_logger.log_error('[ERROR] YOU NEED TO TAKE ACTION TO PROCEED. The package manager on this machine is not in a healthy state, and ' - 'Patch Management cannot proceed successfully. Before the next Patch Operation, please run the following ' - 'command and perform any configuration steps necessary on the machine to return it to a healthy state: ' - 'sudo dpkg --configure -a') - self.telemetry_writer.write_execution_error(command, code, out) - error_msg = 'Package manager on machine is not healthy. To fix, please run: sudo dpkg --configure -a' - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - if raise_on_exception: - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) + self.composite_logger.log_error("[ERROR] YOU NEED TO TAKE ACTION TO PROCEED. The package manager on this machine is not in a healthy state, and Azure Guest Patching Service cannot proceed successfully. Before the next Patch Operation, please run the following command and perform any configuration steps necessary on the machine to return it to a healthy state: 'sudo dpkg --configure -a'") + self.status_handler.add_error_to_status_and_log_error(message="Package manager on machine is not healthy. 
To fix, please run: sudo dpkg --configure -a", + raise_exception=bool(raise_on_exception), error_code=Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) elif code != self.apt_exitcode_ok: - self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from package manager: " + str(code)) - self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.telemetry_writer.write_execution_error(command, code, out) - error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - if raise_on_exception: - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - # more known return codes should be added as appropriate - else: # verbose diagnostic log - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from package manager: " + str(code)) - self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") + self.composite_logger.log_error("[APM] Package Manager ERROR. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) + self.status_handler.add_error_to_status_and_log_error(message="Unexpected return code from package manager. [Code={0}][Command={1}]".format(str(code), command), + raise_exception=bool(raise_on_exception), error_code=Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) + else: + self.composite_logger.log_verbose("[APM] Package Manager SUCCESS. 
[Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) + return out, code def invoke_apt_cache(self, command): """Invoke apt-cache using the command input""" - self.composite_logger.log_debug('Invoking apt-cache using: ' + command) + self.composite_logger.log_verbose('[APM] Invoking apt-cache using: ' + command) code, out = self.env_layer.run_command_output(command, False, False) if code != 0: - self.composite_logger.log('[ERROR] apt-cache was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from apt-cache: " + str(code)) - self.composite_logger.log_warning(" - Output from apt-cache: \n|\t" + "\n|\t".join(out.splitlines())) - error_msg = 'Unexpected return code (' + str(code) + ') from apt-cache on command: ' + command - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - # more known return codes should be added as appropriate + self.composite_logger.log_error("[APM] apt-cache ERROR. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) + self.status_handler.add_error_to_status_and_log_error(message="Unexpected return code from apt-cache. [Code={0}][Command={1}]".format(str(code), command), + raise_exception=True, error_code=Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) else: # verbose diagnostic log - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from apt-cache: " + str(code)) - self.composite_logger.log_debug(" - Output from apt-cache: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") + self.composite_logger.log_verbose("[APM] apt-cache SUCCESS. 
[Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) + return out # region Classification-based (incl. All) update check @@ -248,7 +232,7 @@ def extract_packages_and_versions(self, output): # Inst python3-update-manager [1:16.10.7] (1:16.10.8 Ubuntu:16.10/yakkety-updates [all]) [update-manager-core:amd64 ] # Inst update-manager-core [1:16.10.7] (1:16.10.8 Ubuntu:16.10/yakkety-updates [all]) - self.composite_logger.log_debug("\nExtracting package and version data...") + self.composite_logger.log_verbose("[APM] Extracting package and version data...") packages = [] versions = [] @@ -260,7 +244,7 @@ def extract_packages_and_versions(self, output): packages.append(package[0]) versions.append(package[1]) - self.composite_logger.log_debug(" - Extracted package and version data for " + str(len(packages)) + " packages [BASIC].") + self.composite_logger.log_verbose("[APM] Extracted package and version data for " + str(len(packages)) + " packages [BASIC].") # Discovering ESM packages - Distro versions with extended security maintenance lines = output.strip().split('\n') @@ -271,7 +255,7 @@ def extract_packages_and_versions(self, output): if not esm_marker_found: if self.ESM_MARKER in line: - esm_marker_found = True + esm_marker_found = True continue esm_packages = line.split() @@ -280,7 +264,7 @@ def extract_packages_and_versions(self, output): for package in esm_packages: packages.append(package) versions.append(Constants.UA_ESM_REQUIRED) - self.composite_logger.log_debug(" - Extracted package and version data for " + str(len(packages)) + " packages [TOTAL].") + self.composite_logger.log_verbose("[APM] Extracted package and version data for " + str(len(packages)) + " packages [TOTAL].") return packages, versions # endregion @@ -303,6 +287,7 @@ def get_all_available_versions_of_package(self, package_name): # bash | 4.3-14ubuntu1 | http://us.archive.ubuntu.com/ubuntu xenial/main amd64 Packages package_versions = [] + debug_log = str() cmd = 
self.single_package_check_versions.replace('', package_name) output = self.invoke_apt_cache(cmd) @@ -311,11 +296,12 @@ def get_all_available_versions_of_package(self, package_name): for line in lines: package_details = line.split(' |') if len(package_details) == 3: - self.composite_logger.log_debug(" - Applicable line: " + str(line)) + debug_log += "[A] {0}\n".format(str(line)) # applicable package_versions.append(package_details[1].strip()) else: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + debug_log += "[N] {0}\n".format(str(line)) # not applicable + self.composite_logger.log_debug("[APM] Debug log on get all available versions of package: {0}".format(debug_log)) return package_versions def is_package_version_installed(self, package_name, package_version): @@ -343,7 +329,7 @@ def is_package_version_installed(self, package_name, package_version): else: self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - self.telemetry_writer.write_event("[Installed check] Return code: 1. Unable to verify package not present on the system: " + str(output), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[Installed check] Return code: 1. 
Unable to verify package not present on the system: " + str(output), Constants.EventLevel.Verbose) elif code == 0: # likely found # Sample output format ------------------------------------------ # Package: mysql-server @@ -365,31 +351,31 @@ def is_package_version_installed(self, package_name, package_version): composite_found_flag = composite_found_flag | 1 else: # should never hit for the way this is invoked, hence telemetry self.composite_logger.log_debug(" - Did not match name: " + str(package_name) + " (" + str(line) + ")") - self.telemetry_writer.write_event("[Installed check] Name did not match: " + package_name + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[Installed check] Name did not match: " + package_name + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.EventLevel.Verbose) continue if 'Version: ' in line: if package_version in line: composite_found_flag = composite_found_flag | 2 else: # should never hit for the way this is invoked, hence telemetry self.composite_logger.log_debug(" - Did not match version: " + str(package_version) + " (" + str(line) + ")") - self.telemetry_writer.write_event("[Installed check] Version did not match: " + str(package_version) + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[Installed check] Version did not match: " + str(package_version) + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.EventLevel.Verbose) continue if 'Status: ' in line: if 'install ok installed' in line: composite_found_flag = composite_found_flag | 4 else: # should never hit for the way this is invoked, hence telemetry self.composite_logger.log_debug(" - Did not match status: " + str(package_name) + " (" + str(line) + ")") - self.telemetry_writer.write_event("[Installed check] Status did not match: 'install ok installed' (line=" + str(line) + ")(out=" + 
str(output) + ")", Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[Installed check] Status did not match: 'install ok installed' (line=" + str(line) + ")(out=" + str(output) + ")", Constants.EventLevel.Verbose) continue if composite_found_flag & 7 == 7: # whenever this becomes true, the exact package version is installed self.composite_logger.log_debug(" - Package, Version and Status matched. Package is detected as 'Installed'.") return True self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) self.composite_logger.log_debug(" - Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")") - self.telemetry_writer.write_event("Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")(output=" + output + ")", Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")(output=" + output + ")", Constants.EventLevel.Verbose) else: # This is not expected to execute. If it does, the details will show up in telemetry. Improve this code with that information. self.composite_logger.log_debug(" - Unexpected return code from dpkg: " + str(code) + ". Output: " + str(output)) - self.telemetry_writer.write_event("Unexpected return code from dpkg: Cmd=" + str(cmd) + ". Code=" + str(code) + ". Output=" + str(output), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("Unexpected return code from dpkg: Cmd=" + str(cmd) + ". Code=" + str(code) + ". 
Output=" + str(output), Constants.EventLevel.Verbose) # SECONDARY METHOD - Fallback # Sample output format @@ -417,7 +403,7 @@ def is_package_version_installed(self, package_name, package_version): self.composite_logger.log_debug(" - Did not find status: " + str(package_details[3] + " (" + str(package_details[3]) + ")")) continue self.composite_logger.log_debug(" - Package version specified was determined to be installed.") - self.telemetry_writer.write_event("[Installed check] Fallback code disagreed with dpkg.", Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[Installed check] Fallback code disagreed with dpkg.", Constants.EventLevel.Verbose) return True self.composite_logger.log_debug(" - Package version specified was determined to NOT be installed.") @@ -476,129 +462,6 @@ def get_package_size(self, output): return Constants.UNKNOWN_PACKAGE_SIZE # endregion - # region auto OS updates - def get_current_auto_os_patch_state(self): - """ Gets the current auto OS update patch state on the machine """ - self.composite_logger.log("Fetching the current automatic OS patch state on the machine...") - if os.path.exists(self.os_patch_configuration_settings_file_path): - self.__get_current_auto_os_updates_setting_on_machine() - if not os.path.exists(self.os_patch_configuration_settings_file_path) or int(self.unattended_upgrade_value) == 0: - current_auto_os_patch_state = Constants.AutomaticOSPatchStates.DISABLED - elif int(self.unattended_upgrade_value) == 1: - current_auto_os_patch_state = Constants.AutomaticOSPatchStates.ENABLED - else: - current_auto_os_patch_state = Constants.AutomaticOSPatchStates.UNKNOWN - - self.composite_logger.log_debug("Current Auto OS Patch State is [State={0}]".format(str(current_auto_os_patch_state))) - return current_auto_os_patch_state - - def __get_current_auto_os_updates_setting_on_machine(self): - """ Gets all the update settings related to auto OS updates currently set on the machine """ - try: - 
image_default_patch_configuration = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) - settings = image_default_patch_configuration.strip().split('\n') - for setting in settings: - if self.update_package_list in str(setting): - self.update_package_list_value = re.search(self.update_package_list + ' *"(.*?)".', str(setting)).group(1) - if self.unattended_upgrade in str(setting): - self.unattended_upgrade_value = re.search(self.unattended_upgrade + ' *"(.*?)".', str(setting)).group(1) - - if self.update_package_list_value == "": - self.composite_logger.log_debug("Machine did not have any value set for [Setting={0}]".format(str(self.update_package_list))) - - if self.unattended_upgrade_value == "": - self.composite_logger.log_debug("Machine did not have any value set for [Setting={0}]".format(str(self.unattended_upgrade))) - - except Exception as error: - raise Exception("Error occurred in fetching default auto OS updates from the machine. [Exception={0}]".format(repr(error))) - - def disable_auto_os_update(self): - """ Disables auto OS updates on the machine only if they are enabled and logs the default settings the machine comes with """ - try: - self.composite_logger.log_debug("Disabling auto OS updates if they are enabled") - self.backup_image_default_patch_configuration_if_not_exists() - self.update_os_patch_configuration_sub_setting(self.update_package_list, "0") - self.update_os_patch_configuration_sub_setting(self.unattended_upgrade, "0") - self.composite_logger.log("Successfully disabled auto OS updates") - except Exception as error: - self.composite_logger.log_error("Could not disable auto OS updates. [Error={0}]".format(repr(error))) - raise - - def backup_image_default_patch_configuration_if_not_exists(self): - """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. 
- We only log the default system settings a VM comes with, any subsequent updates will not be recorded""" - try: - image_default_patch_configuration_backup = {} - image_default_patch_configuration_backup_exists = self.image_default_patch_configuration_backup_exists() - - # read existing backup since it also contains backup from other update services. We need to preserve any existing data with backup file - if image_default_patch_configuration_backup_exists: - try: - image_default_patch_configuration_backup = json.loads(self.env_layer.file_system.read_with_retry(self.image_default_patch_configuration_backup_path)) - except Exception as error: - self.composite_logger.log_error("Unable to read backup for default patch state. Will attempt to re-write. [Exception={0}]".format(repr(error))) - - # verify if existing backup is valid if not, write to backup - is_backup_valid = image_default_patch_configuration_backup_exists and self.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup) - if is_backup_valid: - self.composite_logger.log_debug("Since extension has a valid backup, no need to log the current settings again. [Default Auto OS update settings={0}] [File path={1}]" - .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) - else: - self.composite_logger.log_debug("Since the backup is invalid or does not exist, will add a new backup with the current auto OS update settings") - self.__get_current_auto_os_updates_setting_on_machine() - - backup_image_default_patch_configuration_json = { - self.update_package_list: self.update_package_list_value, - self.unattended_upgrade: self.unattended_upgrade_value - } - - self.composite_logger.log_debug("Logging default system configuration settings for auto OS updates. 
[Settings={0}] [Log file path={1}]" - .format(str(backup_image_default_patch_configuration_json), self.image_default_patch_configuration_backup_path)) - self.env_layer.file_system.write_with_retry(self.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(backup_image_default_patch_configuration_json)), mode='w+') - except Exception as error: - error_message = "Exception during fetching and logging default auto update settings on the machine. [Exception={0}]".format(repr(error)) - self.composite_logger.log_error(error_message) - self.status_handler.add_error_to_status(error_message, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - raise - - def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): - if self.update_package_list in image_default_patch_configuration_backup and self.unattended_upgrade in image_default_patch_configuration_backup: - self.composite_logger.log_debug("Extension already has a valid backup of the default system configuration settings for auto OS updates.") - return True - else: - self.composite_logger.log_error("Extension does not have a valid backup of the default system configuration settings for auto OS updates.") - return False - - def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value="0", patch_configuration_sub_setting_pattern_to_match=""): - """ Updates (or adds if it doesn't exist) the given patch_configuration_sub_setting with the given value in os_patch_configuration_settings_file """ - try: - # note: adding space between the patch_configuration_sub_setting and value since, we will have to do that if we have to add a patch_configuration_sub_setting that did not exist before - self.composite_logger.log("Updating system configuration settings for auto OS updates. 
[Patch Configuration Sub Setting={0}] [Value={1}]".format(str(patch_configuration_sub_setting), value)) - os_patch_configuration_settings = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) - patch_configuration_sub_setting_to_update = patch_configuration_sub_setting + ' "' + value + '";' - patch_configuration_sub_setting_found_in_file = False - updated_patch_configuration_sub_setting = "" - settings = os_patch_configuration_settings.strip().split('\n') - - # update value of existing setting - for i in range(len(settings)): - if patch_configuration_sub_setting in settings[i]: - settings[i] = patch_configuration_sub_setting_to_update - patch_configuration_sub_setting_found_in_file = True - updated_patch_configuration_sub_setting += settings[i] + "\n" - - # add setting to configuration file, since it doesn't exist - if not patch_configuration_sub_setting_found_in_file: - updated_patch_configuration_sub_setting += patch_configuration_sub_setting_to_update + "\n" - - self.env_layer.file_system.write_with_retry(self.os_patch_configuration_settings_file_path, '{0}'.format(updated_patch_configuration_sub_setting.lstrip()), mode='w+') - except Exception as error: - error_msg = "Error occurred while updating system configuration settings for auto OS updates. 
[Patch Configuration={0}] [Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) - self.composite_logger.log_error(error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - raise - # endregion - # region Reboot Management def do_processes_require_restart(self): """ Fulfilling base class contract """ @@ -649,13 +512,13 @@ def set_security_esm_package_status(self, operation, packages): """Set the security-ESM classification for the esm packages.""" security_esm_update_query_success, security_esm_updates, security_esm_updates_versions = self.get_security_esm_updates() if self.__pro_client_prereq_met and security_esm_update_query_success and len(security_esm_updates) > 0: - self.telemetry_writer.write_event("set Security-ESM package status:[Operation={0}][Updates={1}]".format(operation, str(security_esm_updates)), Constants.TelemetryEventLevel.Verbose) - if operation == Constants.ASSESSMENT: + self.telemetry_writer.write_event("set Security-ESM package status:[Operation={0}][Updates={1}]".format(operation, str(security_esm_updates)), Constants.EventLevel.Verbose) + if operation == Constants.Op.ASSESSMENT: self.status_handler.set_package_assessment_status(security_esm_updates, security_esm_updates_versions, Constants.PackageClassification.SECURITY_ESM) # If the Ubuntu Pro Client is not attached, set the error with the code UA_ESM_REQUIRED. This will be used in portal to mark the VM as unattached to pro. 
if not self.ubuntu_pro_client.is_ubuntu_pro_client_attached: self.status_handler.add_error_to_status("{0} patches requires Ubuntu Pro for Infrastructure with Extended Security Maintenance".format(len(security_esm_updates)), Constants.PatchOperationErrorCodes.UA_ESM_REQUIRED) - elif operation == Constants.INSTALLATION: + elif operation == Constants.Op.INSTALLATION: if security_esm_update_query_success: esm_packages_selected_to_install = [package for package in packages if package in security_esm_updates] self.composite_logger.log_debug("Setting security ESM package status. [SelectedEsmPackagesCount={0}]".format(len(esm_packages_selected_to_install))) diff --git a/src/core/src/package_managers/apt/AptPatchModeManager.py b/src/core/src/package_managers/apt/AptPatchModeManager.py new file mode 100644 index 000000000..5839512aa --- /dev/null +++ b/src/core/src/package_managers/apt/AptPatchModeManager.py @@ -0,0 +1,153 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +import json +import os +import re +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.PatchModeManager import PatchModeManager + + +class AptPatchModeManager(PatchModeManager): + """ Helps with translating PatchModes set by the customer to in-VM configurations """ + + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(AptPatchModeManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.APT) + self.update_package_list = 'APT::Periodic::Update-Package-Lists' + self.unattended_upgrade = 'APT::Periodic::Unattended-Upgrade' + self.os_patch_configuration_settings_file_path = '/etc/apt/apt.conf.d/20auto-upgrades' + self.update_package_list_value = "" + self.unattended_upgrade_value = "" + + # region auto OS updates + def get_current_auto_os_patch_state(self): + """ Gets the current auto OS update patch state on the machine """ + self.composite_logger.log_verbose("[APMM] Fetching the current automatic OS patch state on the machine...") + if os.path.exists(self.os_patch_configuration_settings_file_path): + self.__get_current_auto_os_updates_setting_on_machine() + if not os.path.exists(self.os_patch_configuration_settings_file_path) or int(self.unattended_upgrade_value) == 0: + current_auto_os_patch_state = Constants.AutomaticOSPatchStates.DISABLED + elif int(self.unattended_upgrade_value) == 1: + current_auto_os_patch_state = Constants.AutomaticOSPatchStates.ENABLED + else: + current_auto_os_patch_state = Constants.AutomaticOSPatchStates.UNKNOWN + + self.composite_logger.log_debug("[APMM] Current Auto OS Patch State detected. 
[State={0}]".format(str(current_auto_os_patch_state))) + return current_auto_os_patch_state + + def __get_current_auto_os_updates_setting_on_machine(self): + """ Gets all the update settings related to auto OS updates currently set on the machine """ + try: + image_default_patch_configuration = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) + settings = image_default_patch_configuration.strip().split('\n') + for setting in settings: + if self.update_package_list in str(setting): + self.update_package_list_value = re.search(self.update_package_list + ' *"(.*?)".', str(setting)).group(1) + if self.unattended_upgrade in str(setting): + self.unattended_upgrade_value = re.search(self.unattended_upgrade + ' *"(.*?)".', str(setting)).group(1) + + if self.update_package_list_value == "": + self.composite_logger.log_debug("[APMM] Machine did not have any value set for [Setting={0}]".format(str(self.update_package_list))) + + if self.unattended_upgrade_value == "": + self.composite_logger.log_debug("[APMM] Machine did not have any value set for [Setting={0}]".format(str(self.unattended_upgrade))) + + except Exception as error: + raise Exception("Error occurred in fetching default auto OS updates from the machine. [Exception={0}]".format(repr(error))) + + def disable_auto_os_update(self): + """ Disables auto OS updates on the machine only if they are enabled and logs the default settings the machine comes with """ + try: + self.composite_logger.log_verbose("[APMM] Disabling auto OS updates if they are enabled...") + self.backup_image_default_patch_configuration_if_not_exists() + self.update_os_patch_configuration_sub_setting(self.update_package_list, "0") + self.update_os_patch_configuration_sub_setting(self.unattended_upgrade, "0") + self.composite_logger.log_verbose("[APMM] Successfully disabled auto OS updates") + except Exception as error: + self.composite_logger.log_error("Could not disable auto OS updates. 
[Error={0}]".format(repr(error))) + raise + + def backup_image_default_patch_configuration_if_not_exists(self): + """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. + We only log the default system settings a VM comes with, any subsequent updates will not be recorded""" + try: + image_default_patch_configuration_backup = {} + image_default_patch_configuration_backup_exists = self.image_default_patch_configuration_backup_exists() + + # read existing backup since it also contains backup from other update services. We need to preserve any existing data with backup file + if image_default_patch_configuration_backup_exists: + try: + image_default_patch_configuration_backup = json.loads(self.env_layer.file_system.read_with_retry(self.image_default_patch_configuration_backup_path)) + except Exception as error: + self.composite_logger.log_error("[APMM] Unable to read backup for default patch state. Will attempt to re-write. [Exception={0}]".format(repr(error))) + + # verify if existing backup is valid if not, write to backup + is_backup_valid = image_default_patch_configuration_backup_exists and self.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup) + if is_backup_valid: + self.composite_logger.log_verbose("[APMM] Since extension has a valid backup, no need to log the current settings again. 
[Default Auto OS update settings={0}][File path={1}]" + .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) + else: + self.composite_logger.log_verbose("[APMM] Since the backup is invalid or does not exist, will add a new backup with the current auto OS update settings") + self.__get_current_auto_os_updates_setting_on_machine() + + backup_image_default_patch_configuration_json = { + self.update_package_list: self.update_package_list_value, + self.unattended_upgrade: self.unattended_upgrade_value + } + + self.composite_logger.log_debug("[APMM] Logging default system configuration settings for auto OS updates. [Settings={0}][Log file path={1}]" + .format(str(backup_image_default_patch_configuration_json), self.image_default_patch_configuration_backup_path)) + self.env_layer.file_system.write_with_retry(self.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(backup_image_default_patch_configuration_json)), mode='w+') + except Exception as error: + error_message = "Exception during fetching and logging default auto OS update settings on the machine. 
[Exception={0}]".format(repr(error)) + self.status_handler.add_error_to_status_and_log_error(error_message, raise_exception=True, error_code=Constants.PatchOperationErrorCodes.PATCH_MODE_SET_FAILURE) + + def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): + if self.update_package_list in image_default_patch_configuration_backup and self.unattended_upgrade in image_default_patch_configuration_backup: + self.composite_logger.log_verbose("[APMM] Extension already has a valid backup of the default system configuration settings for auto OS updates.") + return True + else: + self.composite_logger.log_verbose("[APMM] Extension does not have a valid backup of the default system configuration settings for auto OS updates.") + return False + + def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value="0", patch_configuration_sub_setting_pattern_to_match=""): + """ Updates (or adds if it doesn't exist) the given patch_configuration_sub_setting with the given value in os_patch_configuration_settings_file """ + try: + # note: adding space between the patch_configuration_sub_setting and value since, we will have to do that if we have to add a patch_configuration_sub_setting that did not exist before + self.composite_logger.log_debug("[APMM] Updating system configuration settings for auto OS updates. 
[Patch Configuration Sub Setting={0}][Value={1}]".format(str(patch_configuration_sub_setting), value)) + os_patch_configuration_settings = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) + patch_configuration_sub_setting_to_update = patch_configuration_sub_setting + ' "' + value + '";' + patch_configuration_sub_setting_found_in_file = False + updated_patch_configuration_sub_setting = "" + settings = os_patch_configuration_settings.strip().split('\n') + + # update value of existing setting + for i in range(len(settings)): + if patch_configuration_sub_setting in settings[i]: + settings[i] = patch_configuration_sub_setting_to_update + patch_configuration_sub_setting_found_in_file = True + updated_patch_configuration_sub_setting += settings[i] + "\n" + + # add setting to configuration file, since it doesn't exist + if not patch_configuration_sub_setting_found_in_file: + updated_patch_configuration_sub_setting += patch_configuration_sub_setting_to_update + "\n" + + self.env_layer.file_system.write_with_retry(self.os_patch_configuration_settings_file_path, '{0}'.format(updated_patch_configuration_sub_setting.lstrip()), mode='w+') + except Exception as error: + error_message = "Error occurred while updating system configuration settings for auto OS updates. 
[Patch Configuration={0}][Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) + self.status_handler.add_error_to_status_and_log_error(error_message, raise_exception=True, error_code=Constants.PatchOperationErrorCodes.PATCH_MODE_SET_FAILURE) + # endregion + diff --git a/src/core/src/package_managers/apt/AptSourcesManager.py b/src/core/src/package_managers/apt/AptSourcesManager.py new file mode 100644 index 000000000..d81d5ceec --- /dev/null +++ b/src/core/src/package_managers/apt/AptSourcesManager.py @@ -0,0 +1,32 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.SourcesManager import SourcesManager + + +class AptSourcesManager(SourcesManager): + """ Helps with sources list management for Apt """ + def init(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(SourcesManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.APT) + pass + + def function_name(self): + pass + + + +# https://manpages.debian.org/jessie/apt/sources.list.5.en.html#:~:text=The%20source%20list%20%2Fetc%2Fapt%2Fsources.list%20is%20designed%20to%20support,by%20an%20equivalent%20command%20from%20another%20APT%20front-end%29. 
\ No newline at end of file diff --git a/src/core/src/package_managers/UbuntuProClient.py b/src/core/src/package_managers/apt/UbuntuProClient.py similarity index 99% rename from src/core/src/package_managers/UbuntuProClient.py rename to src/core/src/package_managers/apt/UbuntuProClient.py index 0fe82346a..9670a219c 100644 --- a/src/core/src/package_managers/UbuntuProClient.py +++ b/src/core/src/package_managers/apt/UbuntuProClient.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/package_managers/apt/__init__.py b/src/core/src/package_managers/apt/__init__.py new file mode 100644 index 000000000..e96580122 --- /dev/null +++ b/src/core/src/package_managers/apt/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ \ No newline at end of file diff --git a/src/core/src/package_managers/yum/YumHealthManager.py b/src/core/src/package_managers/yum/YumHealthManager.py new file mode 100644 index 000000000..d391cd012 --- /dev/null +++ b/src/core/src/package_managers/yum/YumHealthManager.py @@ -0,0 +1,35 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.HealthManager import HealthManager + + +class YumHealthManager(HealthManager): + """ Helps with attempting automatic environment health restoration for yum where feasible to improve operation success rates """ + def init(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(HealthManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.APT) + pass + + # region Handling known errors + def try_mitigate_issues_if_any(self, command, code, out): + """ Attempt to fix the errors occurred while executing a command. 
Repeat check until no issues found """ + pass + + def check_known_issues_and_attempt_fix(self, output): + """ Checks if issue falls into known issues and attempts to mitigate """ + return True + # endregion \ No newline at end of file diff --git a/src/core/src/package_managers/yum/YumPackageManager.py b/src/core/src/package_managers/yum/YumPackageManager.py new file mode 100644 index 000000000..84c793281 --- /dev/null +++ b/src/core/src/package_managers/yum/YumPackageManager.py @@ -0,0 +1,525 @@ +# Copyright 2020 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""YumPackageManager for Redhat and CentOS"""
import json
import os
import re
from core.src.package_managers.PackageManager import PackageManager
from core.src.bootstrap.Constants import Constants

# do not instantiate directly - these are exclusively for type hinting support
from core.src.bootstrap.EnvLayer import EnvLayer
from core.src.core_logic.ExecutionConfig import ExecutionConfig
from core.src.local_loggers.CompositeLogger import CompositeLogger
from core.src.service_interfaces.TelemetryWriter import TelemetryWriter
from core.src.service_interfaces.StatusHandler import StatusHandler
from core.src.package_managers.PatchModeManager import PatchModeManager
from core.src.package_managers.SourcesManager import SourcesManager
from core.src.package_managers.HealthManager import HealthManager


class YumPackageManager(PackageManager):
    """Implementation of Redhat/CentOS package management operations"""

    def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name):
        # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PatchModeManager, SourcesManager, HealthManager, str) -> None
        super(YumPackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name)
        # Repo refresh: no command needed - refresh is a no-op on yum.

        # Support to get updates and their dependencies
        self.yum_check = 'sudo yum -q check-update'
        self.yum_check_security_prerequisite = 'sudo yum -y install yum-plugin-security'
        self.yum_check_security = 'sudo yum -q --security check-update'
        self.single_package_check_versions = 'sudo yum list available --showduplicates'
        self.single_package_check_installed = 'sudo yum list installed '
        self.single_package_upgrade_simulation_cmd = 'LANG=en_US.UTF8 sudo yum install --assumeno '

        # Install update
        self.single_package_upgrade_cmd = 'sudo yum -y install '
        self.all_but_excluded_upgrade_cmd = 'sudo yum -y update --exclude='

        # Package manager exit code(s)
        self.yum_exitcode_no_applicable_packages = 0
        self.yum_exitcode_ok = 1
        self.yum_exitcode_updates_available = 100

        # Support to check for processes requiring restart
        self.yum_utils_prerequisite = 'sudo yum -y install yum-utils'
        self.needs_restarting = 'sudo LANG=en_US.UTF8 needs-restarting'
        self.needs_restarting_with_flag = 'sudo LANG=en_US.UTF8 needs-restarting -r'
        self.yum_ps_prerequisite = 'sudo yum -y install yum-plugin-ps'
        self.yum_ps = 'sudo yum ps'

        # Miscellaneous
        self.set_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY, Constants.YUM)
        self.STR_TOTAL_DOWNLOAD_SIZE = "Total download size: "

        # if an Auto Patching request comes in on a CentOS machine with Security and/or Critical classifications selected, we need to install all patches
        installation_included_classifications = [] if execution_config.included_classifications_list is None else execution_config.included_classifications_list
        if execution_config.maintenance_run_id is not None and execution_config.operation.lower() == Constants.Op.INSTALLATION.lower() \
                and 'CentOS' in str(env_layer.platform.linux_distribution()) \
                and 'Critical' in installation_included_classifications and 'Security' in installation_included_classifications:
            self.composite_logger.log_debug("Updating classifications list to install all patches for the Auto Patching request since classification based patching is not available on CentOS machines")
            execution_config.included_classifications_list = [Constants.PackageClassification.CRITICAL, Constants.PackageClassification.SECURITY, Constants.PackageClassification.OTHER]

        # Known errors and the corresponding action items
        self.known_errors_and_fixes = {"SSL peer rejected your certificate as expired": self.fix_ssl_certificate_issue,
                                       "Error: Cannot retrieve repository metadata (repomd.xml) for repository": self.fix_ssl_certificate_issue,
                                       "Error: Failed to download metadata for repo": self.fix_ssl_certificate_issue}

        self.yum_update_client_package = "sudo yum update -y --disablerepo='*' --enablerepo='*microsoft*'"

    def refresh_repo(self):
        pass  # Refreshing the repo is a no-op in YUM

    # region Get Available Updates
    def invoke_package_manager_advanced(self, command, raise_on_exception=True):
        """Get missing updates using the command input"""
        self.composite_logger.log_verbose('[YPM] Invoking package manager. [Command={0}]'.format(command))
        code, out = self.env_layer.run_command_output(command, False, False)

        code, out = self.try_mitigate_issues_if_any(command, code, out)

        if code not in [self.yum_exitcode_ok, self.yum_exitcode_no_applicable_packages, self.yum_exitcode_updates_available]:
            self.composite_logger.log_error("[YPM] Package Manager ERROR. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out)))
            self.status_handler.add_error_to_status_and_log_error(message="Unexpected return code from package manager. [Code={0}][Command={1}]".format(str(code), command),
                                                                  raise_exception=bool(raise_on_exception), error_code=Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE)
        else:
            # fix: tag was "[APM]" (apt) - this is the yum package manager
            self.composite_logger.log_verbose("[YPM] Package Manager SUCCESS. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out)))
        return out, code

    # region Classification-based (incl. All) update check
    def get_all_updates(self, cached=False):
        """Get all missing updates"""
        self.composite_logger.log_debug("\nDiscovering all packages...")
        if cached and not len(self.all_updates_cached) == 0:
            self.composite_logger.log_debug(" - Returning cached package data.")
            return self.all_updates_cached, self.all_update_versions_cached  # allows for high performance reuse in areas of the code explicitly aware of the cache

        out = self.invoke_package_manager(self.yum_check)
        self.all_updates_cached, self.all_update_versions_cached = self.extract_packages_and_versions(out)
        self.composite_logger.log_debug("Discovered " + str(len(self.all_updates_cached)) + " package entries.")
        return self.all_updates_cached, self.all_update_versions_cached

    def get_security_updates(self):
        """Get missing security updates"""
        self.composite_logger.log("\nDiscovering 'security' packages...")
        self.install_yum_security_prerequisite()
        out = self.invoke_package_manager(self.yum_check_security)
        security_packages, security_package_versions = self.extract_packages_and_versions(out)

        if len(security_packages) == 0 and 'CentOS' in str(self.env_layer.platform.linux_distribution()):  # deliberately non-terminal
            self.composite_logger.log_warning("Classification-based patching is only supported on YUM if the machine is independently configured to receive classification information.")

        self.composite_logger.log("Discovered " + str(len(security_packages)) + " 'security' package entries.")
        return security_packages, security_package_versions

    def get_other_updates(self):
        """Get missing other updates"""
        self.composite_logger.log("\nDiscovering 'other' packages...")
        other_packages = []
        other_package_versions = []

        all_packages, all_package_versions = self.get_all_updates(True)
        security_packages, security_package_versions = self.get_security_updates()
        if len(security_packages) == 0 and 'CentOS' in str(self.env_layer.platform.linux_distribution()):  # deliberately terminal - erring on the side of caution to avoid dissat in uninformed customers
            self.composite_logger.log_error("Please review patch management documentation for information on classification-based patching on YUM.")
            error_msg = "Classification-based patching is only supported on YUM if the computer is independently configured to receive classification information." \
                        "Please remove classifications from update deployments to CentOS machines to bypass this error."
            self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE)
            raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS))

        for index, package in enumerate(all_packages):
            if package not in security_packages:
                other_packages.append(package)
                other_package_versions.append(all_package_versions[index])

        self.composite_logger.log("Discovered " + str(len(other_packages)) + " 'other' package entries.")
        return other_packages, other_package_versions

    def install_yum_security_prerequisite(self):
        """Not installed by default in versions prior to RHEL 7. This step is idempotent and fast, so we're not writing more complex code."""
        self.composite_logger.log_debug('Ensuring RHEL yum-plugin-security is present.')
        code, out = self.env_layer.run_command_output(self.yum_check_security_prerequisite, False, False)
        self.composite_logger.log_debug(" - Code: " + str(code) + ", Output : \n|\t" + "\n|\t".join(out.splitlines()))
    # endregion

    # region Output Parser(s)
    def extract_packages_and_versions(self, output):
        """Returns packages and versions from given output"""
        packages, versions = self.extract_packages_and_versions_including_duplicates(output)
        packages, versions = self.dedupe_update_packages(packages, versions)
        return packages, versions

    def extract_packages_and_versions_including_duplicates(self, output):
        """Returns packages and versions from given output"""
        self.composite_logger.log_debug("\nExtracting package and version data...")
        packages = []
        versions = []
        package_extensions = ['.x86_64', '.noarch', '.i686']

        def is_package(chunk):
            # Using a list comprehension to determine if chunk is a package
            return len([p for p in package_extensions if p in chunk]) == 1

        lines = output.strip().split('\n')

        # fix: the original used 'for line_index in range(...)' and incremented line_index to skip a
        # consumed continuation line - a no-op in Python's for loop. A while loop makes the skip real.
        line_index = 0
        while line_index < len(lines):
            # Do not install Obsoleting Packages. The obsoleting packages list comes towards end in the output.
            if lines[line_index].strip().startswith("Obsoleting Packages"):
                break

            line = re.split(r'\s+', lines[line_index].strip())
            next_line = []

            if line_index < len(lines) - 1:
                next_line = re.split(r'\s+', lines[line_index + 1].strip())

            # If we run into a length of 3, we'll accept it and continue
            if len(line) == 3 and is_package(line[0]):
                packages.append(self.get_product_name(line[0]))
                versions.append(line[1])
            # We will handle these two edge cases where the output is on
            # two different lines and treat them as one line
            elif len(line) == 1 and len(next_line) == 2 and is_package(line[0]):
                packages.append(self.get_product_name(line[0]))
                versions.append(next_line[0])
                line_index += 1  # consume the continuation line
            elif len(line) == 2 and len(next_line) == 1 and is_package(line[0]):
                packages.append(self.get_product_name(line[0]))
                versions.append(line[1])
                line_index += 1  # consume the continuation line
            else:
                self.composite_logger.log_debug(" - Inapplicable line (" + str(line_index) + "): " + lines[line_index])
            line_index += 1

        return packages, versions
    # endregion
    # endregion

    # region Install Update
    def get_composite_package_identifier(self, package, package_version):
        package_without_arch, arch = self.get_product_name_and_arch(package)
        package_identifier = package_without_arch + '-' + self.get_package_version_without_epoch(package_version)
        if arch is not None:
            package_identifier += arch
        return package_identifier

    def install_updates_fail_safe(self, excluded_packages):
        excluded_string = ""
        for excluded_package in excluded_packages:
            excluded_string += excluded_package + ' '
        cmd = self.all_but_excluded_upgrade_cmd + excluded_string

        self.composite_logger.log_debug("[FAIL SAFE MODE] UPDATING PACKAGES USING COMMAND: " + cmd)
        self.invoke_package_manager(cmd)
    # endregion

    # region Package Information
    def get_all_available_versions_of_package(self, package_name):
        """ Returns a list of all the available versions of a package """
        # Sample output format
        # Available Packages
        # kernel.x86_64  3.10.0-862.el7       base
        # kernel.x86_64  3.10.0-862.2.3.el7   updates
        # kernel.x86_64  3.10.0-862.3.2.el7   updates
        # fix: was self.single_package_check_versions.replace('', package_name) - replacing an
        # empty pattern interleaves package_name between every character of the command string.
        cmd = self.single_package_check_versions + ' ' + package_name
        output = self.invoke_package_manager(cmd)
        packages, package_versions = self.extract_packages_and_versions_including_duplicates(output)
        return package_versions

    def is_package_version_installed(self, package_name, package_version):
        """ Returns true if the specific package version is installed """
        # Loaded plugins: product-id, search-disabled-repos, subscription-manager
        # Installed Packages
        # kernel.x86_64  3.10.0-514.el7  @anaconda/7.3
        self.composite_logger.log_debug("\nCHECKING PACKAGE INSTALL STATUS FOR: " + str(package_name) + " (" + str(package_version) + ")")
        # fix: was self.single_package_check_installed.replace('', package_name) - same empty-pattern
        # replace bug as above; the command already ends with a space, so append the package name.
        cmd = self.single_package_check_installed + package_name
        output = self.invoke_package_manager(cmd)
        packages, package_versions = self.extract_packages_and_versions_including_duplicates(output)

        for index, package in enumerate(packages):
            if package == package_name and (package_versions[index] == package_version):
                self.composite_logger.log_debug(" - Installed version match found.")
                return True
            else:
                self.composite_logger.log_debug(" - Did not match: " + package + " (" + package_versions[index] + ")")

        # NOTE: packages are sometimes removed entirely from the system during installation of
        # other packages, so a miss here is not necessarily an error.
        return False

    def extract_dependencies(self, output, packages):
        # Sample output for the cmd 'sudo yum update --assumeno selinux-policy.noarch' is :
        #
        # Loaded plugins: langpacks, product-id, search-disabled-repos
        # Resolving Dependencies
        # --> Running transaction check
        # ---> Package selinux-policy.noarch 0:3.13.1-102.el7_3.15 will be updated
        # --> Processing Dependency: selinux-policy = 3.13.1-102.el7_3.15 for \
        #     package: selinux-policy-targeted-3.13.1-102.el7_3.15.noarch
        # --> Processing Dependency: selinux-policy = 3.13.1-102.el7_3.15 for \
        #     package: selinux-policy-targeted-3.13.1-102.el7_3.15.noarch
        # ---> Package selinux-policy.noarch 0:3.13.1-102.el7_3.16 will be an update
        # --> Running transaction check
        # ---> Package selinux-policy-targeted.noarch 0:3.13.1-102.el7_3.15 will be updated
        # ---> Package selinux-policy-targeted.noarch 0:3.13.1-102.el7_3.16 will be an update
        # --> Finished Dependency Resolution

        dependencies = []
        lines = output.strip().split('\n')

        for line in lines:
            if line.find(" will be updated") < 0 and line.find(" will be an update") < 0 and line.find(" will be installed") < 0:
                self.composite_logger.log_debug(" - Inapplicable line: " + str(line))
                continue

            updates_line = re.split(r'\s+', line.strip())
            if len(updates_line) != 7:
                self.composite_logger.log_debug(" - Inapplicable line: " + str(line))
                continue

            dependent_package_name = self.get_product_name(updates_line[2])
            if len(dependent_package_name) != 0 and dependent_package_name not in packages:
                self.composite_logger.log_debug(" - Dependency detected: " + dependent_package_name)
                dependencies.append(dependent_package_name)

        return dependencies

    def get_dependent_list(self, packages):
        package_names = ""
        for index, package in enumerate(packages):
            if index != 0:
                package_names += ' '
            package_names += package

        self.composite_logger.log_debug("\nRESOLVING DEPENDENCIES USING COMMAND: " + str(self.single_package_upgrade_simulation_cmd + package_names))
        output = self.invoke_package_manager(self.single_package_upgrade_simulation_cmd + package_names)
        dependencies = self.extract_dependencies(output, packages)
        self.composite_logger.log_debug(str(len(dependencies)) + " dependent packages were found for packages '" + str(packages) + "'.")
        return dependencies

    def get_product_name(self, package_name):
        """Retrieve product name including arch where present"""
        return package_name

    def get_product_name_and_arch(self, package_name):
        """Splits out product name and architecture - if this is changed, modify in PackageFilter also"""
        architectures = ['.x86_64', '.noarch', '.i686']
        for arch in architectures:
            if package_name.endswith(arch):
                return package_name[:-len(arch)], arch
        return package_name, None

    def get_product_name_without_arch(self, package_name):
        """Retrieve product name only"""
        product_name, arch = self.get_product_name_and_arch(package_name)
        return product_name

    def get_product_arch(self, package_name):
        """Retrieve product architecture only"""
        product_name, arch = self.get_product_name_and_arch(package_name)
        return arch

    def get_package_version_without_epoch(self, package_version):
        """Returns the package version stripped of any epoch"""
        package_version_split = str(package_version).split(':', 1)

        if len(package_version_split) == 2:
            self.composite_logger.log_debug(" - Removed epoch from version (" + package_version + "): " + package_version_split[1])
            return package_version_split[1]

        if len(package_version_split) != 1:
            self.composite_logger.log_error("Unexpected error during version epoch removal from: " + package_version)

        return package_version

    def get_package_size(self, output):
        """Retrieve package size from installation output string"""
        # Sample output line:
        # Total download size: 15 M
        if "No packages were marked for update" not in output:
            lines = output.strip().split('\n')
            for line in lines:
                if line.find(self.STR_TOTAL_DOWNLOAD_SIZE) >= 0:
                    return line.replace(self.STR_TOTAL_DOWNLOAD_SIZE, "")

        return Constants.UNKNOWN_PACKAGE_SIZE
    # endregion

    # region Handling known errors
    def try_mitigate_issues_if_any(self, command, code, out):
        """ Attempt to fix the errors occurred while executing a command. Repeat check until no issues found """
        if "Error" in out or "Errno" in out:
            issue_mitigated = self.check_known_issues_and_attempt_fix(out)
            if issue_mitigated:
                self.composite_logger.log_debug('\nPost mitigation, invoking package manager again using: ' + command)
                code_after_fix_attempt, out_after_fix_attempt = self.env_layer.run_command_output(command, False, False)
                return self.try_mitigate_issues_if_any(command, code_after_fix_attempt, out_after_fix_attempt)
        return code, out

    def check_known_issues_and_attempt_fix(self, output):
        """ Checks if issue falls into known issues and attempts to mitigate """
        self.composite_logger.log_debug("Output from package manager containing error: \n|\t" + "\n|\t".join(output.splitlines()))
        self.composite_logger.log_debug("\nChecking if this is a known error...")
        for error in self.known_errors_and_fixes:
            if error in output:
                self.composite_logger.log_debug("\nFound a match within known errors list, attempting a fix...")
                self.known_errors_and_fixes[error]()
                return True

        self.composite_logger.log_debug("\nThis is not a known error for the extension and will require manual intervention")
        return False

    def fix_ssl_certificate_issue(self):
        command = self.yum_update_client_package
        self.composite_logger.log_debug("\nUpdating client package to avoid errors from older certificates using command: [Command={0}]".format(str(command)))
        code, out = self.env_layer.run_command_output(command, False, False)
        if code != self.yum_exitcode_no_applicable_packages:
            self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command)
            self.composite_logger.log_warning(" - Return code from package manager: " + str(code))
            self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines()))
            self.telemetry_writer.write_execution_error(command, code, out)
            error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command
            self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE)
            raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS))
        else:
            self.composite_logger.log_debug("\n\n==[SUCCESS]===============================================================")
            self.composite_logger.log_debug(" - Return code from package manager: " + str(code))
            self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines()))
            self.composite_logger.log_debug("==========================================================================\n\n")
            self.composite_logger.log_debug("\nClient package update complete.")
    # endregion

    # region Reboot Management
    def is_reboot_pending(self):
        """ Checks if there is a pending reboot on the machine. """
        try:
            pending_file_exists = os.path.isfile(self.REBOOT_PENDING_FILE_PATH)  # not intended for yum, but supporting as back-compat
            pending_processes_exist = self.do_processes_require_restart()
            self.composite_logger.log_debug(" - Reboot required debug flags (yum): " + str(pending_file_exists) + ", " + str(pending_processes_exist) + ".")
            return pending_file_exists or pending_processes_exist
        except Exception as error:
            self.composite_logger.log_error('Error while checking for reboot pending (yum): ' + repr(error))
            return True  # defaults for safety

    def do_processes_require_restart(self):
        """Signals whether processes require a restart due to updates"""
        self.composite_logger.log_debug("Checking if process requires reboot")
        # Checking using yum-utils
        self.composite_logger.log_debug("Ensuring yum-utils is present.")
        code, out = self.env_layer.run_command_output(self.yum_utils_prerequisite, False, False)  # idempotent, doesn't install if already present
        self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines()))

        # Checking for restart for distros with -r flag such as RHEL 7+
        code, out = self.env_layer.run_command_output(self.needs_restarting_with_flag, False, False)
        self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines()))
        if out.find("Reboot is required") < 0:
            self.composite_logger.log_debug(" - Reboot not detected to be required (L1).")
        else:
            self.composite_logger.log_debug(" - Reboot is detected to be required (L1).")
            return True

        # Checking for restart for distro without -r flag such as RHEL 6 and CentOS 6
        if str(self.env_layer.platform.linux_distribution()[1]).split('.')[0] == '6':
            code, out = self.env_layer.run_command_output(self.needs_restarting, False, False)
            self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines()))
            if len(out.strip()) == 0 and code == 0:
                self.composite_logger.log_debug(" - Reboot not detected to be required (L2).")
            else:
                self.composite_logger.log_debug(" - Reboot is detected to be required (L2).")
                return True

        # Double-checking using yum ps (where available)
        self.composite_logger.log_debug("Ensuring yum-plugin-ps is present.")
        code, out = self.env_layer.run_command_output(self.yum_ps_prerequisite, False, False)  # idempotent, doesn't install if already present
        self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines()))

        output = self.invoke_package_manager(self.yum_ps)
        lines = output.strip().split('\n')

        process_list_flag = False
        process_count = 0
        process_list_verbose = ""

        for line in lines:
            if not process_list_flag:  # keep going until the process list starts
                if line.find("pid") < 0 and line.find("proc") < 0 and line.find("uptime") < 0:
                    self.composite_logger.log_debug(" - Inapplicable line: " + str(line))
                    continue
                else:
                    self.composite_logger.log_debug(" - Process list started: " + str(line))
                    process_list_flag = True
                    continue

            process_details = re.split(r'\s+', line.strip())
            if len(process_details) < 7:
                self.composite_logger.log_debug(" - Inapplicable line: " + str(line))
                continue
            else:
                # The first string should be process ID and hence it should be integer.
                # If first string is not process ID then the line is not for a process detail.
                try:
                    int(process_details[0])
                except Exception:
                    self.composite_logger.log_debug(" - Inapplicable line: " + str(line))
                    continue

                self.composite_logger.log_debug(" - Applicable line: " + str(line))
                process_count += 1
                process_list_verbose += process_details[1] + " (" + process_details[0] + "), "  # process name and id

        self.composite_logger.log(" - Processes requiring restart (" + str(process_count) + "): [" + process_list_verbose + "]")
        return process_count != 0  # True if there were any
    # endregion Reboot Management

    def add_arch_dependencies(self, package_manager, package, packages, package_versions, package_and_dependencies, package_and_dependency_versions):
        """
        Add the packages with same name as that of input parameter package but with different architectures from packages list to the list package_and_dependencies.
        Parameters:
        package_manager (PackageManager): Package manager used.
        package (string): Input package for which same package name but different architecture need to be added in the list package_and_dependencies.
        packages (List of strings): List of all packages selected by user to install.
        package_versions (List of strings): Versions of packages in packages list.
        package_and_dependencies (List of strings): List of packages along with dependencies. This function adds packages with same name as input parameter package
                                                    but different architecture in this list.
        package_and_dependency_versions (List of strings): Versions of packages in package_and_dependencies.
        """
        package_name_without_arch = package_manager.get_product_name_without_arch(package)
        for possible_arch_dependency, possible_arch_dependency_version in zip(packages, package_versions):
            if package_manager.get_product_name_without_arch(possible_arch_dependency) == package_name_without_arch and possible_arch_dependency not in package_and_dependencies:
                package_and_dependencies.append(possible_arch_dependency)
                package_and_dependency_versions.append(possible_arch_dependency_version)

    def set_security_esm_package_status(self, operation, packages):
        """
        Set the security-ESM classification for the esm packages. Only needed for apt. No-op for yum and zypper.
        """
        pass

    def separate_out_esm_packages(self, packages, package_versions):
        """
        Filter out packages from the list where the version matches the UA_ESM_REQUIRED string.
        Only needed for apt. No-op for yum and zypper.
        """
        esm_packages = []
        esm_package_versions = []
        esm_packages_found = False

        return packages, package_versions, esm_packages, esm_package_versions, esm_packages_found
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,46 +14,18 @@ # # Requires Python 2.7+ -"""YumPackageManager for Redhat and CentOS""" -import json import os import re -from core.src.package_managers.PackageManager import PackageManager +import json from core.src.bootstrap.Constants import Constants +from core.src.package_managers.PatchModeManager import PatchModeManager -class YumPackageManager(PackageManager): - """Implementation of Redhat/CentOS package management operations""" +class YumPatchModeManager(PatchModeManager): + """ Helps with translating PatchModes set by the customer to in-VM configurations """ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): - super(YumPackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) - # Repo refresh - # There is no command as this is a no op. 
- - # Support to get updates and their dependencies - self.yum_check = 'sudo yum -q check-update' - self.yum_check_security_prerequisite = 'sudo yum -y install yum-plugin-security' - self.yum_check_security = 'sudo yum -q --security check-update' - self.single_package_check_versions = 'sudo yum list available --showduplicates' - self.single_package_check_installed = 'sudo yum list installed ' - self.single_package_upgrade_simulation_cmd = 'LANG=en_US.UTF8 sudo yum install --assumeno ' - - # Install update - self.single_package_upgrade_cmd = 'sudo yum -y install ' - self.all_but_excluded_upgrade_cmd = 'sudo yum -y update --exclude=' - - # Package manager exit code(s) - self.yum_exitcode_no_applicable_packages = 0 - self.yum_exitcode_ok = 1 - self.yum_exitcode_updates_available = 100 - - # Support to check for processes requiring restart - self.yum_utils_prerequisite = 'sudo yum -y install yum-utils' - self.needs_restarting = 'sudo LANG=en_US.UTF8 needs-restarting' - self.needs_restarting_with_flag = 'sudo LANG=en_US.UTF8 needs-restarting -r' - self.yum_ps_prerequisite = 'sudo yum -y install yum-plugin-ps' - self.yum_ps = 'sudo yum ps' - + super(YumPatchModeManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.APT) # auto OS updates self.current_auto_os_update_service = None self.os_patch_configuration_settings_file_path = '' @@ -78,312 +50,6 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ # commands for PackageKit service self.__init_constants_for_packagekit() - # Miscellaneous - self.set_package_manager_setting(Constants.PKG_MGR_SETTING_IDENTITY, Constants.YUM) - self.STR_TOTAL_DOWNLOAD_SIZE = "Total download size: " - - # if an Auto Patching request comes in on a CentOS machine with Security and/or Critical classifications selected, we need to install all patches - installation_included_classifications = [] if 
execution_config.included_classifications_list is None else execution_config.included_classifications_list - if execution_config.maintenance_run_id is not None and execution_config.operation.lower() == Constants.INSTALLATION.lower() \ - and 'CentOS' in str(env_layer.platform.linux_distribution()) \ - and 'Critical' in installation_included_classifications and 'Security' in installation_included_classifications: - self.composite_logger.log_debug("Updating classifications list to install all patches for the Auto Patching request since classification based patching is not available on CentOS machines") - execution_config.included_classifications_list = [Constants.PackageClassification.CRITICAL, Constants.PackageClassification.SECURITY, Constants.PackageClassification.OTHER] - - # Known errors and the corresponding action items - self.known_errors_and_fixes = {"SSL peer rejected your certificate as expired": self.fix_ssl_certificate_issue, - "Error: Cannot retrieve repository metadata (repomd.xml) for repository": self.fix_ssl_certificate_issue, - "Error: Failed to download metadata for repo": self.fix_ssl_certificate_issue} - - self.yum_update_client_package = "sudo yum update -y --disablerepo='*' --enablerepo='*microsoft*'" - - def refresh_repo(self): - pass # Refresh the repo is no ops in YUM - - # region Get Available Updates - def invoke_package_manager_advanced(self, command, raise_on_exception=True): - """Get missing updates using the command input""" - self.composite_logger.log_debug('\nInvoking package manager using: ' + command) - code, out = self.env_layer.run_command_output(command, False, False) - - code, out = self.try_mitigate_issues_if_any(command, code, out) - - if code not in [self.yum_exitcode_ok, self.yum_exitcode_no_applicable_packages, self.yum_exitcode_updates_available]: - self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from package manager: " + str(code)) - 
self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.telemetry_writer.write_execution_error(command, code, out) - error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - if raise_on_exception: - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - # more return codes should be added as appropriate - else: # verbose diagnostic log - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from package manager: " + str(code)) - self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") - return out, code - - # region Classification-based (incl. 
All) update check - def get_all_updates(self, cached=False): - """Get all missing updates""" - self.composite_logger.log_debug("\nDiscovering all packages...") - if cached and not len(self.all_updates_cached) == 0: - self.composite_logger.log_debug(" - Returning cached package data.") - return self.all_updates_cached, self.all_update_versions_cached # allows for high performance reuse in areas of the code explicitly aware of the cache - - out = self.invoke_package_manager(self.yum_check) - self.all_updates_cached, self.all_update_versions_cached = self.extract_packages_and_versions(out) - self.composite_logger.log_debug("Discovered " + str(len(self.all_updates_cached)) + " package entries.") - return self.all_updates_cached, self.all_update_versions_cached - - def get_security_updates(self): - """Get missing security updates""" - self.composite_logger.log("\nDiscovering 'security' packages...") - self.install_yum_security_prerequisite() - out = self.invoke_package_manager(self.yum_check_security) - security_packages, security_package_versions = self.extract_packages_and_versions(out) - - if len(security_packages) == 0 and 'CentOS' in str(self.env_layer.platform.linux_distribution()): # deliberately non-terminal - self.composite_logger.log_warning("Classification-based patching is only supported on YUM if the machine is independently configured to receive classification information.") - - self.composite_logger.log("Discovered " + str(len(security_packages)) + " 'security' package entries.") - return security_packages, security_package_versions - - def get_other_updates(self): - """Get missing other updates""" - self.composite_logger.log("\nDiscovering 'other' packages...") - other_packages = [] - other_package_versions = [] - - all_packages, all_package_versions = self.get_all_updates(True) - security_packages, security_package_versions = self.get_security_updates() - if len(security_packages) == 0 and 'CentOS' in str(self.env_layer.platform.linux_distribution()): # 
deliberately terminal - erring on the side of caution to avoid dissat in uninformed customers - self.composite_logger.log_error("Please review patch management documentation for information on classification-based patching on YUM.") - error_msg = "Classification-based patching is only supported on YUM if the computer is independently configured to receive classification information." \ - "Please remove classifications from update deployments to CentOS machines to bypass this error." - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - - for index, package in enumerate(all_packages): - if package not in security_packages: - other_packages.append(package) - other_package_versions.append(all_package_versions[index]) - - self.composite_logger.log("Discovered " + str(len(other_packages)) + " 'other' package entries.") - return other_packages, other_package_versions - - def install_yum_security_prerequisite(self): - """Not installed by default in versions prior to RHEL 7. 
This step is idempotent and fast, so we're not writing more complex code.""" - self.composite_logger.log_debug('Ensuring RHEL yum-plugin-security is present.') - code, out = self.env_layer.run_command_output(self.yum_check_security_prerequisite, False, False) - self.composite_logger.log_debug(" - Code: " + str(code) + ", Output : \n|\t" + "\n|\t".join(out.splitlines())) - # endregion - - # region Output Parser(s) - def extract_packages_and_versions(self, output): - """Returns packages and versions from given output""" - packages, versions = self.extract_packages_and_versions_including_duplicates(output) - packages, versions = self.dedupe_update_packages(packages, versions) - return packages, versions - - def extract_packages_and_versions_including_duplicates(self, output): - """Returns packages and versions from given output""" - self.composite_logger.log_debug("\nExtracting package and version data...") - packages = [] - versions = [] - package_extensions = ['.x86_64', '.noarch', '.i686'] - - def is_package(chunk): - # Using a list comprehension to determine if chunk is a package - return len([p for p in package_extensions if p in chunk]) == 1 - - lines = output.strip().split('\n') - - for line_index in range(0, len(lines)): - # Do not install Obsoleting Packages. The obsoleting packages list comes towards end in the output. 
- if lines[line_index].strip().startswith("Obsoleting Packages"): - break - - line = re.split(r'\s+', lines[line_index].strip()) - next_line = [] - - if line_index < len(lines) - 1: - next_line = re.split(r'\s+', lines[line_index + 1].strip()) - - # If we run into a length of 3, we'll accept it and continue - if len(line) == 3 and is_package(line[0]): - packages.append(self.get_product_name(line[0])) - versions.append(line[1]) - # We will handle these two edge cases where the output is on - # two different lines and treat them as one line - elif len(line) == 1 and len(next_line) == 2 and is_package(line[0]): - packages.append(self.get_product_name(line[0])) - versions.append(next_line[0]) - line_index += 1 - elif len(line) == 2 and len(next_line) == 1 and is_package(line[0]): - packages.append(self.get_product_name(line[0])) - versions.append(line[1]) - line_index += 1 - else: - self.composite_logger.log_debug(" - Inapplicable line (" + str(line_index) + "): " + lines[line_index]) - - return packages, versions - # endregion - # endregion - - # region Install Update - def get_composite_package_identifier(self, package, package_version): - package_without_arch, arch = self.get_product_name_and_arch(package) - package_identifier = package_without_arch + '-' + self.get_package_version_without_epoch(package_version) - if arch is not None: - package_identifier += arch - return package_identifier - - def install_updates_fail_safe(self, excluded_packages): - excluded_string = "" - for excluded_package in excluded_packages: - excluded_string += excluded_package + ' ' - cmd = self.all_but_excluded_upgrade_cmd + excluded_string - - self.composite_logger.log_debug("[FAIL SAFE MODE] UPDATING PACKAGES USING COMMAND: " + cmd) - self.invoke_package_manager(cmd) - # endregion - - # region Package Information - def get_all_available_versions_of_package(self, package_name): - """ Returns a list of all the available versions of a package """ - # Sample output format - # Available 
Packages - # kernel.x86_64 3.10.0-862.el7 base - # kernel.x86_64 3.10.0-862.2.3.el7 updates - # kernel.x86_64 3.10.0-862.3.2.el7 updates - cmd = self.single_package_check_versions.replace('', package_name) - output = self.invoke_package_manager(cmd) - packages, package_versions = self.extract_packages_and_versions_including_duplicates(output) - return package_versions - - def is_package_version_installed(self, package_name, package_version): - """ Returns true if the specific package version is installed """ - # Loaded plugins: product-id, search-disabled-repos, subscription-manager - # Installed Packages - # kernel.x86_64 3.10.0-514.el7 @anaconda/7.3 - self.composite_logger.log_debug("\nCHECKING PACKAGE INSTALL STATUS FOR: " + str(package_name) + " (" + str(package_version) + ")") - cmd = self.single_package_check_installed.replace('', package_name) - output = self.invoke_package_manager(cmd) - packages, package_versions = self.extract_packages_and_versions_including_duplicates(output) - - for index, package in enumerate(packages): - if package == package_name and (package_versions[index] == package_version): - self.composite_logger.log_debug(" - Installed version match found.") - return True - else: - self.composite_logger.log_debug(" - Did not match: " + package + " (" + package_versions[index] + ")") - - # sometimes packages are removed entirely from the system during installation of other packages - # so let's check that the package is still needed before - - return False - - def extract_dependencies(self, output, packages): - # Sample output for the cmd 'sudo yum update --assumeno selinux-policy.noarch' is : - # - # Loaded plugins: langpacks, product-id, search-disabled-repos - # Resolving Dependencies - # --> Running transaction check - # ---> Package selinux-policy.noarch 0:3.13.1-102.el7_3.15 will be updated - # --> Processing Dependency: selinux-policy = 3.13.1-102.el7_3.15 for \ - # package: selinux-policy-targeted-3.13.1-102.el7_3.15.noarch - # --> 
Processing Dependency: selinux-policy = 3.13.1-102.el7_3.15 for \ - # package: selinux-policy-targeted-3.13.1-102.el7_3.15.noarch - # ---> Package selinux-policy.noarch 0:3.13.1-102.el7_3.16 will be an update - # --> Running transaction check - # ---> Package selinux-policy-targeted.noarch 0:3.13.1-102.el7_3.15 will be updated - # ---> Package selinux-policy-targeted.noarch 0:3.13.1-102.el7_3.16 will be an update - # --> Finished Dependency Resolution - - dependencies = [] - lines = output.strip().split('\n') - - for line in lines: - if line.find(" will be updated") < 0 and line.find(" will be an update") < 0 and line.find(" will be installed") < 0: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - continue - - updates_line = re.split(r'\s+', line.strip()) - if len(updates_line) != 7: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - continue - - dependent_package_name = self.get_product_name(updates_line[2]) - if len(dependent_package_name) != 0 and dependent_package_name not in packages: - self.composite_logger.log_debug(" - Dependency detected: " + dependent_package_name) - dependencies.append(dependent_package_name) - - return dependencies - - def get_dependent_list(self, packages): - package_names = "" - for index, package in enumerate(packages): - if index != 0: - package_names += ' ' - package_names += package - - self.composite_logger.log_debug("\nRESOLVING DEPENDENCIES USING COMMAND: " + str(self.single_package_upgrade_simulation_cmd + package_names)) - output = self.invoke_package_manager(self.single_package_upgrade_simulation_cmd + package_names) - dependencies = self.extract_dependencies(output, packages) - self.composite_logger.log_debug(str(len(dependencies)) + " dependent packages were found for packages '" + str(packages) + "'.") - return dependencies - - def get_product_name(self, package_name): - """Retrieve product name including arch where present""" - return package_name - - def 
get_product_name_and_arch(self, package_name): - """Splits out product name and architecture - if this is changed, modify in PackageFilter also""" - architectures = ['.x86_64', '.noarch', '.i686'] - for arch in architectures: - if package_name.endswith(arch): - return package_name[:-len(arch)], arch - return package_name, None - - def get_product_name_without_arch(self, package_name): - """Retrieve product name only""" - product_name, arch = self.get_product_name_and_arch(package_name) - return product_name - - def get_product_arch(self, package_name): - """Retrieve product architecture only""" - product_name, arch = self.get_product_name_and_arch(package_name) - return arch - - def get_package_version_without_epoch(self, package_version): - """Returns the package version stripped of any epoch""" - package_version_split = str(package_version).split(':', 1) - - if len(package_version_split) == 2: - self.composite_logger.log_debug(" - Removed epoch from version (" + package_version + "): " + package_version_split[1]) - return package_version_split[1] - - if len(package_version_split) != 1: - self.composite_logger.log_error("Unexpected error during version epoch removal from: " + package_version) - - return package_version - - def get_package_size(self, output): - """Retrieve package size from installation output string""" - # Sample output line: - # Total download size: 15 M - if "No packages were marked for update" not in output: - lines = output.strip().split('\n') - for line in lines: - if line.find(self.STR_TOTAL_DOWNLOAD_SIZE) >= 0: - return line.replace(self.STR_TOTAL_DOWNLOAD_SIZE, "") - - return Constants.UNKNOWN_PACKAGE_SIZE - # endregion - # region auto OS updates def __init_constants_for_yum_cron(self): self.yum_cron_configuration_settings_file_path = '/etc/yum/yum-cron.conf' @@ -426,7 +92,7 @@ def get_current_auto_os_patch_state(self): current_auto_os_patch_state_for_dnf_automatic = self.__get_current_auto_os_patch_state_for_dnf_automatic() 
current_auto_os_patch_state_for_packagekit = self.__get_current_auto_os_patch_state_for_packagekit() - self.composite_logger.log("OS patch state per auto OS update service: [yum-cron={0}] [dnf-automatic={1}] [packagekit={2}]" + self.composite_logger.log("OS patch state per auto OS update service: [yum-cron={0}][dnf-automatic={1}][packagekit={2}]" .format(str(current_auto_os_patch_state_for_yum_cron), str(current_auto_os_patch_state_for_dnf_automatic), str(current_auto_os_patch_state_for_packagekit))) if current_auto_os_patch_state_for_yum_cron == Constants.AutomaticOSPatchStates.ENABLED \ @@ -651,7 +317,7 @@ def backup_image_default_patch_configuration_if_not_exists(self): # verify if existing backup is valid if not, write to backup is_backup_valid = self.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup) if is_backup_valid: - self.composite_logger.log_debug("Since extension has a valid backup, no need to log the current settings again. [Default Auto OS update settings={0}] [File path={1}]" + self.composite_logger.log_debug("Since extension has a valid backup, no need to log the current settings again. [Default Auto OS update settings={0}][File path={1}]" .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) else: self.composite_logger.log_debug("Since the backup is invalid, will add a new backup with the current auto OS update settings") @@ -669,7 +335,7 @@ def backup_image_default_patch_configuration_if_not_exists(self): image_default_patch_configuration_backup.update(backup_image_default_patch_configuration_json_to_add) - self.composite_logger.log_debug("Logging default system configuration settings for auto OS updates. [Settings={0}] [Log file path={1}]" + self.composite_logger.log_debug("Logging default system configuration settings for auto OS updates. 
[Settings={0}][Log file path={1}]" .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) self.env_layer.file_system.write_with_retry(self.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') except Exception as error: @@ -773,7 +439,7 @@ def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_sett """ Updates (or adds if it doesn't exist) the given patch_configuration_sub_setting with the given value in os_patch_configuration_settings_file """ try: # note: adding space between the patch_configuration_sub_setting and value since, we will have to do that if we have to add a patch_configuration_sub_setting that did not exist before - self.composite_logger.log_debug("Updating system configuration settings for auto OS updates. [Patch Configuration Sub Setting={0}] [Value={1}]".format(str(patch_configuration_sub_setting), value)) + self.composite_logger.log_debug("Updating system configuration settings for auto OS updates. [Patch Configuration Sub Setting={0}][Value={1}]".format(str(patch_configuration_sub_setting), value)) os_patch_configuration_settings = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) patch_configuration_sub_setting_to_update = patch_configuration_sub_setting + ' = ' + value patch_configuration_sub_setting_found_in_file = False @@ -794,7 +460,7 @@ def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_sett self.env_layer.file_system.write_with_retry(self.os_patch_configuration_settings_file_path, '{0}'.format(updated_patch_configuration_sub_setting.lstrip()), mode='w+') except Exception as error: - error_msg = "Error occurred while updating system configuration settings for auto OS updates. 
[Patch Configuration={0}] [Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) + error_msg = "Error occurred while updating system configuration settings for auto OS updates. [Patch Configuration={0}][Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) self.composite_logger.log_error(error_msg) self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) raise @@ -826,164 +492,3 @@ def is_auto_update_service_installed(self, install_check_cmd): return False # endregion - # region Handling known errors - def try_mitigate_issues_if_any(self, command, code, out): - """ Attempt to fix the errors occurred while executing a command. Repeat check until no issues found """ - if "Error" in out or "Errno" in out: - issue_mitigated = self.check_known_issues_and_attempt_fix(out) - if issue_mitigated: - self.composite_logger.log_debug('\nPost mitigation, invoking package manager again using: ' + command) - code_after_fix_attempt, out_after_fix_attempt = self.env_layer.run_command_output(command, False, False) - return self.try_mitigate_issues_if_any(command, code_after_fix_attempt, out_after_fix_attempt) - return code, out - - def check_known_issues_and_attempt_fix(self, output): - """ Checks if issue falls into known issues and attempts to mitigate """ - self.composite_logger.log_debug("Output from package manager containing error: \n|\t" + "\n|\t".join(output.splitlines())) - self.composite_logger.log_debug("\nChecking if this is a known error...") - for error in self.known_errors_and_fixes: - if error in output: - self.composite_logger.log_debug("\nFound a match within known errors list, attempting a fix...") - self.known_errors_and_fixes[error]() - return True - - self.composite_logger.log_debug("\nThis is not a known error for the extension and will require manual intervention") - return False - - def fix_ssl_certificate_issue(self): - command = self.yum_update_client_package - 
self.composite_logger.log_debug("\nUpdating client package to avoid errors from older certificates using command: [Command={0}]".format(str(command))) - code, out = self.env_layer.run_command_output(command, False, False) - if code != self.yum_exitcode_no_applicable_packages: - self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from package manager: " + str(code)) - self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.telemetry_writer.write_execution_error(command, code, out) - error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - else: - self.composite_logger.log_debug("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from package manager: " + str(code)) - self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_debug("==========================================================================\n\n") - self.composite_logger.log_debug("\nClient package update complete.") - # endregion - - # region Reboot Management - def is_reboot_pending(self): - """ Checks if there is a pending reboot on the machine. 
""" - try: - pending_file_exists = os.path.isfile(self.REBOOT_PENDING_FILE_PATH) # not intended for yum, but supporting as back-compat - pending_processes_exist = self.do_processes_require_restart() - self.composite_logger.log_debug(" - Reboot required debug flags (yum): " + str(pending_file_exists) + ", " + str(pending_processes_exist) + ".") - return pending_file_exists or pending_processes_exist - except Exception as error: - self.composite_logger.log_error('Error while checking for reboot pending (yum): ' + repr(error)) - return True # defaults for safety - - def do_processes_require_restart(self): - """Signals whether processes require a restart due to updates""" - self.composite_logger.log_debug("Checking if process requires reboot") - # Checking using yum-utils - self.composite_logger.log_debug("Ensuring yum-utils is present.") - code, out = self.env_layer.run_command_output(self.yum_utils_prerequisite, False, False) # idempotent, doesn't install if already present - self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines())) - - # Checking for restart for distros with -r flag such as RHEL 7+ - code, out = self.env_layer.run_command_output(self.needs_restarting_with_flag, False, False) - self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines())) - if out.find("Reboot is required") < 0: - self.composite_logger.log_debug(" - Reboot not detected to be required (L1).") - else: - self.composite_logger.log_debug(" - Reboot is detected to be required (L1).") - return True - - # Checking for restart for distro without -r flag such as RHEL 6 and CentOS 6 - if str(self.env_layer.platform.linux_distribution()[1]).split('.')[0] == '6': - code, out = self.env_layer.run_command_output(self.needs_restarting, False, False) - self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines())) - if len(out.strip()) == 0 and code == 0: 
- self.composite_logger.log_debug(" - Reboot not detected to be required (L2).") - else: - self.composite_logger.log_debug(" - Reboot is detected to be required (L2).") - return True - - # Double-checking using yum ps (where available) - self.composite_logger.log_debug("Ensuring yum-plugin-ps is present.") - code, out = self.env_layer.run_command_output(self.yum_ps_prerequisite, False, False) # idempotent, doesn't install if already present - self.composite_logger.log_debug(" - Code: " + str(code) + ", Output: \n|\t" + "\n|\t".join(out.splitlines())) - - output = self.invoke_package_manager(self.yum_ps) - lines = output.strip().split('\n') - - process_list_flag = False - process_count = 0 - process_list_verbose = "" - - for line in lines: - if not process_list_flag: # keep going until the process list starts - if line.find("pid") < 0 and line.find("proc") < 0 and line.find("uptime") < 0: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - continue - else: - self.composite_logger.log_debug(" - Process list started: " + str(line)) - process_list_flag = True - continue - - process_details = re.split(r'\s+', line.strip()) - if len(process_details) < 7: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - continue - else: - # The first string should be process ID and hence it should be integer. - # If first string is not process ID then the line is not for a process detail. 
- try: - int(process_details[0]) - except Exception: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - continue - - self.composite_logger.log_debug(" - Applicable line: " + str(line)) - process_count += 1 - process_list_verbose += process_details[1] + " (" + process_details[0] + "), " # process name and id - - self.composite_logger.log(" - Processes requiring restart (" + str(process_count) + "): [" + process_list_verbose + "]") - return process_count != 0 # True if there were any - # endregion Reboot Management - - def add_arch_dependencies(self, package_manager, package, packages, package_versions, package_and_dependencies, package_and_dependency_versions): - """ - Add the packages with same name as that of input parameter package but with different architectures from packages list to the list package_and_dependencies. - Parameters: - package_manager (PackageManager): Package manager used. - package (string): Input package for which same package name but different architecture need to be added in the list package_and_dependencies. - packages (List of strings): List of all packages selected by user to install. - package_versions (List of strings): Versions of packages in packages list. - package_and_dependencies (List of strings): List of packages along with dependencies. This function adds packages with same name as input parameter package - but different architecture in this list. - package_and_dependency_versions (List of strings): Versions of packages in package_and_dependencies. 
- """ - package_name_without_arch = package_manager.get_product_name_without_arch(package) - for possible_arch_dependency, possible_arch_dependency_version in zip(packages, package_versions): - if package_manager.get_product_name_without_arch(possible_arch_dependency) == package_name_without_arch and possible_arch_dependency not in package_and_dependencies: - package_and_dependencies.append(possible_arch_dependency) - package_and_dependency_versions.append(possible_arch_dependency_version) - - def set_security_esm_package_status(self, operation, packages): - """ - Set the security-ESM classification for the esm packages. Only needed for apt. No-op for yum and zypper. - """ - pass - - def separate_out_esm_packages(self, packages, package_versions): - """ - Filter out packages from the list where the version matches the UA_ESM_REQUIRED string. - Only needed for apt. No-op for yum and zypper - """ - esm_packages = [] - esm_package_versions = [] - esm_packages_found = False - - return packages, package_versions, esm_packages, esm_package_versions, esm_packages_found - diff --git a/src/core/src/package_managers/yum/YumSourcesManager.py b/src/core/src/package_managers/yum/YumSourcesManager.py new file mode 100644 index 000000000..9cea16ead --- /dev/null +++ b/src/core/src/package_managers/yum/YumSourcesManager.py @@ -0,0 +1,32 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.SourcesManager import SourcesManager + + +class YumSourcesManager(SourcesManager): + """ Helps with sources list management for Yum """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(YumSourcesManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.YUM) + pass + + def function_name(self): + pass + + + +# https://manpages.debian.org/jessie/apt/sources.list.5.en.html#:~:text=The%20source%20list%20%2Fetc%2Fapt%2Fsources.list%20is%20designed%20to%20support,by%20an%20equivalent%20command%20from%20another%20APT%20front-end%29. \ No newline at end of file diff --git a/src/core/src/package_managers/yum/__init__.py b/src/core/src/package_managers/yum/__init__.py new file mode 100644 index 000000000..e96580122 --- /dev/null +++ b/src/core/src/package_managers/yum/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ \ No newline at end of file diff --git a/src/core/src/package_managers/zypper/ZypperHealthManager.py b/src/core/src/package_managers/zypper/ZypperHealthManager.py new file mode 100644 index 000000000..e3a8c7213 --- /dev/null +++ b/src/core/src/package_managers/zypper/ZypperHealthManager.py @@ -0,0 +1,38 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.HealthManager import HealthManager + + +class ZypperHealthManager(HealthManager): + """ Helps with attempting automatic environment health restoration for zypper where feasible to improve operation success rates """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(ZypperHealthManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.ZYPPER) + pass + + def function_name(self): + pass + + # region Handling known errors + def try_mitigate_issues_if_any(self, command, code, out): + """ Attempt to fix the errors occurred while executing a command. 
Repeat check until no issues found """ + pass + + def check_known_issues_and_attempt_fix(self, output): + """ Checks if issue falls into known issues and attempts to mitigate """ + return True + # endregion \ No newline at end of file diff --git a/src/core/src/package_managers/ZypperPackageManager.py b/src/core/src/package_managers/zypper/ZypperPackageManager.py similarity index 54% rename from src/core/src/package_managers/ZypperPackageManager.py rename to src/core/src/package_managers/zypper/ZypperPackageManager.py index 5f172d40d..0ca1bbed4 100644 --- a/src/core/src/package_managers/ZypperPackageManager.py +++ b/src/core/src/package_managers/zypper/ZypperPackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -22,21 +22,23 @@ from core.src.package_managers.PackageManager import PackageManager from core.src.bootstrap.Constants import Constants +# do not instantiate directly - these are exclusively for type hinting support +from core.src.bootstrap.EnvLayer import EnvLayer +from core.src.core_logic.ExecutionConfig import ExecutionConfig +from core.src.local_loggers.CompositeLogger import CompositeLogger +from core.src.service_interfaces.TelemetryWriter import TelemetryWriter +from core.src.service_interfaces.StatusHandler import StatusHandler +from core.src.package_managers.PatchModeManager import PatchModeManager +from core.src.package_managers.SourcesManager import SourcesManager +from core.src.package_managers.HealthManager import HealthManager + class ZypperPackageManager(PackageManager): """Implementation of SUSE package management operations""" - class ZypperAutoOSUpdateServices(Constants.EnumBackport): - YAST2_ONLINE_UPDATE_CONFIGURATION = 
"yast2-online-update-configuration" - - class YastOnlineUpdateConfigurationConstants(Constants.EnumBackport): - OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = '/etc/sysconfig/automatic_online_update' - APPLY_UPDATES_IDENTIFIER_TEXT = 'AOU_ENABLE_CRONJOB' - AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT = '="(true|false)"' - INSTALLATION_STATE_IDENTIFIER_TEXT = "installation_state" - - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): - super(ZypperPackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name): + # type: (EnvLayer, ExecutionConfig, CompositeLogger, TelemetryWriter, StatusHandler, PatchModeManager, SourcesManager, HealthManager, str) -> None + super(ZypperPackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, patch_mode_manager, sources_manager, health_manager, package_manager_name) # Repo refresh self.repo_clean = 'sudo zypper clean -a' self.repo_refresh = 'sudo zypper refresh' @@ -63,7 +65,7 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.zypper_exitcode_zypper_updated = 103 self.zypper_exitcode_repos_skipped = 106 self.zypper_success_exit_codes = [self.zypper_exitcode_ok, self.zypper_exitcode_zypper_updated, self.zypper_exitcode_reboot_required] - self.zypper_retriable_exit_codes = [self.zypper_exitcode_zypp_locked, self.zypper_exitcode_zypp_lib_exit_err, self.zypper_exitcode_repos_skipped] + self.zypper_retryable_exit_codes = [self.zypper_exitcode_zypp_locked, self.zypper_exitcode_zypp_lib_exit_err, self.zypper_exitcode_repos_skipped] # Additional output messages that corresponds with exit code 103 self.zypper_out_zypper_updated_msg = 'Warning: One of the installed patches affects the 
package manager itself. Run this command once more to install any other needed patches.' @@ -77,16 +79,6 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.package_manager_max_retries = 5 self.zypp_lock_timeout_backup = None - # auto OS updates - self.current_auto_os_update_service = None - self.os_patch_configuration_settings_file_path = '' - self.auto_update_config_pattern_match_text = "" - self.apply_updates_identifier_text = "" - self.installation_state_identifier_text = "" - - # # commands for YaST2 online update configuration - # self.__init_constants_for_yast2_online_update_configuration() - def refresh_repo(self): self.composite_logger.log("Refreshing local repo...") # self.invoke_package_manager(self.repo_clean) # purges local metadata for rebuild - addresses a possible customer environment error @@ -95,10 +87,10 @@ def refresh_repo(self): except Exception as error: # Reboot if not already done if self.status_handler.get_installation_reboot_status() == Constants.RebootStatus.COMPLETED: - self.composite_logger.log_warning("Unable to refresh repo (retries exhausted after reboot).") + self.composite_logger.log_warning("[ZPM] Unable to refresh repo (retries exhausted after reboot).") raise else: - self.composite_logger.log_warning("Setting force_reboot flag to True.") + self.composite_logger.log_warning("[ZPM] Setting force_reboot flag to True.") self.force_reboot = True def __refresh_repo_services(self): @@ -109,16 +101,16 @@ def __refresh_repo_services(self): except Exception as error: # Reboot if not already done if self.status_handler.get_installation_reboot_status() == Constants.RebootStatus.COMPLETED: - self.composite_logger.log_warning("Unable to refresh repo services (retries exhausted after reboot).") + self.composite_logger.log_warning("[ZPM] Unable to refresh repo services (retries exhausted after reboot).") raise else: - self.composite_logger.log_warning("Setting force_reboot flag to True after refreshing repo 
services.") + self.composite_logger.log_warning("[ZPM] Setting force_reboot flag to True after refreshing repo services.") self.force_reboot = True # region Get Available Updates def invoke_package_manager_advanced(self, command, raise_on_exception=True): """Get missing updates using the command input""" - self.composite_logger.log_debug('\nInvoking package manager using: ' + command) + self.composite_logger.log_debug('[ZPM] Invoking package manager. [Command={0}]'.format(command)) repo_refresh_services_attempted = False for i in range(1, self.package_manager_max_retries + 1): @@ -143,26 +135,18 @@ def invoke_package_manager_advanced(self, command, raise_on_exception=True): self.composite_logger.log_debug("Retrying with modified command to replace files: {0}".format(str(command))) continue - self.log_errors_on_invoke(command, out, code) - error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - - # Not a retriable error code, so raise an exception - if code not in self.zypper_retriable_exit_codes and raise_on_exception: - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - - # Retriable error code, so check number of retries and wait then retry if applicable; otherwise, raise error after max retries - if i < self.package_manager_max_retries: - self.composite_logger.log_warning("Exception on package manager invoke. [Exception={0}] [RetryCount={1}]".format(error_msg, str(i))) + # Retryable error code, so check number of retries and wait then retry if applicable; otherwise, raise error after max retries + if i < self.package_manager_max_retries and code in self.zypper_retryable_exit_codes: + self.composite_logger.log_verbose("[ZPM] Retryable Package Manager ERROR. 
[Command={0}][Code={1}][Output={2}][RetryCount={3}]".format(command, str(code), str(out), str(i))) time.sleep(pow(2, i + 2)) continue else: - error_msg = "Unable to invoke package manager (retries exhausted) [{0}] [RetryCount={1}]".format(error_msg, str(i)) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) - if raise_on_exception: - raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) - else: # verbose diagnostic log - self.log_success_on_invoke(code, out) + self.composite_logger.log_error("[ZPM] Package Manager ERROR. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) + self.status_handler.add_error_to_status_and_log_error(message="Unexpected return code from package manager. [Code={0}][Command={1}]".format(str(code), command), + raise_exception=bool(code not in self.zypper_retryable_exit_codes and raise_on_exception), error_code=Constants.PatchOperationErrorCodes.CL_PACKAGE_MANAGER_FAILURE) + self.log_process_tree_if_exists(out) + else: + self.composite_logger.log_verbose("[ZPM] Package Manager SUCCESS. [Command={0}][Code={1}][Output={2}]".format(command, str(code), str(out))) self.__handle_zypper_updated_or_reboot_exit_codes(command, out, code) @@ -172,18 +156,14 @@ def __handle_zypper_updated_or_reboot_exit_codes(self, command, out, code): """ Handles exit code 102 or 103 when returned from invoking package manager. Does not repeat installation or reboot if it is a dry run. """ if "--dry-run" in command: - self.composite_logger.log_debug( - " - Exit code {0} detected from command \"{1}\", but it was a dry run. Continuing execution without performing additional actions.".format( - str(code), command)) + self.composite_logger.log_debug(" - Exit code {0} detected from command \"{1}\", but it was a dry run. 
Continuing execution without performing additional actions.".format(str(code), command)) return if code == self.zypper_exitcode_zypper_updated or self.zypper_out_zypper_updated_msg in out: - self.composite_logger.log_debug( - " - One of the installed patches affects the package manager itself. Patch installation run will be repeated.") + self.composite_logger.log_debug(" - One of the installed patches affects the package manager itself. Patch installation run will be repeated.") self.set_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, True) elif code == self.zypper_exitcode_reboot_required: - self.composite_logger.log_warning( - " - Machine requires reboot after patch installation. Setting force_reboot flag to True.") + self.composite_logger.log_warning(" - Machine requires reboot after patch installation. Setting force_reboot flag to True.") self.force_reboot = True def modify_upgrade_or_patch_command_to_replacefiles(self, command): @@ -197,38 +177,22 @@ def modify_upgrade_or_patch_command_to_replacefiles(self, command): elif self.zypper_install_security_patches in command: return command.replace(self.zypper_install_security_patches, self.zypper_install_security_patches + ' --replacefiles') - def log_errors_on_invoke(self, command, out, code): - """Logs verbose error messages if there is an error on invoke_package_manager""" - self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from package manager: " + str(code)) - self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.log_process_tree_if_exists(out) - self.telemetry_writer.write_execution_error(command, code, out) - - def log_success_on_invoke(self, code, out): - """Logs verbose success messages on invoke_package_manager""" - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - 
self.composite_logger.log_debug(" - Return code from package manager: " + str(code)) - self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") - def log_process_tree_if_exists(self, out): """Logs the process tree based on locking PID in output, if there is a process tree to be found""" process_tree = self.get_process_tree_from_pid_in_output(out) if process_tree is not None: - self.composite_logger.log_warning(" - Process tree for the pid in output: \n{}".format(str(process_tree))) + self.composite_logger.log_verbose("[ZPM] Process tree for the PID in output: \n{0}".format(str(process_tree))) def set_lock_timeout_and_backup_original(self): """Saves the env var ZYPP_LOCK_TIMEOUT and sets it to 5""" self.zypp_lock_timeout_backup = self.env_layer.get_env_var('ZYPP_LOCK_TIMEOUT') - self.composite_logger.log_debug("Original value of ZYPP_LOCK_TIMEOUT env var: {0}".format(str(self.zypp_lock_timeout_backup))) + self.composite_logger.log_verbose("[ZPM] Original value of ZYPP_LOCK_TIMEOUT env var: {0}".format(str(self.zypp_lock_timeout_backup))) self.env_layer.set_env_var('ZYPP_LOCK_TIMEOUT', 5) def restore_original_lock_timeout(self): """Restores the original value of the env var ZYPP_LOCK_TIMEOUT, if any was saved""" if self.zypp_lock_timeout_backup is None: - self.composite_logger.log_debug("Attempted to restore original lock timeout when none was saved") - + self.composite_logger.log_debug("[ZPM] Attempted to restore original lock timeout when none was saved.") self.env_layer.set_env_var('ZYPP_LOCK_TIMEOUT', self.zypp_lock_timeout_backup) self.zypp_lock_timeout_backup = None @@ -284,19 +248,19 @@ def get_process_tree_from_pid_in_output(self, message): # region Classification-based (incl. 
All) update check def get_all_updates(self, cached=False): """Get all missing updates""" - self.composite_logger.log_debug("\nDiscovering all packages...") + self.composite_logger.log_verbose("[ZPM] Discovering all packages...") if cached and not len(self.all_updates_cached) == 0: - self.composite_logger.log_debug(" - Returning cached package data.") + self.composite_logger.log_verbose(" - Returning cached package data.") return self.all_updates_cached, self.all_update_versions_cached # allows for high performance reuse in areas of the code explicitly aware of the cache out = self.invoke_package_manager(self.zypper_check) self.all_updates_cached, self.all_update_versions_cached = self.extract_packages_and_versions(out) - self.composite_logger.log_debug("Discovered " + str(len(self.all_updates_cached)) + " package entries.") + self.composite_logger.log_debug("[ZPM] Discovered " + str(len(self.all_updates_cached)) + " package entries.") return self.all_updates_cached, self.all_update_versions_cached def get_security_updates(self): """Get missing security updates""" - self.composite_logger.log_debug("\nDiscovering 'security' packages...") + self.composite_logger.log_verbose("[ZPM] Discovering 'security' packages...") security_packages = [] security_package_versions = [] @@ -311,14 +275,14 @@ def get_security_updates(self): if package in packages_from_patch_data: security_packages.append(package) security_package_versions.append(all_package_versions[index]) - self.composite_logger.log_debug(" - " + str(package) + " [" + str(all_package_versions[index]) + "]") + self.composite_logger.log_verbose(" - " + str(package) + " [" + str(all_package_versions[index]) + "]") - self.composite_logger.log_debug("Discovered " + str(len(security_packages)) + " 'security' package entries.\n") + self.composite_logger.log_debug("[ZPM] Discovered " + str(len(security_packages)) + " 'security' package entries.\n") return security_packages, security_package_versions def 
get_other_updates(self): """Get missing other updates""" - self.composite_logger.log_debug("\nDiscovering 'other' packages...") + self.composite_logger.log_verbose("[ZPM] Discovering 'other' packages...") other_packages = [] other_package_versions = [] @@ -328,8 +292,7 @@ def get_other_updates(self): # SPECIAL CONDITION IF ZYPPER UPDATE IS DETECTED - UNAVOIDABLE SECURITY UPDATE(S) WILL BE INSTALLED AND THE RUN REPEATED FOR 'OTHER". if self.get_package_manager_setting(Constants.PACKAGE_MGR_SETTING_REPEAT_PATCH_OPERATION, True): - self.composite_logger.log_warning("Important: Zypper-related security updates are necessary to continue - those will be installed first.") - self.composite_logger.log_warning("Temporarily skipping 'other' package entry discovery due to Zypper-related security updates.\n") + self.status_handler.add_error_to_status_and_log_warning("Important: Zypper-related security updates are necessary to continue - those will be installed first. Temporarily skipping 'other' package entry discovery due to Zypper-related security updates.") return self.get_security_updates() # TO DO: in some cases, some misc security updates may sneak in - filter this (to do item) # also for above: also note that simply force updating only zypper does not solve the issue - tried @@ -340,9 +303,9 @@ def get_other_updates(self): if package not in packages_from_patch_data: other_packages.append(package) other_package_versions.append(all_package_versions[index]) - self.composite_logger.log_debug(" - " + str(package) + " [" + str(all_package_versions[index]) + "]") + self.composite_logger.log_verbose(" - " + str(package) + " [" + str(all_package_versions[index]) + "]") - self.composite_logger.log_debug("Discovered " + str(len(other_packages)) + " 'other' package entries.\n") + self.composite_logger.log_debug("[ZPM] Discovered " + str(len(other_packages)) + " 'other' package entries.") return other_packages, other_package_versions # endregion @@ -361,6 +324,7 @@ def 
extract_packages_and_versions(self, output): self.composite_logger.log_debug("\nExtracting package and version data...") packages = [] versions = [] + debug_log = str() lines = output.strip().split('\n') for line in lines: @@ -370,16 +334,18 @@ def extract_packages_and_versions(self, output): packages.append(package) version = line_split[4].strip() versions.append(version) - self.composite_logger.log_debug(" - Applicable line: " + line + ". Package: " + package + ". Version: " + version + ".") + debug_log += "[A] {0}, [P={1},V={2}]\n".format(str(line), package, version) else: - self.composite_logger.log_debug(" - Inapplicable line: " + line) + debug_log += "[N] {0}\n".format(str(line)) + self.composite_logger.log_debug("[ZPM] Debug log on extracting packages and versions: {0}".format(debug_log)) return packages, versions def extract_packages_from_patch_data(self, output): """Returns packages (sometimes with version information embedded) from patch data""" - self.composite_logger.log_debug("\nExtracting package entries from security patch data...") + self.composite_logger.log_verbose("Extracting package entries from security patch data...") packages = [] + debug_log = str() parser_seeing_packages_flag = False lines = output.strip().split('\n') @@ -387,24 +353,25 @@ def extract_packages_from_patch_data(self, output): if not parser_seeing_packages_flag: if 'package is going to be installed' in line or 'package is going to be upgraded' in line or \ 'packages are going to be installed:' in line or 'packages are going to be upgraded:' in line: - self.composite_logger.log_debug(" - Start marker line: " + line) - parser_seeing_packages_flag = True # Start -- Next line contains information we need + debug_log += "[S] {0}\n".format(str(line)) # start marker line + parser_seeing_packages_flag = True # start -- next line contains information we need else: - self.composite_logger.log_debug(" - Inapplicable line: " + line) + debug_log += "[N] {0}\n".format(str(line)) continue if 
not line or line.isspace(): - self.composite_logger.log_debug(" - End marker line: " + line) + debug_log += "[E] {0}\n".format(str(line)) # end marker line parser_seeing_packages_flag = False continue # End -- We're past a package information block line_parts = line.strip().split(' ') - self.composite_logger.log_debug(" - Package list line: " + line) + debug_log += "[A] {0}\n".format(str(line)) # applicable package list line for line_part in line_parts: packages.append(line_part) - self.composite_logger.log_debug(" - Package: " + line_part) + debug_log += "[Package] {0}\n".format(str(line_part)) - self.composite_logger.log_debug("\nExtracted " + str(len(packages)) + " prospective package entries from security patch data.\n") + self.composite_logger.log_debug("[ZPM] Debug log on extracting packages from security patch data: {0}".format(debug_log)) + self.composite_logger.log_verbose("[ZPM] Extracted " + str(len(packages)) + " prospective package entries from security patch data.") return packages # endregion # endregion @@ -563,222 +530,23 @@ def get_package_size(self, output): return Constants.UNKNOWN_PACKAGE_SIZE # endregion - # region auto OS updates - # def __init_constants_for_yast2_online_update_configuration(self): - # self.yast2_online_update_configuration_os_patch_configuration_settings_file_path = '/etc/sysconfig/automatic_online_update' - # self.yast2_online_update_configuration_apply_updates_identifier_text = 'AOU_ENABLE_CRONJOB' - # self.yast2_online_update_configuration_auto_update_config_pattern_match_text = '="(true|false)"' - # self.yast2_online_update_configuration_installation_state_identifier_text = "installation_state" - - def get_current_auto_os_patch_state(self): - """ Gets the current auto OS update patch state on the machine """ - self.composite_logger.log("Fetching the current automatic OS patch state on the machine...") - - current_auto_os_patch_state_for_yast2_online_update_configuration = 
self.__get_current_auto_os_patch_state_for_yast2_online_update_configuration() - self.composite_logger.log("OS patch state per auto OS update service: [yast2-online-update-configuration={0}]".format(str(current_auto_os_patch_state_for_yast2_online_update_configuration))) - - current_auto_os_patch_state = current_auto_os_patch_state_for_yast2_online_update_configuration - self.composite_logger.log_debug("Overall Auto OS Patch State based on all auto OS update service states [OverallAutoOSPatchState={0}]".format(str(current_auto_os_patch_state))) - return current_auto_os_patch_state - - def __get_current_auto_os_patch_state_for_yast2_online_update_configuration(self): - """ Gets current auto OS update patch state for yast2-online-update-configuration """ - self.composite_logger.log_debug("Fetching current automatic OS patch state in yast2-online-update-configuration.") - self.__init_auto_update_for_yast_online_update_configuration() - is_service_installed, apply_updates_value = self.__get_current_auto_os_updates_setting_on_machine() - - apply_updates = self.__get_extension_standard_value_for_apply_updates(apply_updates_value) - - # OS patch state is considered to be disabled: a) if it was successfully disabled or b) if the service is not installed - if not is_service_installed or apply_updates == Constants.AutomaticOSPatchStates.DISABLED: - return Constants.AutomaticOSPatchStates.DISABLED - - return apply_updates - - @staticmethod - def __get_extension_standard_value_for_apply_updates(apply_updates_value): - if apply_updates_value.lower() == 'true': - return Constants.AutomaticOSPatchStates.ENABLED - elif apply_updates_value.lower() == 'false': - return Constants.AutomaticOSPatchStates.DISABLED - else: - return Constants.AutomaticOSPatchStates.UNKNOWN - - def __init_auto_update_for_yast_online_update_configuration(self): - """ Initializes all generic auto OS update variables with the config values for yum cron service """ - 
self.os_patch_configuration_settings_file_path = self.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH - self.apply_updates_identifier_text = self.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT - self.auto_update_config_pattern_match_text = self.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT - self.installation_state_identifier_text = self.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT - self.current_auto_os_update_service = self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION - - def __get_current_auto_os_updates_setting_on_machine(self): - """ Gets all the update settings related to auto OS updates currently set on the machine """ - try: - apply_updates_value = "" - is_service_installed = False - - # get install state - if not os.path.exists(self.os_patch_configuration_settings_file_path): - return is_service_installed, apply_updates_value - - is_service_installed = True - self.composite_logger.log_debug("Checking if auto updates are currently enabled...") - image_default_patch_configuration = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path, raise_if_not_found=False) - if image_default_patch_configuration is not None: - settings = image_default_patch_configuration.strip().split('\n') - for setting in settings: - match = re.search(self.apply_updates_identifier_text + self.auto_update_config_pattern_match_text, str(setting)) - if match is not None: - apply_updates_value = match.group(1) - - if apply_updates_value == "": - self.composite_logger.log_debug("Machine did not have any value set for [Setting={0}]".format(str(self.apply_updates_identifier_text))) - else: - self.composite_logger.log_verbose("Current value set for [{0}={1}]".format(str(self.apply_updates_identifier_text), str(apply_updates_value))) - - return is_service_installed, apply_updates_value - - except Exception as error: - raise 
Exception("Error occurred in fetching current auto OS update settings from the machine. [Exception={0}]".format(repr(error))) - - def disable_auto_os_update(self): - """ Disables auto OS updates on the machine only if they are enable_on_reboot and logs the default settings the machine comes with """ - try: - self.composite_logger.log("Disabling auto OS updates in all identified services...") - self.disable_auto_os_update_for_yast_online_update_configuration() - self.composite_logger.log_debug("Completed attempt to disable auto OS updates") - - except Exception as error: - self.composite_logger.log_error("Could not disable auto OS updates. [Error={0}]".format(repr(error))) - raise - - def disable_auto_os_update_for_yast_online_update_configuration(self): - """ Disables auto OS updates, using yast online, and logs the default settings the machine comes with """ - self.composite_logger.log("Disabling auto OS updates using yast online update configuration") - self.__init_auto_update_for_yast_online_update_configuration() - - self.backup_image_default_patch_configuration_if_not_exists() - # check if file exists, if not do nothing - if not os.path.exists(self.os_patch_configuration_settings_file_path): - self.composite_logger.log_debug("Cannot disable auto updates using yast2-online-update-configuration because the configuration file does not exist, indicating the service is not installed") - return - - self.composite_logger.log_debug("Preemptively disabling auto OS updates using yum-cron") - self.update_os_patch_configuration_sub_setting(self.apply_updates_identifier_text, "false", self.auto_update_config_pattern_match_text) - - self.composite_logger.log("Successfully disabled auto OS updates using yast2-online-update-configuration") - - def backup_image_default_patch_configuration_if_not_exists(self): - """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. 
- We only log the default system settings a VM comes with, any subsequent updates will not be recorded""" - """ JSON format for backup file: - { - "yast2-online-update-configuration": { - "apply_updates": "true/false/empty string" - "install_state": true/false - } - } """ - try: - self.composite_logger.log_debug("Ensuring there is a backup of the default patch state for [AutoOSUpdateService={0}]".format(str(self.current_auto_os_update_service))) - image_default_patch_configuration_backup = {} - - # read existing backup since it also contains backup from other update services. We need to preserve any existing data within the backup file - if self.image_default_patch_configuration_backup_exists(): - try: - image_default_patch_configuration_backup = json.loads(self.env_layer.file_system.read_with_retry(self.image_default_patch_configuration_backup_path)) - except Exception as error: - self.composite_logger.log_error("Unable to read backup for default patch state. Will attempt to re-write. [Exception={0}]".format(repr(error))) - - # verify if existing backup is valid if not, write to backup - is_backup_valid = self.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup) - if is_backup_valid: - self.composite_logger.log_debug("Since extension has a valid backup, no need to log the current settings again. 
[Default Auto OS update settings={0}] [File path={1}]" - .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) - else: - self.composite_logger.log_debug("Since the backup is invalid, will add a new backup with the current auto OS update settings") - self.composite_logger.log_debug("Fetching current auto OS update settings for [AutoOSUpdateService={0}]".format(str(self.current_auto_os_update_service))) - is_service_installed, apply_updates_value = self.__get_current_auto_os_updates_setting_on_machine() - - backup_image_default_patch_configuration_json_to_add = { - self.current_auto_os_update_service: { - self.apply_updates_identifier_text: apply_updates_value, - self.installation_state_identifier_text: is_service_installed - } - } - - image_default_patch_configuration_backup.update(backup_image_default_patch_configuration_json_to_add) - - self.composite_logger.log_debug("Logging default system configuration settings for auto OS updates. [Settings={0}] [Log file path={1}]" - .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) - self.env_layer.file_system.write_with_retry(self.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') - except Exception as error: - error_message = "Exception during fetching and logging default auto update settings on the machine. 
[Exception={0}]".format(repr(error)) - self.composite_logger.log_error(error_message) - self.status_handler.add_error_to_status(error_message, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - raise - - def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): - """ Verifies if default auto update configurations, for a service under consideration, are saved in backup """ - - # NOTE: Adding a separate function to check backup for multiple auto OS update services, if more are added in future. - return self.is_backup_valid_for_yast_online_update_configuration(image_default_patch_configuration_backup) - - def is_backup_valid_for_yast_online_update_configuration(self, image_default_patch_configuration_backup): - if self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup \ - and self.apply_updates_identifier_text in image_default_patch_configuration_backup[self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION]: - self.composite_logger.log_debug("Extension has a valid backup for default yum-cron configuration settings") - return True - else: - self.composite_logger.log_debug("Extension does not have a valid backup for default yum-cron configuration settings") - return False - - def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value="false", config_pattern_match_text=""): - """ Updates (or adds if it doesn't exist) the given patch_configuration_sub_setting with the given value in os_patch_configuration_settings_file """ - try: - self.composite_logger.log_debug("Updating system configuration settings for auto OS updates. 
[Patch Configuration Sub Setting={0}] [Value={1}]".format(str(patch_configuration_sub_setting), value)) - os_patch_configuration_settings = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) - patch_configuration_sub_setting_to_update = patch_configuration_sub_setting + '="' + value + '"' - patch_configuration_sub_setting_found_in_file = False - updated_patch_configuration_sub_setting = "" - settings = os_patch_configuration_settings.strip().split('\n') - - # update value of existing setting - for i in range(len(settings)): - match = re.search(patch_configuration_sub_setting + config_pattern_match_text, settings[i]) - if match is not None: - settings[i] = patch_configuration_sub_setting_to_update - patch_configuration_sub_setting_found_in_file = True - updated_patch_configuration_sub_setting += settings[i] + "\n" - - # add setting to configuration file, since it doesn't exist - if not patch_configuration_sub_setting_found_in_file: - updated_patch_configuration_sub_setting += patch_configuration_sub_setting_to_update + "\n" - - self.env_layer.file_system.write_with_retry(self.os_patch_configuration_settings_file_path, '{0}'.format(updated_patch_configuration_sub_setting.lstrip()), mode='w+') - except Exception as error: - error_msg = "Error occurred while updating system configuration settings for auto OS updates. [Patch Configuration={0}] [Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) - self.composite_logger.log_error(error_msg) - self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - raise - # endregion - # region Reboot Management def is_reboot_pending(self): """ Checks if there is a pending reboot on the machine. 
""" try: pending_file_exists = os.path.isfile(self.REBOOT_PENDING_FILE_PATH) # not intended for zypper, but supporting as back-compat pending_processes_exist = self.do_processes_require_restart() - self.composite_logger.log_debug(" - Reboot required debug flags (zypper): " + str(pending_file_exists) + ", " + str(pending_processes_exist) + ".") + self.composite_logger.log_debug("[ZPM] Reboot required debug flags (zypper): " + str(pending_file_exists) + ", " + str(pending_processes_exist) + ".") return pending_file_exists or pending_processes_exist except Exception as error: - self.composite_logger.log_error('Error while checking for reboot pending (zypper): ' + repr(error)) + self.composite_logger.log_error('[ZPM] Error while checking for reboot pending (zypper): ' + repr(error)) return True # defaults for safety def do_processes_require_restart(self): """Signals whether processes require a restart due to updates""" output = self.invoke_package_manager(self.zypper_ps) lines = output.strip().split('\n') + debug_log = str() process_list_flag = False process_count = 0 @@ -786,23 +554,24 @@ def do_processes_require_restart(self): for line in lines: if not process_list_flag: # keep going until the process list starts if not all(word in line for word in ["PID", "PPID", "UID", "User", "Command", "Service"]): - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + debug_log += "[N] {0}\n".format(str(line)) # not applicable continue else: - self.composite_logger.log_debug(" - Process list started: " + str(line)) + debug_log += "[PLS] {0}\n".format(str(line)) # process list started process_list_flag = True continue process_details = line.split(' |') if len(process_details) < 6: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + debug_log += "[N] {0}\n".format(str(line)) # not applicable continue else: - self.composite_logger.log_debug(" - Applicable line: " + str(line)) + debug_log += "[A] {0}\n".format(str(line)) # applicable 
process_count += 1 process_list_verbose += process_details[4].strip() + " (" + process_details[0].strip() + "), " # process name and id - self.composite_logger.log(" - Processes requiring restart (" + str(process_count) + "): [" + process_list_verbose + "]") + self.composite_logger.log_debug("[ZPM] Debug log on processes requiring restart: {0}".format(debug_log)) + self.composite_logger.log("[ZPM] Processes requiring restart. [Count={0}][ProcessList={1}]".format(str(process_count), process_list_verbose)) return process_count != 0 # True if there were any # endregion Reboot Management diff --git a/src/core/src/package_managers/zypper/ZypperPatchModeManager.py b/src/core/src/package_managers/zypper/ZypperPatchModeManager.py new file mode 100644 index 000000000..b453bc3a1 --- /dev/null +++ b/src/core/src/package_managers/zypper/ZypperPatchModeManager.py @@ -0,0 +1,238 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Requires Python 2.7+ + +import os +import re +import json +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.PatchModeManager import PatchModeManager + + +class ZypperPatchModeManager(PatchModeManager): + """ Helps with translating PatchModes set by the customer to in-VM configurations """ + + class ZypperAutoOSUpdateServices(Constants.EnumBackport): + YAST2_ONLINE_UPDATE_CONFIGURATION = "yast2-online-update-configuration" + + class YastOnlineUpdateConfigurationConstants(Constants.EnumBackport): + OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = '/etc/sysconfig/automatic_online_update' + APPLY_UPDATES_IDENTIFIER_TEXT = 'AOU_ENABLE_CRONJOB' + AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT = '="(true|false)"' + INSTALLATION_STATE_IDENTIFIER_TEXT = "installation_state" + + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name): + super(ZypperPatchModeManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name) + # auto OS updates + self.current_auto_os_update_service = None + self.os_patch_configuration_settings_file_path = '' + self.auto_update_config_pattern_match_text = "" + self.apply_updates_identifier_text = "" + self.installation_state_identifier_text = "" + + # region auto OS updates + def get_current_auto_os_patch_state(self): + """ Gets the current auto OS update patch state on the machine """ + self.composite_logger.log("Fetching the current automatic OS patch state on the machine...") + + current_auto_os_patch_state_for_yast2_online_update_configuration = self.__get_current_auto_os_patch_state_for_yast2_online_update_configuration() + self.composite_logger.log("OS patch state per auto OS update service: [yast2-online-update-configuration={0}]".format(str(current_auto_os_patch_state_for_yast2_online_update_configuration))) + + current_auto_os_patch_state = 
current_auto_os_patch_state_for_yast2_online_update_configuration + self.composite_logger.log_debug("Overall Auto OS Patch State based on all auto OS update service states [OverallAutoOSPatchState={0}]".format(str(current_auto_os_patch_state))) + return current_auto_os_patch_state + + def __get_current_auto_os_patch_state_for_yast2_online_update_configuration(self): + """ Gets current auto OS update patch state for yast2-online-update-configuration """ + self.composite_logger.log_debug("Fetching current automatic OS patch state in yast2-online-update-configuration.") + self.__init_auto_update_for_yast_online_update_configuration() + is_service_installed, apply_updates_value = self.__get_current_auto_os_updates_setting_on_machine() + + apply_updates = self.__get_extension_standard_value_for_apply_updates(apply_updates_value) + + # OS patch state is considered to be disabled: a) if it was successfully disabled or b) if the service is not installed + if not is_service_installed or apply_updates == Constants.AutomaticOSPatchStates.DISABLED: + return Constants.AutomaticOSPatchStates.DISABLED + + return apply_updates + + @staticmethod + def __get_extension_standard_value_for_apply_updates(apply_updates_value): + if apply_updates_value.lower() == 'true': + return Constants.AutomaticOSPatchStates.ENABLED + elif apply_updates_value.lower() == 'false': + return Constants.AutomaticOSPatchStates.DISABLED + else: + return Constants.AutomaticOSPatchStates.UNKNOWN + + def __init_auto_update_for_yast_online_update_configuration(self): + """ Initializes all generic auto OS update variables with the config values for the yast2-online-update-configuration service """ + self.os_patch_configuration_settings_file_path = self.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH + self.apply_updates_identifier_text = self.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT + self.auto_update_config_pattern_match_text = 
self.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT + self.installation_state_identifier_text = self.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT + self.current_auto_os_update_service = self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION + + def __get_current_auto_os_updates_setting_on_machine(self): + """ Gets all the update settings related to auto OS updates currently set on the machine """ + try: + apply_updates_value = "" + is_service_installed = False + + # get install state + if not os.path.exists(self.os_patch_configuration_settings_file_path): + return is_service_installed, apply_updates_value + + is_service_installed = True + self.composite_logger.log_debug("Checking if auto updates are currently enabled...") + image_default_patch_configuration = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path, raise_if_not_found=False) + if image_default_patch_configuration is not None: + settings = image_default_patch_configuration.strip().split('\n') + for setting in settings: + match = re.search(self.apply_updates_identifier_text + self.auto_update_config_pattern_match_text, str(setting)) + if match is not None: + apply_updates_value = match.group(1) + + if apply_updates_value == "": + self.composite_logger.log_debug("Machine did not have any value set for [Setting={0}]".format(str(self.apply_updates_identifier_text))) + else: + self.composite_logger.log_verbose("Current value set for [{0}={1}]".format(str(self.apply_updates_identifier_text), str(apply_updates_value))) + + return is_service_installed, apply_updates_value + + except Exception as error: + raise Exception("Error occurred in fetching current auto OS update settings from the machine. 
[Exception={0}]".format(repr(error))) + + def disable_auto_os_update(self): + """ Disables auto OS updates on the machine only if they are enable_on_reboot and logs the default settings the machine comes with """ + try: + self.composite_logger.log("Disabling auto OS updates in all identified services...") + self.disable_auto_os_update_for_yast_online_update_configuration() + self.composite_logger.log_debug("Completed attempt to disable auto OS updates") + + except Exception as error: + self.composite_logger.log_error("Could not disable auto OS updates. [Error={0}]".format(repr(error))) + raise + + def disable_auto_os_update_for_yast_online_update_configuration(self): + """ Disables auto OS updates, using yast online, and logs the default settings the machine comes with """ + self.composite_logger.log("Disabling auto OS updates using yast online update configuration") + self.__init_auto_update_for_yast_online_update_configuration() + + self.backup_image_default_patch_configuration_if_not_exists() + # check if file exists, if not do nothing + if not os.path.exists(self.os_patch_configuration_settings_file_path): + self.composite_logger.log_debug("Cannot disable auto updates using yast2-online-update-configuration because the configuration file does not exist, indicating the service is not installed") + return + + self.composite_logger.log_debug("Preemptively disabling auto OS updates using yast2-online-update-configuration") + self.update_os_patch_configuration_sub_setting(self.apply_updates_identifier_text, "false", self.auto_update_config_pattern_match_text) + + self.composite_logger.log("Successfully disabled auto OS updates using yast2-online-update-configuration") + + def backup_image_default_patch_configuration_if_not_exists(self): + """ Records the default system settings for auto OS updates within patch extension artifacts for future reference. 
+ We only log the default system settings a VM comes with, any subsequent updates will not be recorded""" + """ JSON format for backup file: + { + "yast2-online-update-configuration": { + "apply_updates": "true/false/empty string" + "install_state": true/false + } + } """ + try: + self.composite_logger.log_debug("Ensuring there is a backup of the default patch state for [AutoOSUpdateService={0}]".format(str(self.current_auto_os_update_service))) + image_default_patch_configuration_backup = {} + + # read existing backup since it also contains backup from other update services. We need to preserve any existing data within the backup file + if self.image_default_patch_configuration_backup_exists(): + try: + image_default_patch_configuration_backup = json.loads(self.env_layer.file_system.read_with_retry(self.image_default_patch_configuration_backup_path)) + except Exception as error: + self.composite_logger.log_error("Unable to read backup for default patch state. Will attempt to re-write. [Exception={0}]".format(repr(error))) + + # verify if existing backup is valid if not, write to backup + is_backup_valid = self.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup) + if is_backup_valid: + self.composite_logger.log_debug("Since extension has a valid backup, no need to log the current settings again. 
[Default Auto OS update settings={0}][File path={1}]" + .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) + else: + self.composite_logger.log_debug("Since the backup is invalid, will add a new backup with the current auto OS update settings") + self.composite_logger.log_debug("Fetching current auto OS update settings for [AutoOSUpdateService={0}]".format(str(self.current_auto_os_update_service))) + is_service_installed, apply_updates_value = self.__get_current_auto_os_updates_setting_on_machine() + + backup_image_default_patch_configuration_json_to_add = { + self.current_auto_os_update_service: { + self.apply_updates_identifier_text: apply_updates_value, + self.installation_state_identifier_text: is_service_installed + } + } + + image_default_patch_configuration_backup.update(backup_image_default_patch_configuration_json_to_add) + + self.composite_logger.log_debug("Logging default system configuration settings for auto OS updates. [Settings={0}][Log file path={1}]" + .format(str(image_default_patch_configuration_backup), self.image_default_patch_configuration_backup_path)) + self.env_layer.file_system.write_with_retry(self.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') + except Exception as error: + error_message = "Exception during fetching and logging default auto update settings on the machine. [Exception={0}]".format(repr(error)) + self.composite_logger.log_error(error_message) + self.status_handler.add_error_to_status(error_message, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) + raise + + def is_image_default_patch_configuration_backup_valid(self, image_default_patch_configuration_backup): + """ Verifies if default auto update configurations, for a service under consideration, are saved in backup """ + + # NOTE: Adding a separate function to check backup for multiple auto OS update services, if more are added in future. 
+ return self.is_backup_valid_for_yast_online_update_configuration(image_default_patch_configuration_backup) + + def is_backup_valid_for_yast_online_update_configuration(self, image_default_patch_configuration_backup): + if self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup \ + and self.apply_updates_identifier_text in image_default_patch_configuration_backup[self.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION]: + self.composite_logger.log_debug("Extension has a valid backup for default yast2-online-update-configuration settings") + return True + else: + self.composite_logger.log_debug("Extension does not have a valid backup for default yast2-online-update-configuration settings") + return False + + def update_os_patch_configuration_sub_setting(self, patch_configuration_sub_setting, value="false", config_pattern_match_text=""): + """ Updates (or adds if it doesn't exist) the given patch_configuration_sub_setting with the given value in os_patch_configuration_settings_file """ + try: + self.composite_logger.log_debug("Updating system configuration settings for auto OS updates. 
[Patch Configuration Sub Setting={0}][Value={1}]".format(str(patch_configuration_sub_setting), value)) + os_patch_configuration_settings = self.env_layer.file_system.read_with_retry(self.os_patch_configuration_settings_file_path) + patch_configuration_sub_setting_to_update = patch_configuration_sub_setting + '="' + value + '"' + patch_configuration_sub_setting_found_in_file = False + updated_patch_configuration_sub_setting = "" + settings = os_patch_configuration_settings.strip().split('\n') + + # update value of existing setting + for i in range(len(settings)): + match = re.search(patch_configuration_sub_setting + config_pattern_match_text, settings[i]) + if match is not None: + settings[i] = patch_configuration_sub_setting_to_update + patch_configuration_sub_setting_found_in_file = True + updated_patch_configuration_sub_setting += settings[i] + "\n" + + # add setting to configuration file, since it doesn't exist + if not patch_configuration_sub_setting_found_in_file: + updated_patch_configuration_sub_setting += patch_configuration_sub_setting_to_update + "\n" + + self.env_layer.file_system.write_with_retry(self.os_patch_configuration_settings_file_path, '{0}'.format(updated_patch_configuration_sub_setting.lstrip()), mode='w+') + except Exception as error: + error_msg = "Error occurred while updating system configuration settings for auto OS updates. 
[Patch Configuration={0}][Error={1}]".format(str(patch_configuration_sub_setting), repr(error)) + self.composite_logger.log_error(error_msg) + self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.DEFAULT_ERROR) + raise + # endregion + diff --git a/src/core/src/package_managers/zypper/ZypperSourcesManager.py b/src/core/src/package_managers/zypper/ZypperSourcesManager.py new file mode 100644 index 000000000..9ebb6c38a --- /dev/null +++ b/src/core/src/package_managers/zypper/ZypperSourcesManager.py @@ -0,0 +1,32 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +from core.src.bootstrap.Constants import Constants +from core.src.package_managers.SourcesManager import SourcesManager + + +class ZypperSourcesManager(SourcesManager): + """ Helps with sources list management for Zypper """ + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(ZypperSourcesManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler, package_manager_name=Constants.ZYPPER) + pass + + def function_name(self): + pass + + + +# https://manpages.debian.org/jessie/apt/sources.list.5.en.html#:~:text=The%20source%20list%20%2Fetc%2Fapt%2Fsources.list%20is%20designed%20to%20support,by%20an%20equivalent%20command%20from%20another%20APT%20front-end%29. 
\ No newline at end of file diff --git a/src/core/src/package_managers/zypper/__init__.py b/src/core/src/package_managers/zypper/__init__.py new file mode 100644 index 000000000..e96580122 --- /dev/null +++ b/src/core/src/package_managers/zypper/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ \ No newline at end of file diff --git a/src/core/src/service_interfaces/LifecycleManagerAzure.py b/src/core/src/service_interfaces/LifecycleManagerAzure.py deleted file mode 100644 index 8e854b282..000000000 --- a/src/core/src/service_interfaces/LifecycleManagerAzure.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2020 Microsoft Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Requires Python 2.7+ - -import json -import os -import shutil -import time -from core.src.bootstrap.Constants import Constants -from core.src.service_interfaces.LifecycleManager import LifecycleManager - - -class LifecycleManagerAzure(LifecycleManager): - """Class for managing the core code's lifecycle within the extension wrapper""" - - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): - super(LifecycleManagerAzure, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) - - # Handshake file paths - self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.EXT_STATE_FILE) - self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.CORE_STATE_FILE) - # Writing to log - self.composite_logger.log_debug("Initializing LifecycleManagerAzure") - - # region - State checkers - def execution_start_check(self): - self.composite_logger.log_debug("\nExecution start check initiating...") - - if self.execution_config.exec_auto_assess_only: - timer_start_time = self.env_layer.datetime.datetime_utcnow() - while True: - extension_sequence = self.read_extension_sequence() - core_sequence = self.read_core_sequence() - - # Timer evaluation - current_time = self.env_layer.datetime.datetime_utcnow() - elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - timer_start_time) - - # Check for sequence number mismatches - if int(self.execution_config.sequence_number) != int(core_sequence['number']): - if int(self.execution_config.sequence_number) < int(extension_sequence['number']) or int(self.execution_config.sequence_number) < int(core_sequence['number']): - self.composite_logger.log_warning("Auto-assessment NOT STARTED as newer sequence number detected. 
[Attempted={0}][DetectedExt={1}][DetectedCore={2}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']), str(core_sequence['number']))) - elif int(self.execution_config.sequence_number) > int(extension_sequence['number']) or int(self.execution_config.sequence_number) > int(core_sequence['number']): - self.composite_logger.log_error("Auto-assessment NOT STARTED as an extension state anomaly was detected. [Attempted={0}][DetectedExt={1}][DetectedCore={2}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']), str(core_sequence['number']))) - self.composite_logger.file_logger.close() - self.env_layer.exit(0) - - # DEFINITELY SAFE TO START. Correct sequence number marked as completed - if core_sequence['completed'].lower() == 'true': - self.composite_logger.log("Auto-assessment is SAFE to start. Existing sequence number marked as COMPLETED.\n") - self.read_only_mode = False - break - - # Check for active running processes if not completed - if len(self.identify_running_processes(core_sequence['processIds'])) != 0: - if os.getpid() in core_sequence['processIds']: - self.composite_logger.log("Auto-assessment is SAFE to start. Core sequence ownership is already established.\n") - self.read_only_mode = False - break - - # DEFINITELY _NOT_ SAFE TO START. Possible reasons: full core operation is in progress (okay), some previous auto-assessment is still running (bad scheduling, adhoc run, or process stalled) - if elapsed_time_in_minutes > Constants.MAX_AUTO_ASSESSMENT_WAIT_FOR_MAIN_CORE_EXEC_IN_MINUTES: # will wait up to the max allowed - self.composite_logger.log_warning("Auto-assessment is NOT safe to start yet.TIMED-OUT waiting to Core to complete. EXITING. 
[LastHeartbeat={0}][Operation={1}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']))) - self.composite_logger.file_logger.close() - self.env_layer.exit(0) - else: - self.composite_logger.file_logger.flush() - self.composite_logger.log_warning("Auto-assessment is NOT safe to start yet. Waiting to retry (up to set timeout). [LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.REBOOT_BUFFER_IN_MINUTES))) - self.composite_logger.file_logger.flush() - time.sleep(30) - continue - - # MAYBE SAFE TO START. Safely timeout if wait for any core restart events (from a potential reboot) has exceeded the maximum reboot buffer - if elapsed_time_in_minutes > Constants.REBOOT_BUFFER_IN_MINUTES: - self.composite_logger.log_debug("Auto-assessment is now considered SAFE to start as Core timed-out in reporting completion mark. [LastHeartbeat={0}][Operation={1}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']))) - self.read_only_mode = False - break - - # Briefly pause execution to re-check all states (including reboot buffer) again - self.composite_logger.file_logger.flush() - self.composite_logger.log_debug("Auto-assessment is waiting for Core state completion mark (up to set timeout). 
[LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.REBOOT_BUFFER_IN_MINUTES))) - self.composite_logger.file_logger.flush() - time.sleep(30) - - # Signalling take-over of core state by auto-assessment after safety checks for any competing process - self.update_core_sequence(completed=False) - # Refresh status file in memory to be up-to-date - self.status_handler.load_status_file_components() - else: - # Logic for all non-Auto-assessment operations - extension_sequence = self.read_extension_sequence() - core_sequence = self.read_core_sequence() - - if int(extension_sequence['number']) == int(self.execution_config.sequence_number): - if core_sequence['completed'] is True: - # Block attempts to execute what last completed (fully) again - self.composite_logger.log_warning("LifecycleManager recorded false enable for completed sequence {0}.".format(str(extension_sequence['number']))) - self.composite_logger.file_logger.close() - self.env_layer.exit(0) - else: - # Incomplete current execution - self.composite_logger.log_debug("Restarting execution for incomplete sequence number: {0}.".format(str(self.execution_config.sequence_number))) - elif int(extension_sequence['number']) < int(self.execution_config.sequence_number): - # Allow this but log a warning - self.composite_logger.log_warning("Unexpected lower sequence number: {0} < {1}.".format(str(self.execution_config.sequence_number), str(extension_sequence['number']))) - else: - # New sequence number - self.composite_logger.log_debug("New sequence number accepted for execution: {0} > {1}.".format(str(self.execution_config.sequence_number), str(extension_sequence['number']))) - - self.composite_logger.log_debug("Completed execution start check.") - - def lifecycle_status_check(self): - self.composite_logger.log_debug("Performing lifecycle status check...") - 
extension_sequence = self.read_extension_sequence() - if int(extension_sequence['number']) == int(self.execution_config.sequence_number): - self.composite_logger.log_debug("Extension sequence number verified to have not changed: {0}".format(str(extension_sequence['number']))) - self.update_core_sequence(completed=False) - else: - self.composite_logger.log_error("Extension goal state has changed. Terminating current sequence: {0}".format(self.execution_config.sequence_number)) - self.status_handler.report_sequence_number_changed_termination() # fail everything in a sequence number change - self.update_core_sequence(completed=True) # forced-to-complete scenario | extension wrapper will be watching for this event - self.composite_logger.file_logger.close() - self.env_layer.exit(0) - self.composite_logger.log_debug("Completed lifecycle status check.") - - # End region State checkers - # region - Identity - def get_vm_cloud_type(self): - return Constants.VMCloudType.AZURE - # endregion - diff --git a/src/core/src/service_interfaces/StatusHandler.py b/src/core/src/service_interfaces/StatusHandler.py index 76c5c5ef9..a1ebbc291 100644 --- a/src/core/src/service_interfaces/StatusHandler.py +++ b/src/core/src/service_interfaces/StatusHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -26,7 +26,10 @@ class StatusHandler(object): """Class for managing the core code's lifecycle within the extension wrapper""" - def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, vm_cloud_type): + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, package_manager_name, cloud_type): + # Defaulting operation at init to Assessment always until explicitly initialized to ensure error logging defaults + self.__current_operation = Constants.Op.ASSESSMENT + # Map supporting components for operation self.env_layer = env_layer self.execution_config = execution_config @@ -35,7 +38,8 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.complete_status_file_path = self.execution_config.complete_status_file_path self.status_file_path = self.execution_config.status_file_path self.__log_file_path = self.execution_config.log_file_path - self.vm_cloud_type = vm_cloud_type + self.package_manager_name = package_manager_name + self.cloud_type = cloud_type # Status components self.__high_level_status_message = "" @@ -81,17 +85,14 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ # Discovers OS name and version for package id composition self.__os_name_and_version = self.get_os_name_and_version() - self.__current_operation = None - # Update patch metadata summary in status for auto patching installation requests, to be reported to healthstore - if (execution_config.maintenance_run_id is not None or execution_config.health_store_id is not None) and execution_config.operation.lower() == Constants.INSTALLATION.lower(): + if (execution_config.maintenance_run_id is not None or 
execution_config.health_store_id is not None) and execution_config.operation.lower() == Constants.Op.INSTALLATION.lower(): if self.__installation_reboot_status != Constants.RebootStatus.STARTED: self.set_patch_metadata_for_healthstore_substatus_json(report_to_healthstore=True, wait_after_update=True) # updating metadata summary again with reporting to healthstore turned off self.set_patch_metadata_for_healthstore_substatus_json(report_to_healthstore=False, wait_after_update=False) else: - self.composite_logger.log_debug("Since this is the previous patch operation re-triggered after a reboot, healthstore has the operation commencement details. " - "So, not sending another report to healthstore") + self.composite_logger.log_debug("[SH] Healthstore reporting skipped as VM is coming back from a reboot.") # Enable reboot completion status capture if self.__installation_reboot_status == Constants.RebootStatus.STARTED: @@ -109,13 +110,13 @@ def reset_assessment_data(self): def set_package_assessment_status(self, package_names, package_versions, classification="Other", status="Available"): """ Externally available method to set assessment status for one or more packages of the **SAME classification and status** """ - self.composite_logger.log_debug("Setting package assessment status in bulk. [Count={0}]".format(str(len(package_names)))) + self.composite_logger.log_verbose("[SH] Setting package assessment status in bulk. [Count={0}]".format(str(len(package_names)))) for package_name, package_version in zip(package_names, package_versions): patch_already_saved = False patch_id = self.__get_patch_id(package_name, package_version) - # Match patch_id in map and update existing patch's classification i.e from other -> security + # Match patch_id in map and update existing patch's classification i.e. 
from other -> security if len(self.__assessment_packages_map) > 0 and patch_id in self.__assessment_packages_map: self.__assessment_packages_map.setdefault(patch_id, {})['classifications'] = [classification] # self.__assessment_packages_map.setdefault(patch_id, {})['patchState'] = status @@ -164,7 +165,7 @@ def sort_classification_key(x): def set_package_install_status(self, package_names, package_versions, status="Pending", classification=None): """ Externally available method to set installation status for one or more packages of the **SAME classification and status** """ - self.composite_logger.log_debug("Setting package installation status in bulk. [Count={0}]".format(str(len(package_names)))) + self.composite_logger.log_verbose("[SH] Setting package installation status in bulk. [Count={0}]".format(str(len(package_names)))) package_names, package_versions = self.validate_packages_being_installed(package_names, package_versions) package_install_status_summary = "" @@ -191,9 +192,9 @@ def set_package_install_status(self, package_names, package_versions, status="Pe # Add new patch to ordered map self.__installation_packages_map[patch_id] = record - package_install_status_summary += "[P={0},V={1}] ".format(str(package_name), str(package_version)) + package_install_status_summary += "[P={0},V={1}] ".format(str(package_name), str(package_version)) - self.composite_logger.log_debug("Package install status summary [Status= " + status + "] : " + package_install_status_summary) + self.composite_logger.log_verbose("[SH] Completed setting package installation status in bulk. 
[Status={0}][Summary=\"{1}\"]".format(status, package_install_status_summary.strip())) self.__installation_packages = list(self.__installation_packages_map.values()) self.__installation_packages = self.sort_packages_by_classification_and_state(self.__installation_packages) self.set_installation_substatus_json() @@ -202,22 +203,22 @@ def set_package_install_status(self, package_names, package_versions, status="Pe def validate_packages_being_installed(package_names, package_versions): # Data normalization and corruption guards - if these exceptions hit, a bug has been introduced elsewhere if isinstance(package_names, str) != isinstance(package_versions, str): - raise Exception("Internal error: Package name and version data corruption detected.") + raise Exception("[SH] Internal error: Package name and version data corruption detected.") if isinstance(package_names, str): package_names, package_versions = [package_names], [package_versions] if len(package_names) != len(package_versions): - raise Exception("Internal error: Bad package name and version data received for status reporting. [Names={0}][Versions={1}]".format(str(len(package_names)), str(len(package_versions)))) + raise Exception("[SH] Internal error: Bad package name and version data received for status reporting. [Names={0}][Versions={1}]".format(str(len(package_names)), str(len(package_versions)))) return package_names, package_versions def set_package_install_status_classification(self, package_names, package_versions, classification=None): """ Externally available method to set classification for one or more packages being installed """ if classification is None: - self.composite_logger.log_debug("Classification not provided for the set of packages being installed. [Package Count={0}]".format(str(len(package_names)))) + self.composite_logger.log_debug("[SH] Classification not provided for packages being installed. 
[PackageCount={0}]".format(str(len(package_names)))) return self.validate_packages_being_installed(package_names, package_versions) - self.composite_logger.log_debug("Setting package installation classification in bulk. [Count={0}]".format(str(len(package_names)))) + self.composite_logger.log_verbose("[SH] Setting package data in bulk. [Operation=Installation][Type=Classification][PackageCount={0}]".format(str(len(package_names)))) package_classification_summary = "" for package_name, package_version in zip(package_names, package_versions): classification_matching_package_found = False @@ -227,9 +228,9 @@ def set_package_install_status_classification(self, package_names, package_versi self.__installation_packages_map.setdefault(patch_id, {})['classifications'] = [classification] classification_matching_package_found = True - package_classification_summary += "[P={0},V={1},C={2}] ".format(str(package_name), str(package_version), str(classification if classification is not None and classification_matching_package_found else "-")) + package_classification_summary += "[P={0},V={1},C={2}] ".format(str(package_name), str(package_version), str(classification if classification is not None and classification_matching_package_found else "-")) - self.composite_logger.log_debug("Package install status summary (classification): " + package_classification_summary) + self.composite_logger.log_verbose("[SH] Completed setting package data in bulk. 
[Operation=Installation][Type=Classification][Summary=\"{0}\"]".format(package_classification_summary.strip())) self.__installation_packages = list(self.__installation_packages_map.values()) self.__installation_packages = self.sort_packages_by_classification_and_state(self.__installation_packages) self.set_installation_substatus_json() @@ -241,11 +242,11 @@ def __get_patch_id(self, package_name, package_version): def get_os_name_and_version(self): try: if self.env_layer.platform.system() != "Linux": - raise Exception("Unsupported OS type: {0}.".format(self.env_layer.platform.system())) + raise Exception("[SH] Unsupported OS type. [OSType={0}].".format(self.env_layer.platform.system())) platform_info = self.env_layer.platform.linux_distribution() return "{0}_{1}".format(platform_info[0], platform_info[1]) except Exception as error: - self.composite_logger.log_error("Unable to determine platform information: {0}".format(repr(error))) + self.composite_logger.log_error("[SH] Unable to determine platform information. [Error={0}]".format(repr(error))) return "unknownDist_unknownVer" # endregion @@ -257,7 +258,7 @@ def get_installation_reboot_status(self): def set_installation_reboot_status(self, new_reboot_status): """ Valid reboot statuses: NotNeeded, Required, Started, Failed, Completed """ if new_reboot_status not in [Constants.RebootStatus.NOT_NEEDED, Constants.RebootStatus.REQUIRED, Constants.RebootStatus.STARTED, Constants.RebootStatus.FAILED, Constants.RebootStatus.COMPLETED]: - raise "Invalid reboot status specified. [Status={0}]".format(str(new_reboot_status)) + raise Exception("[SH] Invalid reboot status specified. 
[Status={0}]".format(str(new_reboot_status))) # State transition validation if (new_reboot_status == Constants.RebootStatus.NOT_NEEDED and self.__installation_reboot_status not in [Constants.RebootStatus.NOT_NEEDED])\ @@ -265,26 +266,25 @@ def set_installation_reboot_status(self, new_reboot_status): or (new_reboot_status == Constants.RebootStatus.STARTED and self.__installation_reboot_status not in [Constants.RebootStatus.NOT_NEEDED, Constants.RebootStatus.REQUIRED, Constants.RebootStatus.STARTED])\ or (new_reboot_status == Constants.RebootStatus.FAILED and self.__installation_reboot_status not in [Constants.RebootStatus.STARTED, Constants.RebootStatus.FAILED])\ or (new_reboot_status == Constants.RebootStatus.COMPLETED and self.__installation_reboot_status not in [Constants.RebootStatus.STARTED, Constants.RebootStatus.COMPLETED]): - self.composite_logger.log_error("Invalid reboot status transition attempted. [CurrentRebootStatus={0}] [NewRebootStatus={1}]".format(self.__installation_reboot_status, str(new_reboot_status))) + self.composite_logger.log_error("[SH] Invalid reboot status transition attempted. [CurrentRebootStatus={0}][NewRebootStatus={1}]".format(self.__installation_reboot_status, str(new_reboot_status))) return # Persisting new reboot status (with machine state incorporation) - self.composite_logger.log_debug("Setting new installation reboot status. [NewRebootStatus={0}] [CurrentRebootStatus={1}]".format(str(new_reboot_status), self.__installation_reboot_status)) + self.composite_logger.log_debug("[SH] Setting new installation reboot status. [NewRebootStatus={0}][CurrentRebootStatus={1}]".format(str(new_reboot_status), self.__installation_reboot_status)) self.__installation_reboot_status = new_reboot_status self.set_installation_substatus_json() def __refresh_installation_reboot_status(self): """ Discovers if the system needs a reboot. Never allows going back to NotNeeded (deliberate). ONLY called internally. 
""" - self.composite_logger.log_debug("Checking if reboot status needs to reflect machine reboot status.") if self.__installation_reboot_status in [Constants.RebootStatus.NOT_NEEDED, Constants.RebootStatus.COMPLETED]: # Checks only if it's a state transition we allow reboot_needed = self.is_reboot_pending if reboot_needed: - self.composite_logger.log_debug("Machine reboot status has changed to 'Required'.") + self.composite_logger.log_debug("[SH] Machine reboot status has changed to 'Required'.") self.__installation_reboot_status = Constants.RebootStatus.REQUIRED def set_reboot_pending(self, is_reboot_pending): - log_message = "Setting reboot pending status. [RebootPendingStatus={0}]".format(str(is_reboot_pending)) + log_message = "[SH] Setting reboot pending status. [RebootPendingStatus={0}]".format(str(is_reboot_pending)) self.composite_logger.log_debug(log_message) self.is_reboot_pending = is_reboot_pending # endregion @@ -293,35 +293,47 @@ def set_reboot_pending(self, is_reboot_pending): def report_sequence_number_changed_termination(self): """ Based on the current operation, adds an error status and sets the substatus to error """ current_operation = self.execution_config.operation.lower() - error_code = Constants.PatchOperationErrorCodes.NEWER_OPERATION_SUPERSEDED + error_code = Constants.PatchOperationErrorCodes.CL_NEWER_OPERATION_SUPERSEDED message = "Execution was stopped due to a newer operation taking precedence." 
- if current_operation == Constants.ASSESSMENT.lower() or self.execution_config.exec_auto_assess_only: - self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.ASSESSMENT) - self.set_assessment_substatus_json(status=Constants.STATUS_ERROR) - elif current_operation == Constants.CONFIGURE_PATCHING.lower() or current_operation == Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT.lower(): - self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.CONFIGURE_PATCHING) - self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT) - self.set_configure_patching_substatus_json(status=Constants.STATUS_ERROR) - elif current_operation == Constants.INSTALLATION.lower(): - self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.INSTALLATION) - self.set_installation_substatus_json(status=Constants.STATUS_ERROR) + if current_operation == Constants.Op.ASSESSMENT.lower() or self.execution_config.exec_auto_assess_only: + self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.Op.ASSESSMENT) + self.set_assessment_substatus_json(status=Constants.Status.ERROR) + elif current_operation == Constants.Op.CONFIGURE_PATCHING.lower() or current_operation == Constants.Op.CONFIGURE_PATCHING_AUTO_ASSESSMENT.lower(): + self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.Op.CONFIGURE_PATCHING) + self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.Op.CONFIGURE_PATCHING_AUTO_ASSESSMENT) + self.set_configure_patching_substatus_json(status=Constants.Status.ERROR) + elif current_operation == Constants.Op.INSTALLATION.lower(): + self.add_error_to_status(message, error_code, current_operation_override_for_error=Constants.Op.INSTALLATION) + self.set_installation_substatus_json(status=Constants.Status.ERROR) # endregion 
- Terminal state management # region - Substatus generation + def set_operation_substatus_json(self, operation_name, status=Constants.Status.TRANSITIONING, code=0): + """ Sets the status of any managed first-class operation """ + operation_name = operation_name.lower() + if operation_name == Constants.Op.CONFIGURE_PATCHING.lower(): + self.set_configure_patching_substatus_json(status, code) + elif operation_name == Constants.Op.ASSESSMENT.lower(): + self.set_assessment_substatus_json(status, code) + elif operation_name == Constants.Op.INSTALLATION.lower(): + self.set_installation_substatus_json(status, code) + else: + self.composite_logger.log_debug("Invalid operation name for operation substatus set. [OperationName={0}]".format(str(operation_name))) + def set_maintenance_window_exceeded(self, maintenance_windows_exceeded): self.__maintenance_window_exceeded = maintenance_windows_exceeded self.set_installation_substatus_json() - def set_assessment_substatus_json(self, status=Constants.STATUS_TRANSITIONING, code=0): + def set_assessment_substatus_json(self, status=Constants.Status.TRANSITIONING, code=0): """ Prepare the assessment substatus json including the message containing assessment summary """ - self.composite_logger.log_debug("Setting assessment substatus. [Substatus={0}]".format(str(status))) + self.composite_logger.log_debug("[SH] Setting assessment substatus. 
[Substatus={0}]".format(str(status))) # Wrap patches into assessment summary self.__assessment_summary_json = self.__new_assessment_summary_json(self.__assessment_packages, status, code) # Wrap assessment summary into assessment substatus - self.__assessment_substatus_json = self.__new_substatus_json_for_operation(Constants.PATCH_ASSESSMENT_SUMMARY, status, code, json.dumps(self.__assessment_summary_json)) + self.__assessment_substatus_json = self.__new_substatus_json_for_operation(Constants.OpSummary.ASSESSMENT, status, code, json.dumps(self.__assessment_summary_json)) # Update complete status on disk self.__write_status_file() @@ -351,26 +363,27 @@ def __new_assessment_summary_json(self, assessment_packages_json, status, code): "criticalAndSecurityPatchCount": critsec_patch_count, "otherPatchCount": other_patch_count, "patches": assessment_packages_json, + # "patchServiceUsed": str(self.package_manager_name), # uncomment when service-side support is complete "startTime": str(self.execution_config.start_time), "lastModifiedTime": str(self.env_layer.datetime.timestamp()), "startedBy": str(started_by), "errors": self.__set_errors_json(self.__assessment_total_error_count, self.__assessment_errors) } - if self.vm_cloud_type == Constants.VMCloudType.ARC: + if self.cloud_type == Constants.CloudType.ARC: substatus_message["patchAssessmentStatus"] = code substatus_message["patchAssessmentStatusString"] = status return substatus_message - def set_installation_substatus_json(self, status=Constants.STATUS_TRANSITIONING, code=0): + def set_installation_substatus_json(self, status=Constants.Status.TRANSITIONING, code=0): """ Prepare the deployment substatus json including the message containing deployment summary """ - self.composite_logger.log_debug("Setting installation substatus. [Substatus={0}]".format(str(status))) + self.composite_logger.log_debug("[SH] Setting installation substatus. 
[Substatus={0}]".format(str(status))) # Wrap patches into installation summary self.__installation_summary_json = self.__new_installation_summary_json(self.__installation_packages) # Wrap deployment summary into installation substatus - self.__installation_substatus_json = self.__new_substatus_json_for_operation(Constants.PATCH_INSTALLATION_SUMMARY, status, code, json.dumps(self.__installation_summary_json)) + self.__installation_substatus_json = self.__new_substatus_json_for_operation(Constants.OpSummary.INSTALLATION, status, code, json.dumps(self.__installation_summary_json)) # Update complete status on disk self.__write_status_file() @@ -388,18 +401,18 @@ def __new_installation_summary_json(self, installation_packages_json): failed_patch_count = 0 for i in range(0, len(installation_packages_json)): patch_installation_state = installation_packages_json[i]['patchInstallationState'] - if patch_installation_state == Constants.NOT_SELECTED: + if patch_installation_state == Constants.PackageStatus.NOT_SELECTED: not_selected_patch_count += 1 - elif patch_installation_state == Constants.EXCLUDED: + elif patch_installation_state == Constants.PackageStatus.EXCLUDED: excluded_patch_count += 1 - elif patch_installation_state == Constants.PENDING: + elif patch_installation_state == Constants.PackageStatus.PENDING: pending_patch_count += 1 - elif patch_installation_state == Constants.INSTALLED: + elif patch_installation_state == Constants.PackageStatus.INSTALLED: installed_patch_count += 1 - elif patch_installation_state == Constants.FAILED: + elif patch_installation_state == Constants.PackageStatus.FAILED: failed_patch_count += 1 else: - self.composite_logger.log_error("Unknown patch state recorded: {0}".format(str(patch_installation_state))) + self.composite_logger.log_error("[SH] Unknown patch state recorded. 
[State={0}]".format(str(patch_installation_state))) # Reboot status refresh self.__refresh_installation_reboot_status() @@ -415,24 +428,25 @@ def __new_installation_summary_json(self, installation_packages_json): "installedPatchCount": installed_patch_count, "failedPatchCount": failed_patch_count, "patches": installation_packages_json, + # "patchServiceUsed": str(self.package_manager_name), # uncomment when service-side support is complete "startTime": str(self.execution_config.start_time), "lastModifiedTime": str(self.env_layer.datetime.timestamp()), "maintenanceRunId": str(self.execution_config.maintenance_run_id) if self.execution_config.maintenance_run_id is not None else '', "errors": self.__set_errors_json(self.__installation_total_error_count, self.__installation_errors) } - def set_patch_metadata_for_healthstore_substatus_json(self, status=Constants.STATUS_SUCCESS, code=0, patch_version=Constants.PATCH_VERSION_UNKNOWN, report_to_healthstore=False, wait_after_update=False): + def set_patch_metadata_for_healthstore_substatus_json(self, status=Constants.Status.SUCCESS, code=0, patch_version=Constants.PATCH_VERSION_UNKNOWN, report_to_healthstore=False, wait_after_update=False): """ Prepare the healthstore substatus json including message containing summary to be sent to healthstore """ if self.execution_config.exec_auto_assess_only: raise Exception("Auto-assessment mode. Unexpected attempt to update healthstore status.") - self.composite_logger.log_debug("Setting patch metadata for healthstore substatus. [Substatus={0}] [Report to HealthStore={1}]".format(str(status), str(report_to_healthstore))) + self.composite_logger.log_debug("[SH] Setting healthstore substatus. 
[Substatus={0}][ReportToHealthStore={1}][WaitTimeInSecs={2}]".format(str(status), str(report_to_healthstore), str(Constants.WAIT_TIME_AFTER_HEALTHSTORE_STATUS_UPDATE_IN_SECS))) # Wrap patch metadata into healthstore summary self.__metadata_for_healthstore_summary_json = self.__new_patch_metadata_for_healthstore_json(patch_version, report_to_healthstore) # Wrap healthstore summary into healthstore substatus - self.__metadata_for_healthstore_substatus_json = self.__new_substatus_json_for_operation(Constants.PATCH_METADATA_FOR_HEALTHSTORE, status, code, json.dumps(self.__metadata_for_healthstore_summary_json)) + self.__metadata_for_healthstore_substatus_json = self.__new_substatus_json_for_operation(Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE, status, code, json.dumps(self.__metadata_for_healthstore_summary_json)) # Update complete status on disk self.__write_status_file() @@ -452,20 +466,20 @@ def __new_patch_metadata_for_healthstore_json(self, patch_version=Constants.PATC "shouldReportToHealthStore": report_to_healthstore } - def set_configure_patching_substatus_json(self, status=Constants.STATUS_TRANSITIONING, code=0, + def set_configure_patching_substatus_json(self, status=Constants.Status.TRANSITIONING, code=0, automatic_os_patch_state=Constants.AutomaticOSPatchStates.UNKNOWN, auto_assessment_state=Constants.AutoAssessmentStates.UNKNOWN): """ Prepare the configure patching substatus json including the message containing configure patching summary """ if self.execution_config.exec_auto_assess_only: raise Exception("Auto-assessment mode. Unexpected attempt to update configure patching status.") - self.composite_logger.log_debug("Setting configure patching substatus. [Substatus={0}]".format(str(status))) + self.composite_logger.log_debug("[SH] Setting configure patching substatus. 
[Substatus={0}]".format(str(status))) # Wrap default automatic OS patch state on the machine, at the time of this request, into configure patching summary self.__configure_patching_summary_json = self.__new_configure_patching_summary_json(automatic_os_patch_state, auto_assessment_state, status, code) # Wrap configure patching summary into configure patching substatus - self.__configure_patching_substatus_json = self.__new_substatus_json_for_operation(Constants.CONFIGURE_PATCHING_SUMMARY, status, code, json.dumps(self.__configure_patching_summary_json)) + self.__configure_patching_substatus_json = self.__new_substatus_json_for_operation(Constants.OpSummary.CONFIGURE_PATCHING, status, code, json.dumps(self.__configure_patching_summary_json)) # Update complete status on disk self.__write_status_file() @@ -487,7 +501,7 @@ def __new_configure_patching_summary_json(self, automatic_os_patch_state, auto_a }, "errors": self.__set_errors_json(self.__configure_patching_top_level_error_count, self.__configure_patching_errors) } - if self.vm_cloud_type == Constants.VMCloudType.ARC: + if self.cloud_type == Constants.CloudType.ARC: substatus_message["configurePatchStatus"] = code substatus_message["configurePatchStatusString"] = status return substatus_message @@ -519,7 +533,7 @@ def __new_basic_status_json(self): "version": 1.0, "timestampUTC": str(self.env_layer.datetime.timestamp()), "status": { - "name": "Azure Patch Management", + "name": "Azure Guest Patching Service", "operation": str(self.execution_config.operation), "status": "success", "code": 0, @@ -568,30 +582,29 @@ def load_status_file_components(self, initial_load=False): self.__configure_patching_errors = [] self.__configure_patching_auto_assessment_errors = [] - self.composite_logger.log_debug("Loading status file components [InitialLoad={0}].".format(str(initial_load))) + self.composite_logger.log_debug("[SH] Loading status file components [InitialLoad={0}].".format(str(initial_load))) # Remove older complete 
status files self.__removed_older_complete_status_files(self.execution_config.status_folder) # Verify the status file exists - if not, reset status file if not os.path.exists(self.complete_status_file_path) and initial_load: - self.composite_logger.log_warning("Status file not found at initial load. Resetting status file to defaults.") + self.composite_logger.log_debug("[SH] Status file not found at initial load. Resetting status file to defaults.") self.__reset_status_file() return # Load status data and sanity check structure - raise exception if data loss risk is detected on corrupt data complete_status_file_data = self.__load_complete_status_file_data(self.complete_status_file_path) if 'status' not in complete_status_file_data or 'substatus' not in complete_status_file_data['status']: - self.composite_logger.log_error("Malformed status file. Resetting status file for safety.") + self.composite_logger.log_error("[SH] Malformed status file. Resetting status file for safety.") self.__reset_status_file() return # Load portions of data that need to be built on for next write - raise exception if corrupt data is encountered - # todo: refactor self.__high_level_status_message = complete_status_file_data['status']['formattedMessage']['message'] for i in range(0, len(complete_status_file_data['status']['substatus'])): name = complete_status_file_data['status']['substatus'][i]['name'] - if name == Constants.PATCH_INSTALLATION_SUMMARY: # if it exists, it must be to spec, or an exception will get thrown + if name == Constants.OpSummary.INSTALLATION: # if it exists, it must be to spec, or an exception will get thrown if self.execution_config.exec_auto_assess_only: self.__installation_substatus_json = complete_status_file_data['status']['substatus'][i] else: @@ -604,7 +617,7 @@ def load_status_file_components(self, initial_load=False): if errors is not None and errors['details'] is not None: self.__installation_errors = errors['details'] self.__installation_total_error_count 
= self.__get_total_error_count_from_prev_status(errors['message']) - if name == Constants.PATCH_ASSESSMENT_SUMMARY: # if it exists, it must be to spec, or an exception will get thrown + if name == Constants.OpSummary.ASSESSMENT: # if it exists, it must be to spec, or an exception will get thrown self.__assessment_summary_json = self.__get_substatus_message(complete_status_file_data, i) self.__assessment_packages_map = collections.OrderedDict((package["patchId"], package) for package in self.__assessment_summary_json['patches']) self.__assessment_packages = list(self.__assessment_packages_map.values()) @@ -612,12 +625,12 @@ def load_status_file_components(self, initial_load=False): if errors is not None and errors['details'] is not None: self.__assessment_errors = errors['details'] self.__assessment_total_error_count = self.__get_total_error_count_from_prev_status(errors['message']) - if name == Constants.PATCH_METADATA_FOR_HEALTHSTORE: # if it exists, it must be to spec, or an exception will get thrown + if name == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE: # if it exists, it must be to spec, or an exception will get thrown if self.execution_config.exec_auto_assess_only: self.__metadata_for_healthstore_substatus_json = complete_status_file_data['status']['substatus'][i] else: self.__metadata_for_healthstore_summary_json = self.__get_substatus_message(complete_status_file_data, i) - if name == Constants.CONFIGURE_PATCHING_SUMMARY: # if it exists, it must be to spec, or an exception will get thrown + if name == Constants.OpSummary.CONFIGURE_PATCHING: # if it exists, it must be to spec, or an exception will get thrown if self.execution_config.exec_auto_assess_only: self.__configure_patching_substatus_json = complete_status_file_data['status']['substatus'][i] else: @@ -640,7 +653,7 @@ def __load_complete_status_file_data(self, file_path): if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: time.sleep(i + 1) else: - self.composite_logger.log_error("Unable to 
read status file (retries exhausted). Error: {0}.".format(repr(error))) + self.composite_logger.log_error("[SH] Unable to read status file (retries exhausted). [Error={0}].".format(repr(error))) raise return complete_status_file_data @@ -693,7 +706,7 @@ def __write_status_file(self): if self.__configure_patching_substatus_json is not None: complete_status_payload['status']['substatus'].append(self.__configure_patching_substatus_json) if os.path.isdir(self.complete_status_file_path): - self.composite_logger.log_error("Core state file path returned a directory. Attempting to reset.") + self.composite_logger.log_debug("[SH] Core state file path returned a directory. Attempting to reset.") shutil.rmtree(self.complete_status_file_path) # Write complete status file .complete.status @@ -705,7 +718,7 @@ def __write_status_file(self): # region - Error objects def set_current_operation(self, operation): - if self.execution_config.exec_auto_assess_only and operation != Constants.ASSESSMENT: + if self.execution_config.exec_auto_assess_only and operation != Constants.Op.ASSESSMENT: raise Exception("Status reporting for a non-assessment operation was attempted when executing in auto-assessment mode. [Operation={0}]".format(str(operation))) self.__current_operation = operation @@ -716,9 +729,21 @@ def __get_total_error_count_from_prev_status(self, error_message): try: return int(re.search('(.+?) error/s reported.', error_message).group(1)) except AttributeError: - self.composite_logger.log("Unable to fetch error count from error message reported in status. Attempted to read [Message={0}]".format(error_message)) + self.composite_logger.log("[SH] Unable to parse error count from error message reported in status. 
[Message={0}]".format(error_message)) return 0 + def add_error_to_status_and_log_error(self, message, raise_exception=False, error_code=Constants.PatchOperationErrorCodes.DEFAULT_ERROR, current_operation_override_for_error=Constants.DEFAULT_UNSPECIFIED_VALUE): + """ Stitches together two commonly sequenced calls from the class guaranteed to be able to do it """ + self.composite_logger.log_error(message) + self.add_error_to_status(message, error_code, current_operation_override_for_error) + if raise_exception: + raise Exception(message, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) + + def add_error_to_status_and_log_warning(self, message, error_code=Constants.PatchOperationErrorCodes.DEFAULT_ERROR, current_operation_override_for_error=Constants.DEFAULT_UNSPECIFIED_VALUE): + """ Stitches together two commonly sequenced calls from the class guaranteed to be able to do it """ + self.composite_logger.log_warning(message) + self.add_error_to_status(message, error_code, current_operation_override_for_error) + def add_error_to_status(self, message, error_code=Constants.PatchOperationErrorCodes.DEFAULT_ERROR, current_operation_override_for_error=Constants.DEFAULT_UNSPECIFIED_VALUE): """ Add error to the respective error objects """ if not message or Constants.ERROR_ADDED_TO_STATUS in message: @@ -734,7 +759,7 @@ def add_error_to_status(self, message, error_code=Constants.PatchOperationErrorC # determine if a current operation override has been requested current_operation = self.__current_operation if current_operation_override_for_error == Constants.DEFAULT_UNSPECIFIED_VALUE else current_operation_override_for_error - if current_operation == Constants.ASSESSMENT: + if current_operation == Constants.Op.ASSESSMENT: if self.__try_add_error(self.__assessment_errors, error_detail): self.__assessment_total_error_count += 1 # retain previously set status and code for assessment substatus @@ -742,7 +767,7 @@ def add_error_to_status(self, message, 
error_code=Constants.PatchOperationErrorC self.set_assessment_substatus_json(status=self.__assessment_substatus_json["status"], code=self.__assessment_substatus_json["code"]) else: self.set_assessment_substatus_json() - elif current_operation == Constants.INSTALLATION: + elif current_operation == Constants.Op.INSTALLATION: if self.__try_add_error(self.__installation_errors, error_detail): self.__installation_total_error_count += 1 # retain previously set status and code for installation substatus @@ -750,8 +775,8 @@ def add_error_to_status(self, message, error_code=Constants.PatchOperationErrorC self.set_installation_substatus_json(status=self.__installation_substatus_json["status"], code=self.__installation_substatus_json["code"]) else: self.set_installation_substatus_json() - elif current_operation == Constants.CONFIGURE_PATCHING or current_operation == Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT: - if current_operation == Constants.CONFIGURE_PATCHING_AUTO_ASSESSMENT: + elif current_operation == Constants.Op.CONFIGURE_PATCHING or current_operation == Constants.Op.CONFIGURE_PATCHING_AUTO_ASSESSMENT: + if current_operation == Constants.Op.CONFIGURE_PATCHING_AUTO_ASSESSMENT: if self.__try_add_error(self.__configure_patching_auto_assessment_errors, error_detail): self.__configure_patching_auto_assessment_error_count += 1 else: @@ -772,7 +797,7 @@ def add_error_to_status(self, message, error_code=Constants.PatchOperationErrorC def __ensure_error_message_restriction_compliance(self, full_message): """ Removes line breaks, tabs and restricts message to a character limit """ - message_size_limit = Constants.STATUS_ERROR_MSG_SIZE_LIMIT_IN_CHARACTERS + message_size_limit = Constants.Config.STATUS_ERROR_MSG_SIZE_LIMIT_IN_CHARACTERS formatted_message = re.sub(r"\s+", " ", str(full_message)) return formatted_message[:message_size_limit - 3] + '...' 
if len(formatted_message) > message_size_limit else formatted_message @@ -791,19 +816,19 @@ def __try_add_error(error_list, detail): # All details contained from new message in an existing message already return False - if len(error_list) >= Constants.STATUS_ERROR_LIMIT: - errors_to_remove = len(error_list) - Constants.STATUS_ERROR_LIMIT + 1 + if len(error_list) >= Constants.Config.STATUS_ERROR_LIMIT: + errors_to_remove = len(error_list) - Constants.Config.STATUS_ERROR_LIMIT + 1 for x in range(0, errors_to_remove): error_list.pop() error_list.insert(0, detail) return True - def __set_errors_json(self, error_count_by_operation, errors_by_operation): + def __set_errors_json(self, total_error_count_by_operation, errors_by_operation): """ Compose the error object json to be added in 'errors' in given operation's summary """ - message = "{0} error/s reported.".format(error_count_by_operation) - message += " The latest {0} error/s are shared in detail. To view all errors, review this log file on the machine: {1}".format(len(errors_by_operation), self.__log_file_path) if error_count_by_operation > 0 else "" + message = "{0} error(s) occurred.".format(total_error_count_by_operation) + message += " {0} latest error details shared. 
Review log for all errors: {1}".format(len(errors_by_operation), self.__log_file_path) if total_error_count_by_operation > len(errors_by_operation) else "" return { - "code": Constants.PatchOperationTopLevelErrorCode.SUCCESS if error_count_by_operation == 0 else Constants.PatchOperationTopLevelErrorCode.ERROR, + "code": Constants.PatchOperationTopLevelErrorCode.SUCCESS if total_error_count_by_operation == 0 else Constants.PatchOperationTopLevelErrorCode.ERROR, "details": errors_by_operation, "message": message } @@ -823,7 +848,7 @@ def __removed_older_complete_status_files(self, status_folder): os.remove(complete_status_file) files_removed.append(complete_status_file) except Exception as e: - self.composite_logger.log_debug("Error deleting complete status file. [File={0} [Exception={1}]]".format(repr(complete_status_file), repr(e))) + self.composite_logger.log_debug("[SH] Error deleting complete status file. [File={0}][Exception={1}]".format(complete_status_file, repr(e))) - self.composite_logger.log_debug("Cleaned up older complete status files: {0}".format(files_removed)) + self.composite_logger.log_debug("[SH] Cleaned up older complete status files: {0}".format(files_removed)) diff --git a/src/core/src/service_interfaces/TelemetryWriter.py b/src/core/src/service_interfaces/TelemetryWriter.py index e3206fdce..ae86b5e12 100644 --- a/src/core/src/service_interfaces/TelemetryWriter.py +++ b/src/core/src/service_interfaces/TelemetryWriter.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -28,7 +28,7 @@ class TelemetryWriter(object): """Class for writing telemetry data to data transports""" - TELEMETRY_BUFFER_DELIMETER= "\n|\t" + TELEMETRY_BUFFER_DELIMITER = "\n|\t" def __init__(self, env_layer, composite_logger, events_folder_path, telemetry_supported): self.env_layer = env_layer @@ -46,20 +46,19 @@ def __init__(self, env_layer, composite_logger, events_folder_path, telemetry_su self.__is_telemetry_supported = telemetry_supported and self.events_folder_path is not None - self.write_event('Started Linux patch core operation.', Constants.TelemetryEventLevel.Informational) + self.write_event('Started AzGPS Linux Patch Extension (Core) telemetry.', Constants.EventLevel.Info) self.machine_info = None - self.set_and_write_machine_config_info() + self.__set_and_write_machine_config_info() self.telemetry_buffer_store = "" self.last_telemetry_event_level = None - def write_config_info(self, config_info, config_type='unknown'): # Configuration info payload_json = { 'config_type': config_type, 'config_value': config_info } - return self.write_event(payload_json, Constants.TelemetryEventLevel.Informational) + return self.write_event(payload_json, Constants.EventLevel.Info) def write_package_info(self, package_name, package_ver, package_size, install_dur, install_result, code_path, install_cmd, output=''): # Package information compiled after the package is attempted to be installed @@ -70,21 +69,21 @@ def write_package_info(self, package_name, package_ver, package_size, install_du 'package_size': str(package_size), 'install_duration': str(install_dur), 'install_result': str(install_result), 'code_path': code_path, 'install_cmd': str(install_cmd), 'output': 
str(output)[0:max_output_length]} - self.write_event(message, Constants.TelemetryEventLevel.Informational) + self.write_event(message, Constants.EventLevel.Info) # additional message payloads for output continuation only if we need it for specific troubleshooting if len(output) > max_output_length: for i in range(1, int(len(output)/max_output_length) + 1): message = {'install_cmd': str(install_cmd), 'output_continuation': str(output)[(max_output_length*i):(max_output_length*(i+1))]} - self.write_event(message, Constants.TelemetryEventLevel.Informational) + self.write_event(message, Constants.EventLevel.Info) # Composed payload - def set_and_write_machine_config_info(self): + def __set_and_write_machine_config_info(self): # Machine info - sent only once at the start of the run self.machine_info = "[PlatformName={0}][PlatformVersion={1}][MachineCpu={2}][MachineArch={3}][DiskType={4}]".format( str(self.env_layer.platform.linux_distribution()[0]), str(self.env_layer.platform.linux_distribution()[1]), - self.get_machine_processor(), str(self.env_layer.platform.machine()), self.get_disk_type()) - self.write_event("Machine info is: {0}".format(self.machine_info), Constants.TelemetryEventLevel.Informational) + self.__get_machine_processor(), str(self.env_layer.platform.machine()), self.__get_disk_type()) + self.write_event("[TW] System configuration for troubleshooting. 
{0}".format(self.machine_info), Constants.EventLevel.Info) def write_execution_error(self, cmd, code, output): # Expected to log any errors from a cmd execution, including package manager execution errors @@ -93,11 +92,11 @@ def write_execution_error(self, cmd, code, output): 'code': str(code), 'output': str(output) } - return self.write_event(error_payload, Constants.TelemetryEventLevel.Error) + return self.write_event(error_payload, Constants.EventLevel.Error) # endregion # region Machine config retrieval methods - def get_machine_processor(self): + def __get_machine_processor(self): """Retrieve machine processor info""" cmd = "cat /proc/cpuinfo | grep name" code, out = self.env_layer.run_command_output(cmd, False, False) @@ -109,16 +108,16 @@ def get_machine_processor(self): lines = out.split("\n") return lines[0].split(":")[1].lstrip() - def get_disk_type(self): + def __get_disk_type(self): """ Retrieve disk info """ cmd = "cat /sys/block/sda/queue/rotational" code, out = self.env_layer.run_command_output(cmd, False, False) if "1" in out: - return "Hard drive" + return "HDD" elif "0" in out: return "SSD" else: - return "Unknown" + return "" # end region @staticmethod @@ -128,14 +127,14 @@ def __get_events_folder_path_exists(events_folder_path): def __new_event_json(self, event_level, message, task_name): return { - "Version": Constants.EXT_VERSION, + "Version": Constants.AZGPS_LPE_VERSION, "Timestamp": str(datetime.datetime.utcnow()), "TaskName": task_name, "EventLevel": event_level, "Message": self.__ensure_message_restriction_compliance(message), "EventPid": "", "EventTid": "", - "OperationId": self.__operation_id # activity id from from config settings + "OperationId": self.__operation_id # activity id from config settings } def __ensure_message_restriction_compliance(self, full_message): @@ -144,14 +143,14 @@ def __ensure_message_restriction_compliance(self, full_message): Adds a telemetry event counter at the end of every event, irrespective of truncation, 
which can be used in debugging operation flow. """ try: - message_size_limit_in_chars = Constants.TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS + message_size_limit_in_chars = Constants.TelemetryConfig.MSG_SIZE_LIMIT_IN_CHARS formatted_message = re.sub(r"\s+", " ", str(full_message)) - if len(formatted_message.encode('utf-8')) + Constants.TELEMETRY_EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS > message_size_limit_in_chars: + if len(formatted_message.encode('utf-8')) + Constants.TelemetryConfig.EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS > message_size_limit_in_chars: self.composite_logger.log_telemetry_module("Data sent to telemetry will be truncated as it exceeds size limit. [Message={0}]".format(str(formatted_message))) formatted_message = formatted_message.encode('utf-8') - chars_dropped = len(formatted_message) - message_size_limit_in_chars + Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + Constants.TELEMETRY_EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS - formatted_message = formatted_message[:message_size_limit_in_chars - Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS - Constants.TELEMETRY_EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS].decode('utf-8') + '. [{0} chars dropped]'.format(chars_dropped) + chars_dropped = len(formatted_message) - message_size_limit_in_chars + Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + Constants.TelemetryConfig.EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS + formatted_message = formatted_message[:message_size_limit_in_chars - Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS - Constants.TelemetryConfig.EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS].decode('utf-8') + '. 
[{0} chars dropped]'.format(chars_dropped) formatted_message += " [TC={0}]".format(self.__telemetry_event_counter) return formatted_message @@ -163,7 +162,7 @@ def __ensure_message_restriction_compliance(self, full_message): def write_event_with_buffer(self, message, event_level, buffer_msg): if buffer_msg == Constants.BufferMessage.TRUE and (event_level == self.last_telemetry_event_level or self.last_telemetry_event_level is None): if self.telemetry_buffer_store != "": - self.telemetry_buffer_store = self.telemetry_buffer_store + self.TELEMETRY_BUFFER_DELIMETER + message + self.telemetry_buffer_store = self.telemetry_buffer_store + self.TELEMETRY_BUFFER_DELIMITER + message else: self.telemetry_buffer_store = message @@ -179,7 +178,7 @@ def write_event_with_buffer(self, message, event_level, buffer_msg): elif buffer_msg == Constants.BufferMessage.FLUSH: if self.telemetry_buffer_store != "": - self.telemetry_buffer_store = self.telemetry_buffer_store + self.TELEMETRY_BUFFER_DELIMETER + message + self.telemetry_buffer_store = self.telemetry_buffer_store + self.TELEMETRY_BUFFER_DELIMITER + message self.write_event(self.telemetry_buffer_store, self.last_telemetry_event_level) else: self.write_event(message, event_level) @@ -187,12 +186,12 @@ def write_event_with_buffer(self, message, event_level, buffer_msg): self.last_telemetry_event_level = None self.telemetry_buffer_store = "" - def write_event(self, message, event_level=Constants.TelemetryEventLevel.Informational, task_name=Constants.TelemetryTaskName.UNKNOWN, is_event_file_throttling_needed=True): + def write_event(self, message, event_level=Constants.EventLevel.Info, task_name=Constants.TelemetryTaskName.UNKNOWN, is_event_file_throttling_needed=True): """ Creates and writes event to event file after validating none of the telemetry size restrictions are breached NOTE: is_event_file_throttling_needed is used to determine if event file throttling is required and as such should always be True. 
- The only scenario where this is False is when throttling is taking place and we write to telemetry about it. i.e. only from within __throttle_telemetry_writes_if_required()""" + The only scenario where this is False is when throttling is taking place, and, we write to telemetry about it. i.e. only from within __throttle_telemetry_writes_if_required()""" try: - if not self.is_telemetry_supported() or not Constants.TELEMETRY_ENABLED_AT_EXTENSION: + if not self.is_telemetry_supported(): return # ensure file throttle limit is reached @@ -205,20 +204,20 @@ def write_event(self, message, event_level=Constants.TelemetryEventLevel.Informa task_name = self.__task_name event = self.__new_event_json(event_level, message, task_name) - if len(json.dumps(event)) > Constants.TELEMETRY_EVENT_SIZE_LIMIT_IN_CHARS: + if len(json.dumps(event)) > Constants.TelemetryConfig.EVENT_SIZE_LIMIT_IN_CHARS: self.composite_logger.log_telemetry_module_error("Cannot send data to telemetry as it exceeded the acceptable data size. [Data not sent={0}]".format(json.dumps(message))) else: file_path, all_events = self.__get_file_and_content_to_write(self.events_folder_path, event) self.__write_event_using_temp_file(file_path, all_events) except Exception as e: - self.composite_logger.log_telemetry_module_error("Error occurred while writing telemetry events. [Error={0}]".format(repr(e))) - raise Exception("Internal reporting error. Execution could not complete.") + self.composite_logger.log_telemetry_module_error("[TW] Internal reporting error in writing telemetry events. [Error={0}]".format(repr(e))) + # Not raising an exception here because legitimate bugs can cause this in an isolated fashion without affecting other telemetry writes in the same operation / VM. 
def __delete_older_events_if_dir_size_limit_not_met(self): """ Delete older events until the at least one new event file can be added as per the size restrictions """ try: - if self.__get_events_dir_size() < Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() < Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS - Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: # Not deleting any existing event files as the event directory does not exceed max limit. At least one new event file can be added. Not printing this statement as it will add repetitive logs return @@ -228,7 +227,7 @@ def __delete_older_events_if_dir_size_limit_not_met(self): for event_file in event_files: try: - if self.__get_events_dir_size() < Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() < Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS - Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: # Not deleting any more event files as the event directory has sufficient space to add at least one new event file. Not printing this statement as it will add repetitive logs break @@ -236,9 +235,9 @@ def __delete_older_events_if_dir_size_limit_not_met(self): os.remove(event_file) self.composite_logger.log_telemetry_module("Deleted event file. [File={0}]".format(repr(event_file))) except Exception as e: - self.composite_logger.log_telemetry_module_error("Error deleting event file. [File={0}] [Exception={1}]".format(repr(event_file), repr(e))) + self.composite_logger.log_telemetry_module_error("Error deleting event file. [File={0}][Exception={1}]".format(repr(event_file), repr(e))) - if self.__get_events_dir_size() >= Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() >= Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS: self.composite_logger.log_telemetry_module_error("Older event files were not deleted. 
Current event will not be sent to telemetry as events directory size exceeds maximum limit") raise @@ -252,9 +251,9 @@ def __get_file_and_content_to_write(self, folder_path, data): file_path = self.__get_event_file_path(folder_path) all_events = [] if os.path.exists(file_path): - file_size = self.get_file_size(file_path) + file_size = self.__get_file_size(file_path) # if file_size exceeds max limit, sleep for 1 second, so the event can be written to a new file since the event file name is a timestamp - if file_size >= Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if file_size >= Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: time.sleep(1) file_path = self.__get_event_file_path(folder_path) else: @@ -277,15 +276,15 @@ def __throttle_telemetry_writes_if_required(self, is_event_file_throttling_neede # Computing seconds as per: https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds, since total_seconds() is not supported in python 2.6 time_from_throttle_start_check_total_seconds = ((time_from_event_count_throttle_check_start.microseconds + (time_from_event_count_throttle_check_start.seconds + time_from_event_count_throttle_check_start.days * 24 * 3600) * 10 ** 6) / 10 ** 6) - if time_from_throttle_start_check_total_seconds < Constants.TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE: + if time_from_throttle_start_check_total_seconds < Constants.TelemetryConfig.MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE: # If event count limit reached before time period, wait out the remaining time. 
Checking against one less than max limit to allow room for writing a throttling msg to telemetry - if self.event_count >= Constants.TELEMETRY_MAX_EVENT_COUNT_THROTTLE - 1: - end_time_for_event_count_throttle_check = self.start_time_for_event_count_throttle_check + datetime.timedelta(seconds=Constants.TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE) + if self.event_count >= Constants.TelemetryConfig.MAX_EVENT_COUNT_THROTTLE - 1: + end_time_for_event_count_throttle_check = self.start_time_for_event_count_throttle_check + datetime.timedelta(seconds=Constants.TelemetryConfig.MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE) time_to_wait = (end_time_for_event_count_throttle_check - datetime.datetime.utcnow()) time_to_wait_in_secs = ((time_to_wait.microseconds + (time_to_wait.seconds + time_to_wait.days * 24 * 3600) * 10 ** 6) / 10 ** 6) # Computing seconds as per: https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds, since total_seconds() is not supported in python 2.6 event_write_throttled_msg = "Max telemetry event file limit reached. Extension will wait until a telemetry event file can be written again. [WaitTimeInSecs={0}]".format(str(time_to_wait_in_secs)) self.composite_logger.log_telemetry_module(event_write_throttled_msg) - self.write_event(message=event_write_throttled_msg, event_level=Constants.TelemetryEventLevel.Informational, is_event_file_throttling_needed=False) + self.write_event(message=event_write_throttled_msg, event_level=Constants.EventLevel.Info, is_event_file_throttling_needed=False) time.sleep(time_to_wait_in_secs) self.start_time_for_event_count_throttle_check = datetime.datetime.utcnow() self.event_count = 1 @@ -307,7 +306,7 @@ def __write_event_using_temp_file(self, file_path, all_events, mode='w'): self.__telemetry_event_counter += 1 self.event_count += 1 except Exception as error: - self.composite_logger.log_telemetry_module_error("Unable to write to telemetry. 
[Event File={0}] [Error={1}].".format(str(file_path), repr(error))) + self.composite_logger.log_telemetry_module_error("Unable to write to telemetry. [Event File={0}][Error={1}].".format(str(file_path), repr(error))) raise def __get_events_dir_size(self): @@ -332,7 +331,7 @@ def __get_event_file_path(folder_path): return os.path.join(folder_path, str(int(round(time.time() * 1000))) + ".json") @staticmethod - def get_file_size(file_path): + def __get_file_size(file_path): """ Returns the size of a file. Extracted out for mocking in unit test """ return os.path.getsize(file_path) @@ -346,7 +345,7 @@ def __fetch_events_from_previous_file(self, file_path): if error.errno == errno.ENOENT: return [] else: - self.composite_logger.log_telemetry_module_error("Error occurred while fetching contents from existing event file. [File={0}] [Error={1}].".format(repr(file_path), repr(error))) + self.composite_logger.log_telemetry_module_error("Error occurred while fetching contents from existing event file. [File={0}][Error={1}].".format(repr(file_path), repr(error))) raise def set_operation_id(self, operation_id): diff --git a/src/core/src/service_interfaces/__init__.py b/src/core/src/service_interfaces/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/src/service_interfaces/__init__.py +++ b/src/core/src/service_interfaces/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/src/service_interfaces/LifecycleManager.py b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManager.py similarity index 75% rename from src/core/src/service_interfaces/LifecycleManager.py rename to src/core/src/service_interfaces/lifecycle_managers/LifecycleManager.py index ff2f9b34b..8b4377b96 100644 --- a/src/core/src/service_interfaces/LifecycleManager.py +++ b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -32,8 +32,8 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.status_handler = status_handler # Handshake file paths - self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.EXT_STATE_FILE) - self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.CORE_STATE_FILE) + self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.EXT) + self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.CORE) self.read_only_mode = True # safety valve on contention with redundancy @@ -47,7 +47,7 @@ def lifecycle_status_check(self): # region - State management def read_extension_sequence(self): - self.composite_logger.log_debug("Reading extension sequence...") + self.composite_logger.log_verbose("[LM] Reading 
extension sequence...") if not os.path.exists(self.ext_state_file_path) or not os.path.isfile(self.ext_state_file_path): raise Exception("Extension state file not found.") @@ -58,33 +58,33 @@ def read_extension_sequence(self): return json.load(file_handle)['extensionSequence'] except Exception as error: if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on extension sequence read. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) + self.composite_logger.log_verbose("[LM] Exception on extension sequence read. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) time.sleep(i+1) else: - self.composite_logger.log_error("Unable to read extension state file (retries exhausted). [Exception={0}]".format(repr(error))) + self.composite_logger.log_error("[LM] Unable to read extension state file (retries exhausted). [Exception={0}]".format(repr(error))) raise def identify_and_mitigate_core_sequence_issues(self): - """ Checks for issues with the core sequence file (file not exists, is dir, etc) and attempts to mitigate them. """ + """ Checks for issues with the core sequence file (file not exists, is dir, etc.) and attempts to mitigate them. """ if not os.path.exists(self.core_state_file_path) or not os.path.isfile(self.core_state_file_path): # Neutralizes directories if os.path.isdir(self.core_state_file_path): - self.composite_logger.log_error("Core state file path returned a directory. Attempting to reset.") + self.composite_logger.log_error("[LM] Core state file path returned a directory. Attempting to reset.") shutil.rmtree(self.core_state_file_path) # Writes a vanilla core sequence file - self.composite_logger.log_warning("Core state file did not exist. Attempting to reset.") + self.composite_logger.log_warning("[LM] Core state file did not exist. 
Attempting to reset.") self.read_only_mode = False self.update_core_sequence() elif os.path.exists(self.core_state_file_path) and os.path.isfile(self.core_state_file_path) and os.stat(self.core_state_file_path).st_size == 0: # Core sequence file exists but is empty (unexpected state that will result in a JSON decode error) # Write a vanilla core sequence file to correct empty file - self.composite_logger.log_warning("Core state file existed but was empty. Attempting to reset.") + self.composite_logger.log_warning("[LM] Core state file existed but was empty. Attempting to reset.") self.read_only_mode = False self.update_core_sequence() def read_core_sequence(self): """ Reads the core sequence file, but additionally establishes if this class is allowed to write to it when the freshest data is evaluated. """ - self.composite_logger.log_debug("Reading core sequence...") + self.composite_logger.log_verbose("[LM] Reading core sequence...") self.identify_and_mitigate_core_sequence_issues() # Read (with retries for only IO Errors) @@ -104,15 +104,15 @@ def read_core_sequence(self): if core_sequence['completed'].lower() == 'true' or len(self.identify_running_processes(core_sequence['processIds'])) == 0: # Short-circuit for re-enable for completed non-auto-assess operations that should not run if not self.execution_config.exec_auto_assess_only and core_sequence['number'] == self.execution_config.sequence_number and core_sequence['completed'].lower() == 'true': - self.composite_logger.log_debug("Not attempting to take ownership of core sequence since the sequence number as it's already done and this is the main process.") + self.composite_logger.log_verbose("[LM] Not attempting to take ownership of core sequence since the sequence number as it's already done and this is the main process.") return core_sequence # Auto-assess over non-auto-assess is not a trivial override and is short-circuited to be evaluated in detail later if self.execution_config.exec_auto_assess_only and 
not core_sequence["autoAssessment"].lower() == 'true': - self.composite_logger.log_debug("Auto-assessment cannot supersede the main core process trivially.") + self.composite_logger.log_verbose("[LM] Auto-assessment cannot supersede the main core process trivially.") return core_sequence - self.composite_logger.log_debug("Attempting to take ownership of core sequence.") + self.composite_logger.log_verbose("[LM] Attempting to take ownership of core sequence.") self.read_only_mode = False self.update_core_sequence() self.read_only_mode = True @@ -122,13 +122,13 @@ def read_core_sequence(self): core_sequence = json.load(file_handle)['coreSequence'] if os.getpid() in core_sequence['processIds']: - self.composite_logger.log_debug("Successfully took ownership of core sequence.") + self.composite_logger.log_debug("[LM] Successfully took ownership of core sequence.") self.read_only_mode = False return core_sequence except Exception as error: if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on core sequence read. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) + self.composite_logger.log_verbose("[LM] Exception on core sequence read. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) time.sleep(i + 1) else: self.composite_logger.log_error("Unable to read core state file (retries exhausted). [Exception={0}]".format(repr(error))) @@ -136,10 +136,10 @@ def read_core_sequence(self): def update_core_sequence(self, completed=False): if self.read_only_mode: - self.composite_logger.log_debug("Core sequence will not be updated to avoid contention... [DesiredCompletedValue={0}]".format(str(completed))) + self.composite_logger.log_debug("[LM] Core sequence will not be updated to avoid contention... [DesiredCompletedValue={0}]".format(str(completed))) return - self.composite_logger.log_debug("Updating core sequence... 
[Completed={0}]".format(str(completed))) + self.composite_logger.log_debug("[LM] Updating core sequence... [Completed={0}]".format(str(completed))) core_sequence = {'number': self.execution_config.sequence_number, 'action': self.execution_config.operation, 'completed': str(completed), @@ -149,7 +149,7 @@ def update_core_sequence(self, completed=False): core_state_payload = json.dumps({"coreSequence": core_sequence}) if os.path.isdir(self.core_state_file_path): - self.composite_logger.log_error("Core state file path returned a directory. Attempting to reset.") + self.composite_logger.log_debug("[LM] Core state file path returned a directory. Attempting to reset.") shutil.rmtree(self.core_state_file_path) for i in range(0, Constants.MAX_FILE_OPERATION_RETRY_COUNT): @@ -158,13 +158,13 @@ def update_core_sequence(self, completed=False): file_handle.write(core_state_payload) except Exception as error: if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on core sequence update. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) + self.composite_logger.log_verbose("[LM] Exception on core sequence update. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) time.sleep(i + 1) else: - self.composite_logger.log_error("Unable to write to core state file (retries exhausted). [Exception={0}]".format(repr(error))) + self.composite_logger.log_error("[LM] Unable to write to core state file (retries exhausted). 
[Exception={0}]".format(repr(error))) raise - self.composite_logger.log_debug("Completed updating core sequence.") + self.composite_logger.log_verbose("[LM] Completed updating core sequence.") # endregion # region - Process Management @@ -176,7 +176,7 @@ def identify_running_processes(self, process_ids): process_id = int(process_id) if self.is_process_running(process_id): running_process_ids.append(process_id) - self.composite_logger.log("Processes still running from the previous request: [PIDsFound={0}][PreviousPIDs={1}]".format(str(running_process_ids) if len(running_process_ids)!=0 else 'None',str(process_ids))) + self.composite_logger.log_verbose("[LM] Processes still running from the previous request: [PIDsFound={0}][PreviousPIDs={1}]".format(str(running_process_ids) if len(running_process_ids) != 0 else 'None',str(process_ids))) return running_process_ids @staticmethod @@ -199,7 +199,7 @@ def is_process_running(pid): # endregion # region - Identity - def get_vm_cloud_type(self): + def get_cloud_type(self): pass # endregion diff --git a/src/core/src/service_interfaces/LifecycleManagerArc.py b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerArc.py similarity index 95% rename from src/core/src/service_interfaces/LifecycleManagerArc.py rename to src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerArc.py index 348316991..080aecdde 100644 --- a/src/core/src/service_interfaces/LifecycleManagerArc.py +++ b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerArc.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,9 @@ import json import os -import shutil import time from core.src.bootstrap.Constants import Constants -from core.src.service_interfaces.LifecycleManager import LifecycleManager +from service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager class LifecycleManagerArc(LifecycleManager): """Class for managing the core code's lifecycle within the extension wrapper""" @@ -28,8 +27,8 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ super(LifecycleManagerArc,self).__init__(env_layer,execution_config,composite_logger,telemetry_writer, status_handler) # Handshake file paths - self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.EXT_STATE_FILE) - self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.CORE_STATE_FILE) + self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.EXT) + self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.CORE) # Writing to log self.composite_logger.log_debug("Initializing LifecycleManagerArc") # Variables @@ -89,20 +88,20 @@ def execution_start_check(self): self.env_layer.exit(0) else: self.composite_logger.file_logger.flush() - self.composite_logger.log_warning("Auto-assessment is NOT safe to start yet. Waiting to retry (up to set timeout). 
[LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.REBOOT_BUFFER_IN_MINUTES))) + self.composite_logger.log_warning("Auto-assessment is NOT safe to start yet. Waiting to retry (up to set timeout). [LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.Config.REBOOT_BUFFER_IN_MINUTES))) self.composite_logger.file_logger.flush() time.sleep(30) continue # MAYBE SAFE TO START. Safely timeout if wait for any core restart events (from a potential reboot) has exceeded the maximum reboot buffer - if elapsed_time_in_minutes > Constants.REBOOT_BUFFER_IN_MINUTES: + if elapsed_time_in_minutes > Constants.Config.REBOOT_BUFFER_IN_MINUTES: self.composite_logger.log_debug("Auto-assessment is now considered SAFE to start as Core timed-out in reporting completion mark. [LastHeartbeat={0}][Operation={1}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']))) self.read_only_mode = False break # Briefly pause execution to re-check all states (including reboot buffer) again self.composite_logger.file_logger.flush() - self.composite_logger.log_debug("Auto-assessment is waiting for Core state completion mark (up to set timeout). [LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.REBOOT_BUFFER_IN_MINUTES))) + self.composite_logger.log_debug("Auto-assessment is waiting for Core state completion mark (up to set timeout). 
[LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.Config.REBOOT_BUFFER_IN_MINUTES))) self.composite_logger.file_logger.flush() time.sleep(30) @@ -166,7 +165,7 @@ def read_arc_core_sequence(self): return core_sequence except Exception as error: if i < Constants.MAX_FILE_OPERATION_RETRY_COUNT - 1: - self.composite_logger.log_warning("Exception on arc core sequence read. [Exception={0}] [RetryCount={1}]".format(repr(error), str(i))) + self.composite_logger.log_warning("Exception on arc core sequence read. [Exception={0}][RetryCount={1}]".format(repr(error), str(i))) time.sleep(i + 1) else: self.composite_logger.log_error("Unable to read arc core state file (retries exhausted). [Exception={0}]".format(repr(error))) @@ -193,7 +192,7 @@ def lifecycle_status_check(self): # End region State checkers # region - Identity - def get_vm_cloud_type(self): - return Constants.VMCloudType.ARC + def get_cloud_type(self): + return Constants.CloudType.ARC # endregion diff --git a/src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerAzure.py b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerAzure.py new file mode 100644 index 000000000..e38ed74b1 --- /dev/null +++ b/src/core/src/service_interfaces/lifecycle_managers/LifecycleManagerAzure.py @@ -0,0 +1,144 @@ +# Copyright 2020 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ + +import os +import time +from core.src.bootstrap.Constants import Constants +from service_interfaces.lifecycle_managers.LifecycleManager import LifecycleManager + + +class LifecycleManagerAzure(LifecycleManager): + """ [Azure] Class for managing the core code's lifecycle within the extension wrapper """ + + def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): + super(LifecycleManagerAzure, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) + self.ext_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.EXT) + self.core_state_file_path = os.path.join(self.execution_config.config_folder, Constants.StateFiles.CORE) + + # region - State checkers + def execution_start_check(self): + """ [At startup] Checks if the current execution flow should be happening """ + if not self.execution_config.exec_auto_assess_only: + self.__execution_start_check_non_auto_assessment() + else: + self.__execution_start_check_auto_assessment() + + def lifecycle_status_check(self): + """ [Background] Lifecycle status check embedded into any long-running operation """ + self.composite_logger.log_verbose("[LMAz] Performing lifecycle status check...") + extension_sequence = self.read_extension_sequence() + if int(extension_sequence['number']) == int(self.execution_config.sequence_number): + self.composite_logger.log_verbose("[LMAz] Extension sequence number verified to have not changed: {0}".format(str(extension_sequence['number']))) + self.update_core_sequence(completed=False) + else: + self.composite_logger.log_error("TERMINATING - Patch operation was superseded by a newer operation. 
[CurrentSequence={0}]".format(self.execution_config.sequence_number)) + self.status_handler.report_sequence_number_changed_termination() # fail everything in a sequence number change + self.update_core_sequence(completed=True) # forced-to-complete scenario | extension wrapper will be watching for this event + self.composite_logger.file_logger.close() + self.env_layer.exit(Constants.ExitCode.Okay) + self.composite_logger.log_verbose("[LMAz] Completed lifecycle status check.") + + def __execution_start_check_non_auto_assessment(self): + """ Evaluates if the operation is good to start. Exits if not. """ + self.composite_logger.log_debug("[LMAz][Non-AA] Execution start check initiating...") + extension_sequence = self.read_extension_sequence() + core_sequence = self.read_core_sequence() + + if int(extension_sequence['number']) == int(self.execution_config.sequence_number): + if core_sequence['completed'] is True: + # Block attempts to execute what last completed (fully) again + self.composite_logger.log_debug("[LMAz][Non-AA] BLOCKED (SAFE) - Already completed sequence number attempted. [SequenceNumber={0}]".format(str(extension_sequence['number']))) # this is okay, unless unexpected + self.composite_logger.file_logger.close() + self.env_layer.exit(Constants.ExitCode.Okay) + else: + # Incomplete current execution + self.composite_logger.log_debug("[LMAz][Non-AA] RESTART (INCOMPLETE) - Restarting execution for incomplete sequence number. [SequenceNumber={0}]".format(str(self.execution_config.sequence_number))) + elif int(extension_sequence['number']) < int(self.execution_config.sequence_number): + # Allow this but log a warning + self.composite_logger.log_debug("[LMAz][Non-AA] INVESTIGATE - Unexpected lower sequence number. 
[CurrentSequenceNumber={0}][LastRecorded={1}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']))) + else: + # New sequence number + self.composite_logger.log_verbose("[LMAz][Non-AA] NEW START - New sequence number accepted for execution. [CurrentSequenceNumber={0}][LastRecorded={1}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']))) + + def __execution_start_check_auto_assessment(self): + """ Evaluates if the auto-assessment operation is good to start. Exits if not. """ + self.composite_logger.log_debug("[LMAz][AA] Execution start check initiating...") + timer_start_time = self.env_layer.datetime.datetime_utcnow() + while True: + extension_sequence = self.read_extension_sequence() + core_sequence = self.read_core_sequence() + + # Timer evaluation + current_time = self.env_layer.datetime.datetime_utcnow() + elapsed_time_in_minutes = self.env_layer.datetime.total_minutes_from_time_delta(current_time - timer_start_time) + + # Check for sequence number mismatches + if int(self.execution_config.sequence_number) != int(core_sequence['number']): + if int(self.execution_config.sequence_number) < int(extension_sequence['number']) or int(self.execution_config.sequence_number) < int(core_sequence['number']): + self.composite_logger.log_debug("[LMAz][AA] EXITING (SUPERSEDED) - Auto-assessment NOT STARTED as newer sequence number detected. [Attempted={0}][DetectedExt={1}][DetectedCore={2}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']), str(core_sequence['number']))) + elif int(self.execution_config.sequence_number) > int(extension_sequence['number']) or int(self.execution_config.sequence_number) > int(core_sequence['number']): + self.composite_logger.log_debug("[LMAz][AA] EXITING (INVESTIGATE) - Auto-assessment NOT STARTED as an extension state anomaly was detected. 
[Attempted={0}][DetectedExt={1}][DetectedCore={2}]".format(str(self.execution_config.sequence_number), str(extension_sequence['number']), str(core_sequence['number']))) + self.composite_logger.file_logger.close() + self.env_layer.exit(Constants.ExitCode.Okay) # EXIT + + # DEFINITELY SAFE TO START. Correct sequence number marked as completed + if core_sequence['completed'].lower() == 'true': + self.composite_logger.log_debug("[LMAz][AA] SAFE TO START - Auto-assessment is SAFE to start. Existing sequence number marked as COMPLETED.\n") + self.read_only_mode = False + break # NORMAL START + + # Check for active running processes if not completed + if len(self.identify_running_processes(core_sequence['processIds'])) != 0: + if os.getpid() in core_sequence['processIds']: + self.composite_logger.log_debug("[LMAz][AA] SAFE TO START - Auto-assessment is SAFE to start. Core sequence ownership is already established.\n") + self.read_only_mode = False + break # NORMAL START + + # DEFINITELY _NOT_ SAFE TO START. Possible reasons: full core operation is in progress (okay), some previous auto-assessment is still running (bad scheduling, adhoc run, or process stalled) + if elapsed_time_in_minutes > Constants.MAX_AUTO_ASSESSMENT_WAIT_FOR_MAIN_CORE_EXEC_IN_MINUTES: # will wait up to the max allowed + self.composite_logger.log_debug("[LMAz][AA] EXITING (TIMED-OUT) - Auto-assessment is NOT safe to start yet.TIMED-OUT waiting to Core to complete. [LastHeartbeat={0}][Operation={1}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']))) + self.composite_logger.file_logger.close() + self.env_layer.exit(Constants.ExitCode.Okay) # EXIT + else: + self.composite_logger.file_logger.flush() + self.composite_logger.log_verbose("[LMAz][AA] WAITING WITH RETRY - Auto-assessment is NOT safe to start yet. Waiting to retry (up to set timeout). 
[LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.Config.REBOOT_BUFFER_IN_MINUTES))) + self.composite_logger.file_logger.flush() + time.sleep(Constants.Config.LIFECYCLE_MANAGER_STATUS_CHECK_WAIT_IN_SECS) + continue # CHECK AGAIN + + # MAYBE SAFE TO START. Safely timeout if wait for any core restart events (from a potential reboot) has exceeded the maximum reboot buffer + if elapsed_time_in_minutes > Constants.Config.REBOOT_BUFFER_IN_MINUTES: + self.composite_logger.log_debug("[LMAz][AA] SAFE TO START (CORE TIMEOUT) - Auto-assessment is now considered SAFE to start as Core timed-out in reporting completion. [LastHeartbeat={0}][Operation={1}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']))) + self.read_only_mode = False + break # START + + # Briefly pause execution to re-check all states (including reboot buffer) again + self.composite_logger.file_logger.flush() + self.composite_logger.log_verbose("[LMAz][AA] WAITING WITH RETRY (FOR CORE) - Auto-assessment is waiting for Core state completion mark (up to set timeout). 
[LastHeartbeat={0}][Operation={1}][ElapsedTimeInMinutes={2}][TotalWaitRequiredInMinutes={3}]".format(str(core_sequence['lastHeartbeat']), str(core_sequence['action']), str(elapsed_time_in_minutes), str(Constants.Config.REBOOT_BUFFER_IN_MINUTES))) + self.composite_logger.file_logger.flush() + time.sleep(Constants.Config.LIFECYCLE_MANAGER_STATUS_CHECK_WAIT_IN_SECS) + + # Signalling take-over of core state by auto-assessment after safety checks for any competing process + self.update_core_sequence(completed=False) + # Refresh status file in memory to be up-to-date + self.status_handler.load_status_file_components() + # End region State checkers + + # region - Identity + def get_cloud_type(self): + return Constants.CloudType.AZURE + # endregion + diff --git a/src/core/src/service_interfaces/lifecycle_managers/__init__.py b/src/core/src/service_interfaces/lifecycle_managers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/core/tests/Test_AptitudePackageManager.py b/src/core/tests/Test_AptPackageManager.py similarity index 78% rename from src/core/tests/Test_AptitudePackageManager.py rename to src/core/tests/Test_AptPackageManager.py index 62992185b..20be803c0 100644 --- a/src/core/tests/Test_AptitudePackageManager.py +++ b/src/core/tests/Test_AptPackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -22,10 +22,10 @@ from core.tests.library.ArgumentComposer import ArgumentComposer from core.tests.library.LegacyEnvLayerExtensions import LegacyEnvLayerExtensions from core.tests.library.RuntimeCompositor import RuntimeCompositor -from core.src.package_managers import AptitudePackageManager, UbuntuProClient +from package_managers.apt import AptPackageManager, UbuntuProClient -class TestAptitudePackageManager(unittest.TestCase): +class TestAptPackageManager(unittest.TestCase): def setUp(self): self.argument_composer = ArgumentComposer().get_composed_arguments() self.runtime = RuntimeCompositor(self.argument_composer, True, Constants.APT) @@ -119,7 +119,7 @@ def test_install_package_success(self): self.assertIsNotNone(package_manager) # test for successfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.INSTALLED) # needs to be fixed + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.INSTALLED) # needs to be fixed def test_is_installed_check_with_dpkg(self): self.runtime.set_legacy_test_type('SuccessInstallPath') @@ -135,24 +135,24 @@ def test_install_package_failure(self): self.runtime.set_legacy_test_type('FailInstallPath') package_manager = self.container.get('package_manager') - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) self.assertIsNotNone(package_manager) # test for unsuccessfully installing a package - 
self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.FAILED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.FAILED) self.assertRaises(Exception, lambda: package_manager.invoke_package_manager('sudo apt-get -y --only-upgrade true install force-dpkg-failure')) # ensure that error message appears in substatus properly substatus_file_data = [] with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: status = json.load(file_handle) - self.assertEqual(status[0]["status"]["status"].lower(), Constants.STATUS_SUCCESS.lower()) + self.assertEqual(status[0]["status"]["status"].lower(), Constants.Status.SUCCESS.lower()) substatus_file_data = status[0]["status"]["substatus"][0] error_msg = 'Package manager on machine is not healthy. 
To fix, please run: sudo dpkg --configure -a' self.assertNotEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"], None) self.assertTrue(error_msg in str(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"])) - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) def test_reboot_always_runs_only_once_if_no_reboot_is_required(self): argument_composer = ArgumentComposer() @@ -178,40 +178,40 @@ def test_install_package_only_upgrades(self): self.assertIsNotNone(package_manager) # test for unsuccessfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('iucode-tool', '1.5.1-1ubuntu0.1', True), Constants.PENDING) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('iucode-tool', '1.5.1-1ubuntu0.1', True), Constants.PackageStatus.PENDING) def test_disable_auto_os_update_with_two_patch_modes_enabled_success(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # disable with both update package lists and unattended upgrades enabled on the system os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.disable_auto_os_update() - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, 
os_patch_configuration_settings) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "1") self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Unattended-Upgrade'] == "1") - os_patch_configuration_settings = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + os_patch_configuration_settings = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "0"' in os_patch_configuration_settings) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in os_patch_configuration_settings) def test_disable_auto_os_update_with_one_patch_mode_enabled_success(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # disable with only one patch mode enabled on the system os_patch_configuration_settings = 'APT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.disable_auto_os_update() - 
self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "") self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Unattended-Upgrade'] == "1") - os_patch_configuration_settings = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + os_patch_configuration_settings = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "0"' in os_patch_configuration_settings) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in os_patch_configuration_settings) @@ -221,7 +221,7 @@ def test_get_current_auto_os_updates_with_no_os_patch_configuration_settings_fil package_manager = self.container.get('package_manager') package_manager.get_current_auto_os_patch_state = self.runtime.backup_get_current_auto_os_patch_state - self.assertTrue(package_manager.get_current_auto_os_patch_state() == Constants.AutomaticOSPatchStates.DISABLED) + self.assertTrue(package_manager.patch_mode_manager.get_current_auto_os_patch_state() == Constants.AutomaticOSPatchStates.DISABLED) package_manager.get_current_auto_os_patch_state = self.runtime.get_current_auto_os_patch_state @@ 
-229,17 +229,17 @@ def test_disable_auto_os_update_failure(self): # disable with non existing log file package_manager = self.container.get('package_manager') - self.assertRaises(Exception, package_manager.disable_auto_os_update) - self.assertFalse(package_manager.image_default_patch_configuration_backup_exists()) - self.assertTrue(not os.path.exists(package_manager.os_patch_configuration_settings_file_path)) + self.assertRaises(Exception, package_manager.patch_mode_manager.disable_auto_os_update) + self.assertFalse(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) + self.assertTrue(not os.path.exists(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path)) # disable with existing log file package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") os_patch_mode_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) self.runtime.env_layer.file_system.write_with_retry = self.mock_write_with_retry_raise_exception - self.assertFalse(package_manager.image_default_patch_configuration_backup_exists()) + self.assertFalse(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) def test_image_default_patch_mode_backup_exists(self): package_manager = self.container.get('package_manager') @@ -251,22 +251,22 @@ def test_image_default_patch_mode_backup_exists(self): } 
self.runtime.env_layer.file_system.write_with_retry(package_manager.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) - self.assertTrue(package_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup)) + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) + self.assertTrue(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup)) # invalid mode backup image_default_patch_configuration_backup = '[]' self.runtime.env_layer.file_system.write_with_retry(package_manager.image_default_patch_configuration_backup_path, '{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) - self.assertFalse(package_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup)) + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) + self.assertFalse(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_configuration_backup)) def test_image_default_patch_mode_backup_does_not_exist(self): package_manager = self.container.get('package_manager') # file does not exist package_manager.image_default_patch_mode_backup_path = "tests" - 
self.assertFalse(package_manager.image_default_patch_configuration_backup_exists()) + self.assertFalse(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) def test_is_image_default_patch_mode_backup_valid_true(self): package_manager = self.container.get('package_manager') @@ -275,7 +275,7 @@ def test_is_image_default_patch_mode_backup_valid_true(self): 'APT::Periodic::Update-Package-Lists': "1", 'APT::Periodic::Unattended-Upgrade': "1" } - self.assertTrue(package_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) + self.assertTrue(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) def test_is_image_default_patch_mode_backup_valid_false(self): package_manager = self.container.get('package_manager') @@ -283,14 +283,14 @@ def test_is_image_default_patch_mode_backup_valid_false(self): image_default_patch_mode_backup = { 'test': "1", } - self.assertFalse(package_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) + self.assertFalse(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) # with one valid patch mode setting image_default_patch_mode_backup = { 'APT::Periodic::Update-Package-Lists': "1", 'test': "1" } - self.assertFalse(package_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) + self.assertFalse(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(image_default_patch_mode_backup)) def test_overwrite_existing_image_default_patch_mode_backup(self): package_manager = self.container.get('package_manager') @@ -299,7 +299,7 @@ def test_overwrite_existing_image_default_patch_mode_backup(self): "APT::Periodic::Unattended-Upgrade": "1" } self.runtime.env_layer.file_system.write_with_retry(package_manager.image_default_patch_configuration_backup_path, 
'{0}'.format(json.dumps(image_default_patch_configuration_backup)), mode='w+') - package_manager.backup_image_default_patch_configuration_if_not_exists() + package_manager.patch_mode_manager.backup_image_default_patch_configuration_if_not_exists() image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "0") @@ -307,12 +307,12 @@ def test_overwrite_existing_image_default_patch_mode_backup(self): def test_backup_image_default_patch_mode_with_default_patch_mode_set(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # default system patch mode is set, write to log os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.backup_image_default_patch_configuration_if_not_exists() + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + package_manager.patch_mode_manager.backup_image_default_patch_configuration_if_not_exists() image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) 
self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "1") @@ -320,18 +320,18 @@ def test_backup_image_default_patch_mode_with_default_patch_mode_set(self): def test_backup_image_default_patch_mode_overwrite_backup_if_original_backup_was_invalid(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # backup file exists but the content is invalid, function should overwrite the file with valid content os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) existing_image_default_backup_configuration = '[]' self.runtime.write_to_file(package_manager.image_default_patch_configuration_backup_path, existing_image_default_backup_configuration) - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) - self.assertFalse(package_manager.is_image_default_patch_configuration_backup_valid(existing_image_default_backup_configuration)) + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) + self.assertFalse(package_manager.patch_mode_manager.is_image_default_patch_configuration_backup_valid(existing_image_default_backup_configuration)) - package_manager.backup_image_default_patch_configuration_if_not_exists() + package_manager.patch_mode_manager.backup_image_default_patch_configuration_if_not_exists() image_default_patch_configuration_backup 
= json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "1") @@ -339,11 +339,11 @@ def test_backup_image_default_patch_mode_overwrite_backup_if_original_backup_was def test_backup_image_default_patch_mode_with_default_patch_mode_not_set(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # default system patch mode is not set, write empty values to log os_patch_mode_settings = '' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) - package_manager.backup_image_default_patch_configuration_if_not_exists() + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) + package_manager.patch_mode_manager.backup_image_default_patch_configuration_if_not_exists() image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "") @@ -351,49 +351,49 @@ def test_backup_image_default_patch_mode_with_default_patch_mode_not_set(self): def test_backup_image_default_patch_mode_raises_exception(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = 
os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # default system patch mode is set, write to log os_patch_mode_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_mode_settings) self.runtime.env_layer.file_system.write_with_retry = self.mock_write_with_retry_raise_exception - self.assertRaises(Exception, package_manager.backup_image_default_patch_configuration_if_not_exists) + self.assertRaises(Exception, package_manager.patch_mode_manager.backup_image_default_patch_configuration_if_not_exists) def test_update_image_default_patch_mode(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "20auto-upgrades") # disable update package lists when enabled by default os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Update-Package-Lists', "0") - os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + 
self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Update-Package-Lists', "0") + os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings_file_path_read is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "0"' in os_patch_configuration_settings_file_path_read) self.assertTrue('APT::Periodic::Unattended-Upgrade "1"' in os_patch_configuration_settings_file_path_read) # disable unattended upgrades when enabled by default os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") - os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") + os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings_file_path_read is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "1"' in os_patch_configuration_settings_file_path_read) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in 
os_patch_configuration_settings_file_path_read) # disable unattended upgrades when default patch mode settings file is empty os_patch_configuration_settings = '' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") - os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") + os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings_file_path_read is not None) self.assertTrue('APT::Periodic::Update-Package-Lists' not in os_patch_configuration_settings_file_path_read) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in os_patch_configuration_settings_file_path_read) # disable unattended upgrades when it does not exist in default patch mode settings file os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") - os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + 
package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting('APT::Periodic::Unattended-Upgrade', "0") + os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings_file_path_read is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "1"' in os_patch_configuration_settings_file_path_read) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in os_patch_configuration_settings_file_path_read) @@ -405,11 +405,11 @@ def test_update_image_default_patch_mode_raises_exception(self): image_default_patch_mode = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' self.runtime.write_to_file(package_manager.image_default_patch_mode_file_path, image_default_patch_mode) self.runtime.env_layer.file_system.write_with_retry = self.mock_write_with_retry_raise_exception - self.assertRaises(Exception, package_manager.update_os_patch_configuration_sub_setting) + self.assertRaises(Exception, package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting) def test_is_reboot_pending_prerequisite_not_met_should_return_false(self): package_manager = self.container.get('package_manager') - package_manager._AptitudePackageManager__pro_client_prereq_met = False + package_manager._AptPackageManager__pro_client_prereq_met = False self.assertFalse(package_manager.is_reboot_pending()) @@ -417,7 +417,7 @@ def test_is_reboot_pending_prerequisite_met_should_return_true(self): reboot_mock = MockRebootRequiredResult() reboot_mock.mock_import_uaclient_reboot_required_module('reboot_required', 'mock_reboot_required_return_yes') package_manager = self.container.get('package_manager') - package_manager._AptitudePackageManager__pro_client_prereq_met = True + package_manager._AptPackageManager__pro_client_prereq_met = True 
self.assertTrue(package_manager.is_reboot_pending()) @@ -439,13 +439,13 @@ def test_is_pro_client_prereq_met_should_return_true_for_supported_os_version(se package_manager = self.container.get('package_manager') backup_package_manager_ubuntu_pro_client_is_pro_working = package_manager.ubuntu_pro_client.is_pro_working package_manager.ubuntu_pro_client.is_pro_working = self.mock_is_pro_working_return_true - backup_package_manager_is_minimum_required_python_installed = package_manager._AptitudePackageManager__is_minimum_required_python_installed - package_manager._AptitudePackageManager__is_minimum_required_python_installed = self.mock_minimum_required_python_installed_return_true + backup_package_manager_is_minimum_required_python_installed = package_manager._AptPackageManager__is_minimum_required_python_installed + package_manager._AptPackageManager__is_minimum_required_python_installed = self.mock_minimum_required_python_installed_return_true self.assertTrue(package_manager.check_pro_client_prerequisites()) package_manager.ubuntu_pro_client.is_pro_working = backup_package_manager_ubuntu_pro_client_is_pro_working - package_manager._AptitudePackageManager__is_minimum_required_python_installed = backup_package_manager_is_minimum_required_python_installed + package_manager._AptPackageManager__is_minimum_required_python_installed = backup_package_manager_is_minimum_required_python_installed def test_package_manager_instance_created_even_when_exception_thrown_in_pro(self): package_manager = self.container.get('package_manager') @@ -453,7 +453,8 @@ def test_package_manager_instance_created_even_when_exception_thrown_in_pro(self backup_package_manager_ubuntu_pro_client_install_or_update_pro = UbuntuProClient.UbuntuProClient.install_or_update_pro UbuntuProClient.UbuntuProClient.install_or_update_pro = self.mock_install_or_update_pro_raise_exception - obj = AptitudePackageManager.AptitudePackageManager(package_manager.env_layer, execution_config, 
package_manager.composite_logger, package_manager.telemetry_writer, package_manager.status_handler) + obj = AptPackageManager.AptPackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler, + self.runtime.patch_mode_manager, self.runtime.sources_manager, self.runtime.health_manager, Constants.APT) self.assertIsNotNone(obj) self.assertIsNotNone(obj.ubuntu_pro_client) @@ -468,13 +469,13 @@ def test_get_other_updates_success(self): runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT) runtime.set_legacy_test_type('UA_ESM_Required') - backup_AptitudePackageManager__pro_client_prereq_met = runtime.package_manager._AptitudePackageManager__pro_client_prereq_met - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = True + backup_AptPackageManager__pro_client_prereq_met = runtime.package_manager._AptPackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = True packages, versions = runtime.package_manager.get_other_updates() self.assertEqual(1, len(packages)) - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = backup_AptitudePackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = backup_AptPackageManager__pro_client_prereq_met obj.mock_unimport_uaclient_version_module() updates_obj.mock_unimport_uaclient_update_module() @@ -496,15 +497,15 @@ def test_set_security_esm_package_status_assessment(self): updates_obj.mock_import_uaclient_update_module('updates', 'mock_update_list_with_all_update_types') runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT) runtime.set_legacy_test_type('UA_ESM_Required') - backup_aptitudepackagemanager__pro_client_prereq_met = runtime.package_manager._AptitudePackageManager__pro_client_prereq_met - 
runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = True + backup_AptPackageManager__pro_client_prereq_met = runtime.package_manager._AptPackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = True - runtime.patch_assessor.start_assessment() + runtime.patch_assessor.start_operation_with_retries() status = "" error_set = False with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: status = json.load(file_handle) - self.assertEqual(status[0]["status"]["status"].lower(), Constants.STATUS_SUCCESS.lower()) + self.assertEqual(status[0]["status"]["status"].lower(), Constants.Status.SUCCESS.lower()) self.assertEqual(status[0]["status"]["substatus"][0]["name"], "PatchAssessmentSummary") # Parse the assessment data to check if we have logged the error details for esm_required. @@ -516,7 +517,7 @@ def test_set_security_esm_package_status_assessment(self): break self.assertTrue(error_set) - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = backup_aptitudepackagemanager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = backup_AptPackageManager__pro_client_prereq_met obj.mock_unimport_uaclient_version_module() updates_obj.mock_unimport_uaclient_update_module() @@ -524,25 +525,25 @@ def test_is_reboot_pending_pro_client_success(self): reboot_mock = MockRebootRequiredResult() reboot_mock.mock_import_uaclient_reboot_required_module('reboot_required', 'mock_reboot_required_return_no') runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT) - backup_AptitudePackageManager__pro_client_prereq_met = runtime.package_manager._AptitudePackageManager__pro_client_prereq_met - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = True + backup_AptPackageManager__pro_client_prereq_met = 
runtime.package_manager._AptPackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = True self.assertFalse(runtime.package_manager.is_reboot_pending()) - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = backup_AptitudePackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = backup_AptPackageManager__pro_client_prereq_met reboot_mock.mock_unimport_uaclient_reboot_required_module() def test_is_reboot_pending_test_mismatch(self): reboot_mock = MockRebootRequiredResult() reboot_mock.mock_import_uaclient_reboot_required_module('reboot_required', 'mock_reboot_required_return_yes') runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT) - backup__AptitudePackageManager__pro_client_prereq_met = runtime.package_manager._AptitudePackageManager__pro_client_prereq_met - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = True + backup__AptPackageManager__pro_client_prereq_met = runtime.package_manager._AptPackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = True # test should return true as we fall back to Ubuntu Pro Client api`s result. 
self.assertTrue(runtime.package_manager.is_reboot_pending()) reboot_mock.mock_unimport_uaclient_reboot_required_module() - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = backup__AptitudePackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = backup__AptPackageManager__pro_client_prereq_met def test_is_reboot_pending_test_raises_exception(self): runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT) @@ -550,15 +551,15 @@ def test_is_reboot_pending_test_raises_exception(self): runtime.package_manager.do_processes_require_restart = self.mock_do_processes_require_restart_raises_exception backup_package_manager_is_reboot_pending = runtime.package_manager.ubuntu_pro_client.is_reboot_pending runtime.package_manager.ubuntu_pro_client.is_reboot_pending = self.mock_is_reboot_pending_returns_False - backup__AptitudePackageManager__pro_client_prereq_met = runtime.package_manager._AptitudePackageManager__pro_client_prereq_met - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = True + backup__AptPackageManager__pro_client_prereq_met = runtime.package_manager._AptPackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = True # test returns true because, we return True if there is exception. 
self.assertTrue(runtime.package_manager.is_reboot_pending()) runtime.package_manager.do_processes_require_restart = backup_package_manager_do_processes_require_restart runtime.package_manager.ubuntu_pro_client.is_reboot_pending = backup_package_manager_is_reboot_pending - runtime.package_manager._AptitudePackageManager__pro_client_prereq_met = backup__AptitudePackageManager__pro_client_prereq_met + runtime.package_manager._AptPackageManager__pro_client_prereq_met = backup__AptPackageManager__pro_client_prereq_met def test_check_pro_client_prerequisites_should_return_false(self): package_manager = self.container.get('package_manager') @@ -575,7 +576,8 @@ def test_check_pro_client_prerequisites_should_return_false(self): def test_eula_accepted_for_patches(self): # EULA accepted in settings and commands updated accordingly self.runtime.execution_config.accept_package_eula = True - package_manager_for_test = AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + package_manager_for_test = AptPackageManager.AptPackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler, + self.runtime.patch_mode_manager, self.runtime.sources_manager, self.runtime.health_manager, Constants.APT) self.assertTrue("ACCEPT_EULA=Y" in package_manager_for_test.single_package_upgrade_simulation_cmd) self.assertTrue("ACCEPT_EULA=Y" in package_manager_for_test.single_package_dependency_resolution_template) self.assertTrue("ACCEPT_EULA=Y" in package_manager_for_test.single_package_upgrade_cmd) @@ -583,7 +585,8 @@ def test_eula_accepted_for_patches(self): def test_eula_not_accepted_for_patches(self): # EULA accepted in settings and commands updated accordingly self.runtime.execution_config.accept_package_eula = False - package_manager_for_test = 
AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + package_manager_for_test = AptPackageManager.AptPackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler, + self.runtime.patch_mode_manager, self.runtime.sources_manager, self.runtime.health_manager, Constants.APT) self.assertTrue("ACCEPT_EULA=Y" not in package_manager_for_test.single_package_upgrade_simulation_cmd) self.assertTrue("ACCEPT_EULA=Y" not in package_manager_for_test.single_package_dependency_resolution_template) self.assertTrue("ACCEPT_EULA=Y" not in package_manager_for_test.single_package_upgrade_cmd) diff --git a/src/core/tests/Test_ConfigurationFactory.py b/src/core/tests/Test_ConfigurationFactory.py index c341bd512..ceae86e68 100644 --- a/src/core/tests/Test_ConfigurationFactory.py +++ b/src/core/tests/Test_ConfigurationFactory.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -35,29 +35,29 @@ def test_get_prod_config_correctly(self): config_factory = bootstrapper.configuration_factory self.assertTrue(config_factory) - config = config_factory.get_configuration(Constants.PROD, Constants.YUM) + config = config_factory.get_configuration(Constants.ExecEnv.PROD, Constants.YUM) self.assertEqual(config['package_manager_name'], Constants.YUM) - self.assertEqual(config['config_env'], Constants.PROD) + self.assertEqual(config['config_env'], Constants.ExecEnv.PROD) def test_get_test_config_correctly(self): bootstrapper = Bootstrapper(self.argument_composer, capture_stdout=False) config_factory = bootstrapper.configuration_factory self.assertTrue(config_factory) - config = config_factory.get_configuration(Constants.TEST, Constants.APT) + config = config_factory.get_configuration(Constants.ExecEnv.TEST, Constants.APT) self.assertEqual(config['package_manager_name'], Constants.APT) - self.assertEqual(config['config_env'], Constants.TEST) + self.assertEqual(config['config_env'], Constants.ExecEnv.TEST) def test_get_dev_config_correctly(self): bootstrapper = Bootstrapper(self.argument_composer, capture_stdout=False) config_factory = bootstrapper.configuration_factory self.assertTrue(config_factory) - config = config_factory.get_configuration(Constants.DEV, Constants.APT) + config = config_factory.get_configuration(Constants.ExecEnv.DEV, Constants.APT) self.assertEqual(config['package_manager_name'], Constants.APT) - self.assertEqual(config['config_env'], Constants.DEV) + self.assertEqual(config['config_env'], Constants.ExecEnv.DEV) if __name__ == '__main__': diff --git a/src/core/tests/Test_ConfigurePatchingProcessor.py b/src/core/tests/Test_ConfigurePatchingProcessor.py index 
f36c884ca..c152625ac 100644 --- a/src/core/tests/Test_ConfigurePatchingProcessor.py +++ b/src/core/tests/Test_ConfigurePatchingProcessor.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -39,7 +39,7 @@ def tearDown(self): def mock_package_manager_get_current_auto_os_patch_state_returns_unknown(self): if self.mock_package_manager_get_current_auto_os_patch_state_returns_unknown_call_count == 0: self.mock_package_manager_get_current_auto_os_patch_state_returns_unknown_call_count = 1 - return Constants.AutomaticOSPatchStates.DISABLED + return Constants.AutomaticOSPatchStates.ENABLED else: return Constants.AutomaticOSPatchStates.UNKNOWN #endregion Mocks @@ -47,19 +47,19 @@ def mock_package_manager_get_current_auto_os_patch_state_returns_unknown(self): def test_operation_success_for_configure_patching_request_for_apt_with_default_updates_config(self): # create and adjust arguments argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM argument_composer.assessment_mode = Constants.AssessmentModes.IMAGE_DEFAULT # create and patch runtime runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state - runtime.package_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = 
runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") runtime.set_legacy_test_type('HappyPath') # mock os patch configuration os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - runtime.write_to_file(runtime.package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + runtime.write_to_file(runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) # execute Core CoreMain(argument_composer.get_composed_arguments()) @@ -72,13 +72,13 @@ def test_operation_success_for_configure_patching_request_for_apt_with_default_u substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] # check status file for configure patching patch state (and including for 'Platform' initiated assessment data) - self.assertTrue(runtime.package_manager.image_default_patch_configuration_backup_exists()) + self.assertTrue(runtime.package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) message = json.loads(substatus_file_data[0]["formattedMessage"]["message"]) self.assertTrue(message["startedBy"], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) message = 
json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) @@ -92,10 +92,10 @@ def test_operation_success_for_configure_patching_request_for_apt_with_default_u def test_operation_success_for_configure_patching_request_for_apt_without_default_updates_config(self): # default auto OS updates config file not found on the machine argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state runtime.set_legacy_test_type('HappyPath') CoreMain(argument_composer.get_composed_arguments()) @@ -106,22 +106,22 @@ def test_operation_success_for_configure_patching_request_for_apt_without_defaul with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) # assessment is now part of the CP flow - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) # assessment is now part of the CP flow + 
self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_operation_success_for_installation_request_with_configure_patching(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.INSTALLATION + argument_composer.operation = Constants.Op.INSTALLATION argument_composer.maintenance_run_id = "9/28/2020 02:00:00 PM +00:00" argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state - runtime.package_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - runtime.write_to_file(runtime.package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + runtime.write_to_file(runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) runtime.set_legacy_test_type('SuccessInstallPath') CoreMain(argument_composer.get_composed_arguments()) @@ -131,23 +131,23 @@ def test_operation_success_for_installation_request_with_configure_patching(self # check status file with 
runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] - self.assertTrue(runtime.package_manager.image_default_patch_configuration_backup_exists()) + self.assertTrue(runtime.package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(runtime.env_layer.file_system.read_with_retry(runtime.package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Update-Package-Lists'] == "1") self.assertTrue(image_default_patch_configuration_backup['APT::Periodic::Unattended-Upgrade'] == "1") - os_patch_configuration_settings = runtime.env_layer.file_system.read_with_retry(runtime.package_manager.os_patch_configuration_settings_file_path) + os_patch_configuration_settings = runtime.env_layer.file_system.read_with_retry(runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(os_patch_configuration_settings is not None) self.assertTrue('APT::Periodic::Update-Package-Lists "0"' in os_patch_configuration_settings) self.assertTrue('APT::Periodic::Unattended-Upgrade "0"' in os_patch_configuration_settings) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - 
self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") @@ -155,49 +155,25 @@ def test_operation_success_for_installation_request_with_configure_patching(self self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][2]["name"], "samba-libs") self.assertTrue("python-samba_2:4.4.5+dfsg-2ubuntu5.4" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][2]["classifications"])) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) 
self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) runtime.stop() - def test_operation_fail_for_configure_patching_telemetry_not_supported(self): - argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING - argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM - argument_composer.events_folder = None - runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings=dict(telemetrySupported=False)), True, Constants.APT) - runtime.set_legacy_test_type('HappyPath') - runtime.configure_patching_processor.start_configure_patching() - - # check status file - with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: - substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] - self.assertEqual(len(substatus_file_data), 1) - self.assertTrue(substatus_file_data[0]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - if runtime.vm_cloud_type == Constants.VMCloudType.AZURE: - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["autoAssessmentStatus"]["errors"]["details"][0]["message"]) - self.assertTrue(Constants.STATUS_ERROR in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["autoAssessmentStatus"]["autoAssessmentState"]) - else: - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - runtime.stop() - def 
test_patch_mode_set_failure_for_configure_patching(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM argument_composer.assessment_mode = "LetsThrowAnException" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state runtime.set_legacy_test_type('HappyPath') # mock swap - backup_package_manager_get_current_auto_os_patch_state = runtime.package_manager.get_current_auto_os_patch_state - runtime.package_manager.get_current_auto_os_patch_state = self.mock_package_manager_get_current_auto_os_patch_state_returns_unknown + backup_package_manager_get_current_auto_os_patch_state = runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = self.mock_package_manager_get_current_auto_os_patch_state_returns_unknown # Execute main CoreMain(argument_composer.get_composed_arguments()) @@ -208,14 +184,13 @@ def test_patch_mode_set_failure_for_configure_patching(self): # check status file with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] - self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) # assessment is now part of the CP flow - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - 
self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) # assessment is now part of the CP flow + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) #restore - runtime.package_manager.get_current_auto_os_patch_state = backup_package_manager_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = backup_package_manager_get_current_auto_os_patch_state runtime.stop() @@ -223,19 +198,19 @@ def test_configure_patching_with_assessment_mode_by_platform(self): # create and adjust arguments argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.IMAGE_DEFAULT argument_composer.assessment_mode = Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM # create and patch runtime runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state - runtime.package_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") runtime.set_legacy_test_type('HappyPath') # mock os patch configuration os_patch_configuration_settings = 
'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - runtime.write_to_file(runtime.package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + runtime.write_to_file(runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) # execute Core CoreMain(argument_composer.get_composed_arguments()) @@ -249,12 +224,12 @@ def test_configure_patching_with_assessment_mode_by_platform(self): # check status file for configure patching patch state self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.ENABLED) # no change is made on Auto OS updates for patch mode 'ImageDefault' - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching assessment state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) @@ -267,19 +242,19 @@ def test_configure_patching_with_patch_mode_and_assessment_mode_by_platform(self # create and adjust arguments argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = 
Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM argument_composer.assessment_mode = Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM # create and patch runtime runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state - runtime.package_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") + runtime.package_manager.patch_mode_manager.get_current_auto_os_patch_state = runtime.backup_get_current_auto_os_patch_state + runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = os.path.join(runtime.execution_config.config_folder, "20auto-upgrades") runtime.set_legacy_test_type('HappyPath') # mock os patch configuration os_patch_configuration_settings = 'APT::Periodic::Update-Package-Lists "1";\nAPT::Periodic::Unattended-Upgrade "1";\n' - runtime.write_to_file(runtime.package_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) + runtime.write_to_file(runtime.package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, os_patch_configuration_settings) # execute Core CoreMain(argument_composer.get_composed_arguments()) @@ -292,14 +267,14 @@ def test_configure_patching_with_patch_mode_and_assessment_mode_by_platform(self substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] # check status file for configure patching patch state - self.assertTrue(runtime.package_manager.image_default_patch_configuration_backup_exists()) + self.assertTrue(runtime.package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - 
self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled on patch mode 'AutomaticByPlatform' - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching assessment state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) diff --git a/src/core/tests/Test_Container.py b/src/core/tests/Test_Container.py index 3796e7f6e..5c8d5b21c 100644 --- a/src/core/tests/Test_Container.py +++ b/src/core/tests/Test_Container.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/tests/Test_CoreMain.py b/src/core/tests/Test_CoreMain.py index 922fa900b..8b576a74d 100644 --- a/src/core/tests/Test_CoreMain.py +++ b/src/core/tests/Test_CoreMain.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -67,13 +67,13 @@ def test_operation_fail_for_non_autopatching_request(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 3) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(substatus_file_data[2]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_operation_fail_for_autopatching_request(self): @@ -90,18 +90,18 @@ def test_operation_fail_for_autopatching_request(self): with 
runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertTrue(substatus_file_data_patch_metadata_summary["patchVersion"], Constants.PATCH_VERSION_UNKNOWN) self.assertFalse(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == 
Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_operation_success_for_non_autopatching_request(self): @@ -117,12 +117,12 @@ def test_operation_success_for_non_autopatching_request(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 3) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[2]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_operation_success_for_autopatching_request(self): @@ -141,17 +141,17 @@ def test_operation_success_for_autopatching_request(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 
4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_operation_success_for_autopatching_request_with_security_classification(self): @@ -172,10 +172,10 @@ def test_operation_success_for_autopatching_request_with_security_classification 
with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") @@ -183,13 +183,13 @@ def test_operation_success_for_autopatching_request_with_security_classification self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][2]["name"], "samba-libs") self.assertTrue("python-samba_2:4.4.5+dfsg-2ubuntu5.4" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][2]["classifications"])) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - 
self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_invalid_maintenance_run_id(self): @@ -208,17 +208,17 @@ def test_invalid_maintenance_run_id(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) # "invalid" maintenance ids are okay in the new contract - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == 
Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # "invalid" maintenance ids are okay in the new contract + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], Constants.PATCH_VERSION_UNKNOWN) self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() # test with a random string for maintenance run id @@ -236,22 +236,22 @@ def test_invalid_maintenance_run_id(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - 
self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], maintenance_run_id) self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_assessment_operation_success(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER) runtime.set_legacy_test_type('HappyPath') CoreMain(argument_composer.get_composed_arguments()) @@ -263,15 +263,15 @@ def test_assessment_operation_success(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: 
substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_assessment_operation_fail(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER) runtime.set_legacy_test_type('ExceptionPath') CoreMain(argument_composer.get_composed_arguments()) @@ -283,32 +283,33 @@ def test_assessment_operation_fail(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 2) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == 
Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_assessment_operation_fail_due_to_no_telemetry(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT argument_composer.events_folder = None composed_arguments = argument_composer.get_composed_arguments(dict(telemetrySupported=False)) runtime = RuntimeCompositor(composed_arguments, True, Constants.ZYPPER) runtime.set_legacy_test_type('HappyPath') + runtime.core_exec.check_minimum_environment_requirements_and_report = runtime.backup_check_minimum_environment_requirements_and_report CoreMain(composed_arguments) with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in 
json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) runtime.stop() def test_installation_operation_fail_due_to_telemetry_unsupported_no_events_folder(self): @@ -323,27 +324,27 @@ def test_installation_operation_fail_due_to_telemetry_unsupported_no_events_fold with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in 
json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertFalse(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertFalse(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) self.assertTrue("Installation failed due to assessment failure. 
Please refer the error details in assessment substatus" in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) runtime.stop() def test_installation_operation_fail_due_to_no_telemetry(self): # telemetry not supported argument_composer = ArgumentComposer() - argument_composer.operation = Constants.INSTALLATION + argument_composer.operation = Constants.Op.INSTALLATION argument_composer.maintenance_run_id = str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")) runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": False}), True, Constants.ZYPPER) runtime.set_legacy_test_type('SuccessInstallPath') @@ -355,39 +356,40 @@ def 
test_installation_operation_fail_due_to_no_telemetry(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"]), 2) - self.assertFalse(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertFalse(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) self.assertTrue("Installation failed due to assessment failure. 
Please refer the error details in assessment substatus" in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) runtime.stop() def test_assessment_operation_fail_on_arc_due_to_no_telemetry(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT argument_composer.events_folder = None - runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER, Constants.VMCloudType.ARC) + runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER, Constants.CloudType.ARC) runtime.set_legacy_test_type('HappyPath') + 
runtime.core_exec.check_minimum_environment_requirements_and_report = runtime.backup_check_minimum_environment_requirements_and_report CoreMain(argument_composer.get_composed_arguments()) with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) runtime.stop() def 
test_installation_operation_fail_on_arc_due_to_no_telemetry(self): @@ -395,28 +397,28 @@ def test_installation_operation_fail_on_arc_due_to_no_telemetry(self): argument_composer = ArgumentComposer() argument_composer.maintenance_run_id = str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")) argument_composer.events_folder = None - runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER,Constants.VMCloudType.ARC) + runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER, Constants.CloudType.ARC) runtime.set_legacy_test_type('SuccessInstallPath') CoreMain(argument_composer.get_composed_arguments()) with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[0]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + 
self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertFalse(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertFalse(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) self.assertTrue("Installation failed due to assessment failure. Please refer the error details in assessment substatus" in json.loads(substatus_file_data[1]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_ERROR.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.ERROR.lower()) self.assertEqual(len(json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"]), 1) - self.assertTrue(Constants.TELEMETRY_NOT_COMPATIBLE_ERROR_MSG in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) + self.assertTrue(Constants.Errors.NO_TELEMETRY_SUPPORT_AT_AGENT in json.loads(substatus_file_data[3]["formattedMessage"]["message"])["errors"]["details"][0]["message"]) runtime.stop() def 
test_install_all_packages_for_centos_autopatching(self): @@ -442,10 +444,10 @@ def test_install_all_packages_for_centos_autopatching(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) self.assertTrue(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["installedPatchCount"] == 5) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "selinux-policy.noarch") self.assertTrue("Other" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["classifications"])) @@ -457,13 +459,13 @@ def test_install_all_packages_for_centos_autopatching(self): self.assertTrue("libgcc.i686_4.8.5-28.el7_CentOS Linux_7.9.2009" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertTrue("Installed" == json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchInstallationState"]) - 
self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() LegacyEnvLayerExtensions.LegacyPlatform.linux_distribution = backup_envlayer_platform_linux_distribution @@ -492,10 +494,10 @@ def test_install_all_packages_for_centos_autopatching_as_warning_with_never_rebo with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_WARNING.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + 
self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.WARNING.lower()) self.assertTrue(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["installedPatchCount"] == 5) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "selinux-policy.noarch") self.assertTrue("Other" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["classifications"])) @@ -507,13 +509,13 @@ def test_install_all_packages_for_centos_autopatching_as_warning_with_never_rebo self.assertTrue("libgcc.i686_4.8.5-28.el7_CentOS Linux_7.9.2009" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertTrue("Installed" == json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchInstallationState"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - 
self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() LegacyEnvLayerExtensions.LegacyPlatform.linux_distribution = backup_envlayer_platform_linux_distribution @@ -541,10 +543,10 @@ def test_install_only_critical_and_security_packages_for_redhat_autopatching(sel with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) self.assertTrue(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["installedPatchCount"] == 1) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "selinux-policy.noarch") self.assertTrue("Other" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["classifications"])) @@ -562,13 +564,13 @@ def test_install_only_critical_and_security_packages_for_redhat_autopatching(sel self.assertTrue("libgcc.i686_4.8.5-28.el7_Red Hat Enterprise 
Linux Server_7.5" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertTrue("Installed" == json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchInstallationState"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() LegacyEnvLayerExtensions.LegacyPlatform.linux_distribution = backup_envlayer_platform_linux_distribution @@ -597,10 +599,10 @@ def test_install_only_critical_and_security_packages_for_redhat_autopatching_war with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - 
self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_WARNING.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.WARNING.lower()) self.assertTrue(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["installedPatchCount"] == 1) self.assertEqual(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["name"], "selinux-policy.noarch") self.assertTrue("Other" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][1]["classifications"])) @@ -618,13 +620,13 @@ def test_install_only_critical_and_security_packages_for_redhat_autopatching_war self.assertTrue("libgcc.i686_4.8.5-28.el7_Red Hat Enterprise Linux Server_7.5" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchId"])) self.assertTrue("Security" in str(json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["classifications"])) self.assertTrue("Installed" == json.loads(substatus_file_data[1]["formattedMessage"]["message"])["patches"][0]["patchInstallationState"]) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) 
substatus_file_data_patch_metadata_summary = json.loads(substatus_file_data[2]["formattedMessage"]["message"]) self.assertEqual(substatus_file_data_patch_metadata_summary["patchVersion"], "2020.09.28") self.assertTrue(substatus_file_data_patch_metadata_summary["shouldReportToHealthStore"]) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() LegacyEnvLayerExtensions.LegacyPlatform.linux_distribution = backup_envlayer_platform_linux_distribution @@ -634,7 +636,7 @@ def test_auto_assessment_success_with_configure_patching_in_prev_operation_on_sa """Unit test for auto assessment request with configure patching completed on the sequence before. Result: should retain prev substatus and update only PatchAssessmentSummary""" # operation #1: ConfigurePatching argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM argument_composer.assessment_mode = Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) @@ -645,11 +647,11 @@ def test_auto_assessment_success_with_configure_patching_in_prev_operation_on_sa # check status file with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] - self.assertTrue(status_file_data["operation"] == Constants.CONFIGURE_PATCHING) + self.assertTrue(status_file_data["operation"] == Constants.Op.CONFIGURE_PATCHING) substatus_file_data = 
status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching auto updates state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor @@ -663,7 +665,7 @@ def test_auto_assessment_success_with_configure_patching_in_prev_operation_on_sa argument_composer.maintenance_run_id = None argument_composer.start_time = runtime.env_layer.datetime.standard_datetime_to_utc(datetime.datetime.utcnow()) argument_composer.duration = Constants.AUTO_ASSESSMENT_MAXIMUM_DURATION - argument_composer.reboot_setting = Constants.REBOOT_NEVER + argument_composer.reboot_setting = Constants.RebootSettings.NEVER argument_composer.patch_mode = None argument_composer.exec_auto_assess_only = True runtime.execution_config.exec_auto_assess_only = True @@ -672,16 +674,16 @@ def test_auto_assessment_success_with_configure_patching_in_prev_operation_on_sa with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] # verifying the original operation name is preserved - self.assertTrue(status_file_data["operation"] == Constants.CONFIGURE_PATCHING) + self.assertTrue(status_file_data["operation"] == Constants.Op.CONFIGURE_PATCHING) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - 
self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'Platform' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) # verifying the older operation summary is preserved - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor self.assertEqual(message["autoAssessmentStatus"]["autoAssessmentState"], Constants.AutoAssessmentStates.ENABLED) # auto assessment is enabled @@ -694,20 +696,20 @@ def test_auto_assessment_success_on_arc_with_configure_patching_in_prev_operatio # operation #1: ConfigurePatching # Here it should skip agent compatibility check as operation is configure patching [ not assessment or installation] argument_composer = ArgumentComposer() - argument_composer.operation = Constants.CONFIGURE_PATCHING + argument_composer.operation = Constants.Op.CONFIGURE_PATCHING argument_composer.patch_mode = Constants.PatchModes.AUTOMATIC_BY_PLATFORM argument_composer.assessment_mode = Constants.AssessmentModes.AUTOMATIC_BY_PLATFORM - runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT, Constants.VMCloudType.ARC) + runtime = 
RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT, Constants.CloudType.ARC) runtime.set_legacy_test_type("SuccessInstallPath") CoreMain(argument_composer.get_composed_arguments()) # check status file with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] - self.assertTrue(status_file_data["operation"] == Constants.CONFIGURE_PATCHING) + self.assertTrue(status_file_data["operation"] == Constants.Op.CONFIGURE_PATCHING) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching auto updates state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor @@ -721,7 +723,7 @@ def test_auto_assessment_success_on_arc_with_configure_patching_in_prev_operatio argument_composer.maintenance_run_id = None argument_composer.start_time = runtime.env_layer.datetime.standard_datetime_to_utc(datetime.datetime.utcnow()) argument_composer.duration = Constants.AUTO_ASSESSMENT_MAXIMUM_DURATION - argument_composer.reboot_setting = Constants.REBOOT_NEVER + argument_composer.reboot_setting = Constants.RebootSettings.NEVER argument_composer.patch_mode = None argument_composer.exec_auto_assess_only = True runtime.execution_config.exec_auto_assess_only = True @@ -732,16 +734,16 @@ def 
test_auto_assessment_success_on_arc_with_configure_patching_in_prev_operatio with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] # verifying the original operation name is preserved - self.assertTrue(status_file_data["operation"] == Constants.CONFIGURE_PATCHING) + self.assertTrue(status_file_data["operation"] == Constants.Op.CONFIGURE_PATCHING) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'Platform' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) # verifying the older operation summary is preserved - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor # check status file for configure patching assessment state @@ -754,7 +756,7 @@ def test_auto_assessment_success_with_assessment_in_prev_operation_on_same_seque """Unit test for auto assessment request with assessment completed on the sequence 
before. Result: should contain PatchAssessmentSummary with an updated timestamp and ConfigurePatchingSummary from the first Assessment operation""" # operation #1: Assessment argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) runtime.set_legacy_test_type("SuccessInstallPath") CoreMain(argument_composer.get_composed_arguments()) @@ -763,21 +765,21 @@ def test_auto_assessment_success_with_assessment_in_prev_operation_on_same_seque # check status file with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] - self.assertTrue(status_file_data["operation"] == Constants.ASSESSMENT) + self.assertTrue(status_file_data["operation"] == Constants.Op.ASSESSMENT) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'User' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.USER) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure 
patching auto updates state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor # check status file for configure patching assessment state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) - self.assertEqual(message["autoAssessmentStatus"]["autoAssessmentState"], Constants.AutoAssessmentStates.UNKNOWN) # Configure patching for auto assessment did not execute since assessmentMode was not in input + self.assertEqual(message["autoAssessmentStatus"]["autoAssessmentState"], Constants.AutoAssessmentStates.UNKNOWN) # Configure patching for auto assessment is reported as disabled (if not explicitly asked for) # operation #2: Auto Assessment argument_composer.activity_id = str(uuid.uuid4()) @@ -785,7 +787,7 @@ def test_auto_assessment_success_with_assessment_in_prev_operation_on_same_seque argument_composer.maintenance_run_id = None argument_composer.start_time = runtime.env_layer.datetime.standard_datetime_to_utc(datetime.datetime.utcnow()) argument_composer.duration = Constants.AUTO_ASSESSMENT_MAXIMUM_DURATION - argument_composer.reboot_setting = Constants.REBOOT_NEVER + argument_composer.reboot_setting = Constants.RebootSettings.NEVER argument_composer.patch_mode = None argument_composer.exec_auto_assess_only = True runtime.execution_config.exec_auto_assess_only = True @@ -797,16 +799,16 @@ def test_auto_assessment_success_with_assessment_in_prev_operation_on_same_seque status_file_data = json.load(file_handle)[0]["status"] # verifying the original operation name is preserved - self.assertTrue(status_file_data["operation"] == Constants.ASSESSMENT) + self.assertTrue(status_file_data["operation"] == Constants.Op.ASSESSMENT) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == 
Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'Platform' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) # verifying the older operation summary is preserved - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching auto updates state message = json.loads(substatus_file_data[1]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor @@ -821,7 +823,7 @@ def test_auto_assessment_success_with_installation_in_prev_operation_on_same_seq Result: should contain PatchAssessmentSummary with an updated timestamp after auto assessment, and retain PatchInstallationSummary, ConfigurePatchingSummary and PatchMetadatForHealthStoreSummary from the installation(Auto Patching) operation""" # operation #1: Assessment argument_composer = ArgumentComposer() - argument_composer.operation = Constants.INSTALLATION + argument_composer.operation = Constants.Op.INSTALLATION argument_composer.maintenance_run_id = "8/27/2021 02:00:00 PM +00:00" argument_composer.classifications_to_include = ["Security", "Critical"] runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) @@ -832,20 +834,20 @@ def 
test_auto_assessment_success_with_installation_in_prev_operation_on_same_seq # check status file with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: status_file_data = json.load(file_handle)[0]["status"] - self.assertTrue(status_file_data["operation"] == Constants.INSTALLATION) + self.assertTrue(status_file_data["operation"] == Constants.Op.INSTALLATION) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'User' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.USER) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) last_modified_time_from_installation_substatus_after_user_initiated_installation = json.loads(substatus_file_data[1]["formattedMessage"]["message"])["lastModifiedTime"] - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + 
self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.Status.SUCCESS.lower()) # check status file for configure patching auto updates state message = json.loads(substatus_file_data[3]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor, this is tested in Test-ConfigurePatchingProcessor @@ -859,7 +861,7 @@ def test_auto_assessment_success_with_installation_in_prev_operation_on_same_seq argument_composer.maintenance_run_id = None argument_composer.start_time = runtime.env_layer.datetime.standard_datetime_to_utc(datetime.datetime.utcnow()) argument_composer.duration = Constants.AUTO_ASSESSMENT_MAXIMUM_DURATION - argument_composer.reboot_setting = Constants.REBOOT_NEVER + argument_composer.reboot_setting = Constants.RebootSettings.NEVER argument_composer.patch_mode = None argument_composer.exec_auto_assess_only = True runtime.execution_config.exec_auto_assess_only = True @@ -871,23 +873,23 @@ def test_auto_assessment_success_with_installation_in_prev_operation_on_same_seq status_file_data = json.load(file_handle)[0]["status"] # verifying the original operation name is preserved - self.assertTrue(status_file_data["operation"] == Constants.INSTALLATION) + self.assertTrue(status_file_data["operation"] == Constants.Op.INSTALLATION) substatus_file_data = status_file_data["substatus"] self.assertEqual(len(substatus_file_data), 4) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + 
self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) # check started by set to 'Platform' self.assertTrue(json.loads(substatus_file_data[0]["formattedMessage"]["message"])['startedBy'], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) # verifying the older operation summary is preserved - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) # validate lastModifiedTime in InstallationSummary is preserved from the user initiated installation operation last_modified_time_from_installation_substatus_after_platform_initiated_assessment = json.loads(substatus_file_data[1]["formattedMessage"]["message"])["lastModifiedTime"] self.assertEqual(last_modified_time_from_installation_substatus_after_user_initiated_installation, last_modified_time_from_installation_substatus_after_platform_initiated_assessment) - self.assertTrue(substatus_file_data[2]["name"] == Constants.PATCH_METADATA_FOR_HEALTHSTORE) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[3]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[3]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.PATCH_METADATA_FOR_HEALTHSTORE) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[3]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[3]["status"].lower() == 
Constants.Status.SUCCESS.lower()) # check status file for configure patching auto updates state message = json.loads(substatus_file_data[3]["formattedMessage"]["message"]) self.assertEqual(message["automaticOSPatchState"], Constants.AutomaticOSPatchStates.DISABLED) # auto OS updates are disabled in RuntimeCompositor @@ -899,7 +901,7 @@ def test_auto_assessment_success_with_installation_in_prev_operation_on_same_seq def test_assessment_operation_fail_after_package_manager_reboot(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER) runtime.set_legacy_test_type('ExceptionPath') CoreMain(argument_composer.get_composed_arguments()) @@ -924,17 +926,17 @@ def test_assessment_operation_fail_after_package_manager_reboot(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 3) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_TRANSITIONING.lower()) - self.assertTrue(substatus_file_data[2]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + 
self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.TRANSITIONING.lower()) + self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_assessment_operation_success_after_package_manager_reboot(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER) runtime.set_legacy_test_type('ExceptionPath') CoreMain(argument_composer.get_composed_arguments()) @@ -960,12 +962,12 @@ def test_assessment_operation_success_after_package_manager_reboot(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 3) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.PATCH_INSTALLATION_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_TRANSITIONING.lower()) - self.assertTrue(substatus_file_data[2]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.INSTALLATION) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.TRANSITIONING.lower()) + 
self.assertTrue(substatus_file_data[2]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[2]["status"].lower() == Constants.Status.SUCCESS.lower()) runtime.stop() def test_assessment_superseded(self): @@ -973,7 +975,7 @@ def test_assessment_superseded(self): Result: Assessment should terminate with a superseded error message.""" # Step 1: Run assessment normally to generate 0.status and ExtState.json argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.ZYPPER) runtime.set_legacy_test_type('HappyPath') CoreMain(argument_composer.get_composed_arguments()) @@ -985,10 +987,10 @@ def test_assessment_superseded(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) scratch_path = os.path.join(os.path.curdir, "scratch") @@ -1024,11 +1026,11 @@ def test_assessment_superseded(self): with runtime.env_layer.file_system.open(runtime.execution_config.status_file_path, 
'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertEqual(len(substatus_file_data), 2) - self.assertTrue(substatus_file_data[0]["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) - self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.STATUS_ERROR.lower()) - self.assertTrue(substatus_file_data[1]["name"] == Constants.CONFIGURE_PATCHING_SUMMARY) - self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.STATUS_SUCCESS.lower()) - self.assertTrue(Constants.PatchOperationErrorCodes.NEWER_OPERATION_SUPERSEDED in substatus_file_data[0]["formattedMessage"]["message"]) + self.assertTrue(substatus_file_data[0]["name"] == Constants.OpSummary.ASSESSMENT) + self.assertTrue(substatus_file_data[0]["status"].lower() == Constants.Status.ERROR.lower()) + self.assertTrue(substatus_file_data[1]["name"] == Constants.OpSummary.CONFIGURE_PATCHING) + self.assertTrue(substatus_file_data[1]["status"].lower() == Constants.Status.SUCCESS.lower()) + self.assertTrue(Constants.PatchOperationErrorCodes.CL_NEWER_OPERATION_SUPERSEDED in substatus_file_data[0]["formattedMessage"]["message"]) runtime.stop() @@ -1036,7 +1038,7 @@ def test_temp_folder_created_during_execution_config_init(self): # temp_folder is set with a path in environment settings but the dir does not exist argument_composer = ArgumentComposer() shutil.rmtree(argument_composer.temp_folder) - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) # validate temp_folder is created self.assertTrue(runtime.execution_config.temp_folder is not None) @@ -1047,7 +1049,7 @@ def test_temp_folder_created_during_execution_config_init(self): argument_composer = ArgumentComposer() shutil.rmtree(argument_composer.temp_folder) argument_composer.temp_folder = None - argument_composer.operation = Constants.ASSESSMENT + 
argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) # validate temp_folder is created self.assertTrue(runtime.execution_config.temp_folder is not None) @@ -1058,7 +1060,7 @@ def test_temp_folder_created_during_execution_config_init(self): argument_composer = ArgumentComposer() shutil.rmtree(argument_composer.temp_folder) argument_composer.temp_folder = None - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT # mock path exists check to return False on config_folder exists check backup_os_path_exists = os.path.exists os.path.exists = self.mock_os_path_exists @@ -1074,7 +1076,7 @@ def test_delete_temp_folder_contents_success(self): self.assertEqual(argument_composer.temp_folder, os.path.abspath(os.path.join(os.path.curdir, "scratch", "tmp"))) # delete temp content - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) runtime.set_legacy_test_type('HappyPath') CoreMain(argument_composer.get_composed_arguments()) @@ -1087,7 +1089,7 @@ def test_delete_temp_folder_contents_success(self): def test_delete_temp_folder_contents_when_none_exists(self): argument_composer = ArgumentComposer() - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) shutil.rmtree(runtime.execution_config.temp_folder) @@ -1109,7 +1111,7 @@ def test_delete_temp_folder_contents_failure(self): self.backup_os_remove = os.remove os.remove = self.mock_os_remove - argument_composer.operation = Constants.ASSESSMENT + argument_composer.operation = Constants.Op.ASSESSMENT runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) # 
delete temp content attempt #1, throws exception diff --git a/src/core/tests/Test_LifecycleManagerArc.py b/src/core/tests/Test_LifecycleManagerArc.py index 832cb1062..e4e2da7f1 100644 --- a/src/core/tests/Test_LifecycleManagerArc.py +++ b/src/core/tests/Test_LifecycleManagerArc.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -25,7 +25,7 @@ class TestLifecycleManagerArc(unittest.TestCase): def setUp(self): - self.runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), True, Constants.APT, Constants.VMCloudType.ARC) + self.runtime = RuntimeCompositor(ArgumentComposer(Constants.CloudType.ARC).get_composed_arguments(), True, Constants.APT, Constants.CloudType.ARC) self.container = self.runtime.container self.lifecycle_manager = self.runtime.lifecycle_manager diff --git a/src/core/tests/Test_LifecycleManagerAzure.py b/src/core/tests/Test_LifecycleManagerAzure.py index 8f9008828..652430fdc 100644 --- a/src/core/tests/Test_LifecycleManagerAzure.py +++ b/src/core/tests/Test_LifecycleManagerAzure.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/tests/Test_MaintenanceWindow.py b/src/core/tests/Test_MaintenanceWindow.py index 8051f9f03..e65564a12 100644 --- a/src/core/tests/Test_MaintenanceWindow.py +++ b/src/core/tests/Test_MaintenanceWindow.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ from core.tests.library.ArgumentComposer import ArgumentComposer from core.tests.library.RuntimeCompositor import RuntimeCompositor + class TestMaintenanceWindow(unittest.TestCase): def setUp(self): pass @@ -83,7 +84,7 @@ def test_get_percentage_maintenance_window_used(self): argument_composer.start_time = (datetime.datetime.utcnow() - datetime.timedelta(hours=0, minutes=18)).strftime("%Y-%m-%dT%H:%M:%S.9999Z") argument_composer.maximum_duration = "PT3H" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True) - perc_maintenance_window_used = runtime.maintenance_window.get_percentage_maintenance_window_used() + perc_maintenance_window_used = runtime.maintenance_window.get_maintenance_window_used_as_percentage() # 18 minutes of maintenance window used out of 3 hours (180 minutes). So, it should be 10%. 
# The value should be slightly greater than 10 as it takes some time to trigger the method get_percentage_maintenance_window_used self.assertGreaterEqual(perc_maintenance_window_used, 10) @@ -95,7 +96,7 @@ def test_get_percentage_maintenance_window_used_Fail(self): # ZeroDivisionError should be thrown as duration is 0 argument_composer.maximum_duration = "PT0H" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True) - self.assertRaises(Exception, runtime.maintenance_window.get_percentage_maintenance_window_used) + runtime.maintenance_window.get_maintenance_window_used_as_percentage() # shouldn't throw an exception runtime.stop() def test_get_percentage_maintenance_window_used_start_time_greater_exception(self): @@ -103,7 +104,7 @@ def test_get_percentage_maintenance_window_used_start_time_greater_exception(sel # Setting start time 1 hour later than current time argument_composer.start_time = (datetime.datetime.utcnow() + datetime.timedelta(hours=1)).strftime("%Y-%m-%dT%H:%M:%S.9999Z") runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True) - self.assertRaises(Exception, runtime.maintenance_window.get_percentage_maintenance_window_used) + runtime.maintenance_window.get_maintenance_window_used_as_percentage() # shouldn't throw an exception runtime.stop() def test_is_package_install_time_available(self): @@ -124,5 +125,6 @@ def test_is_package_install_time_available(self): self.assertEqual(False, runtime.maintenance_window.is_package_install_time_available(remaining_time_in_minutes, number_of_packages)) runtime.stop() + if __name__ == '__main__': unittest.main() diff --git a/src/core/tests/Test_PackageFilter.py b/src/core/tests/Test_PackageFilter.py index d858272b2..d6bfc3364 100644 --- a/src/core/tests/Test_PackageFilter.py +++ b/src/core/tests/Test_PackageFilter.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/tests/Test_PatchAssessor.py b/src/core/tests/Test_PatchAssessor.py index 40f1c22dc..ede9475e3 100644 --- a/src/core/tests/Test_PatchAssessor.py +++ b/src/core/tests/Test_PatchAssessor.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -36,11 +36,11 @@ def tearDown(self): self.runtime.stop() def test_assessment_success(self): - self.assertTrue(self.runtime.patch_assessor.start_assessment()) + self.assertTrue(self.runtime.patch_assessor.start_operation_with_retries()) def test_assessment_fail(self): self.runtime.set_legacy_test_type('UnalignedPath') - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertFalse(self.runtime.patch_assessor.start_operation_with_retries()) def test_get_all_updates_fail(self): self.runtime.set_legacy_test_type('UnalignedPath') @@ -53,22 +53,22 @@ def test_get_all_security_updates_fail(self): def test_assessment_fail_with_status_update(self): self.runtime.package_manager.refresh_repo = self.mock_refresh_repo self.runtime.set_legacy_test_type('UnalignedPath') - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertFalse(self.runtime.patch_assessor.start_operation_with_retries()) with open(self.runtime.execution_config.status_file_path, 'r') as file_handle: file_contents = json.loads(file_handle.read()) - self.assertTrue('Unexpected return code (100) from package manager 
on command: LANG=en_US.UTF8 sudo apt-get -s dist-upgrade' in str(file_contents)) + self.assertTrue('Unexpected return code from package manager. [Code=100]' in str(file_contents)) def test_assessment_telemetry_fail(self): backup_telemetry_writer = self.runtime.telemetry_writer telemetry_writer = TelemetryWriter(self.runtime.env_layer, self.runtime.composite_logger, events_folder_path=None, telemetry_supported=False) self.runtime.patch_assessor.telemetry_writer = telemetry_writer - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertTrue(self.runtime.patch_assessor.start_operation_with_retries()) telemetry_writer = TelemetryWriter(self.runtime.env_layer, self.runtime.composite_logger, events_folder_path="events", telemetry_supported=False) self.runtime.patch_assessor.telemetry_writer = telemetry_writer - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertTrue(self.runtime.patch_assessor.start_operation_with_retries()) telemetry_writer = TelemetryWriter(self.runtime.env_layer, self.runtime.composite_logger, events_folder_path=None, telemetry_supported=True) self.runtime.patch_assessor.telemetry_writer = telemetry_writer - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertTrue(self.runtime.patch_assessor.start_operation_with_retries()) self.runtime.patch_assessor.telemetry_writer = backup_telemetry_writer def test_assessment_state_file(self): @@ -151,36 +151,28 @@ def test_convert_iso8601_duration_to_total_seconds(self): self.assertRaises(Exception, lambda: self.runtime.patch_assessor.convert_iso8601_duration_to_total_seconds('')) def test_write_assessment_perf_logs(self): - self.runtime.patch_assessor.start_assessment() + self.runtime.patch_assessor.start_operation_with_retries() self.assertTrue(self.runtime.patch_assessor.stopwatch.start_time is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.end_time is not None) 
self.assertTrue(self.runtime.patch_assessor.stopwatch.time_taken_in_secs is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.task_details is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.start_time <= self.runtime.patch_assessor.stopwatch.end_time) self.assertTrue(self.runtime.patch_assessor.stopwatch.time_taken_in_secs >= 0) - task_info = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK), str(Constants.ASSESSMENT)) + task_info = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK), str(Constants.Op.ASSESSMENT)) self.assertTrue(task_info in str(self.runtime.patch_assessor.stopwatch.task_details)) - task_status = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK_STATUS), str(Constants.TaskStatus.SUCCEEDED)) + task_status = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK_STATUS), str(Constants.Status.SUCCESS)) self.assertTrue(task_status in str(self.runtime.patch_assessor.stopwatch.task_details)) err_msg = "{0}=".format(str(Constants.PerfLogTrackerParams.ERROR_MSG)) self.assertTrue(err_msg in str(self.runtime.patch_assessor.stopwatch.task_details)) - def test_stopwatch_properties_assessment_fail(self): self.runtime.set_legacy_test_type('UnalignedPath') - self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) + self.assertFalse(self.runtime.patch_assessor.start_operation_with_retries()) self.assertTrue(self.runtime.patch_assessor.stopwatch.start_time is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.end_time is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.time_taken_in_secs is not None) self.assertTrue(self.runtime.patch_assessor.stopwatch.task_details is not None) - def test_raise_if_min_python_version_not_met(self): - sys.version_info = (2, 6) - # Assert that an exception is raised - with self.assertRaises(Exception) as context: - self.runtime.patch_assessor.start_assessment() - self.assertEqual(str(context.exception), 
Constants.PYTHON_NOT_COMPATIBLE_ERROR_MSG.format(sys.version_info)) - def raise_ex(self): raise Exception() diff --git a/src/core/tests/Test_PatchInstaller.py b/src/core/tests/Test_PatchInstaller.py index 6d6a81e7b..b40a66e3a 100644 --- a/src/core/tests/Test_PatchInstaller.py +++ b/src/core/tests/Test_PatchInstaller.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -584,34 +584,34 @@ def test_raise_if_telemetry_unsupported(self): runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": True}), True, Constants.YUM) runtime.set_legacy_test_type('SuccessInstallPath') - runtime.patch_installer.lifecycle_manager.get_vm_cloud_type = lambda: Constants.VMCloudType.ARC + runtime.patch_installer.lifecycle_manager.get_cloud_type = lambda: Constants.CloudType.ARC runtime.patch_installer.raise_if_telemetry_unsupported() runtime.stop() runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": False}), True, Constants.YUM) runtime.set_legacy_test_type('SuccessInstallPath') - runtime.patch_installer.lifecycle_manager.get_vm_cloud_type = lambda: Constants.VMCloudType.ARC + runtime.patch_installer.lifecycle_manager.get_cloud_type = lambda: Constants.CloudType.ARC self.assertRaises(Exception, runtime.patch_installer.raise_if_telemetry_unsupported) runtime.stop() runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": True}), True, Constants.YUM) runtime.set_legacy_test_type('SuccessInstallPath') - runtime.patch_installer.lifecycle_manager.get_vm_cloud_type = lambda: Constants.VMCloudType.ARC - 
runtime.patch_installer.execution_config.operation = Constants.CONFIGURE_PATCHING + runtime.patch_installer.lifecycle_manager.get_cloud_type = lambda: Constants.CloudType.ARC + runtime.patch_installer.execution_config.operation = Constants.Op.CONFIGURE_PATCHING runtime.patch_installer.raise_if_telemetry_unsupported() runtime.stop() runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": False}), True, Constants.YUM) runtime.set_legacy_test_type('SuccessInstallPath') - runtime.patch_installer.lifecycle_manager.get_vm_cloud_type = lambda: Constants.VMCloudType.ARC - runtime.patch_installer.execution_config.operation = Constants.CONFIGURE_PATCHING + runtime.patch_installer.lifecycle_manager.get_cloud_type = lambda: Constants.CloudType.ARC + runtime.patch_installer.execution_config.operation = Constants.Op.CONFIGURE_PATCHING # Should not raise an exception because it is an ARC VM and it is not installation or assessment runtime.patch_installer.raise_if_telemetry_unsupported() runtime.stop() runtime = RuntimeCompositor(argument_composer.get_composed_arguments(env_settings={"telemetrySupported": True}), True, Constants.YUM) runtime.set_legacy_test_type('SuccessInstallPath') - runtime.patch_installer.execution_config.operation = Constants.CONFIGURE_PATCHING + runtime.patch_installer.execution_config.operation = Constants.Op.CONFIGURE_PATCHING runtime.patch_installer.raise_if_telemetry_unsupported() runtime.stop() @@ -624,7 +624,7 @@ def test_write_installer_perf_logs(self): self.assertTrue(runtime.patch_installer.stopwatch.task_details is not None) self.assertTrue(runtime.patch_installer.stopwatch.start_time <= runtime.patch_installer.stopwatch.end_time) self.assertTrue(runtime.patch_installer.stopwatch.time_taken_in_secs >= 0) - task_info = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK), str(Constants.INSTALLATION)) + task_info = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK), 
str(Constants.Op.INSTALLATION)) self.assertTrue(task_info in str(runtime.patch_installer.stopwatch.task_details)) task_status = "{0}={1}".format(str(Constants.PerfLogTrackerParams.TASK_STATUS), str(Constants.TaskStatus.SUCCEEDED)) self.assertTrue(task_status in str(runtime.patch_installer.stopwatch.task_details)) @@ -649,21 +649,24 @@ def test_write_installer_perf_logs_runs_successfully_if_exception_in_get_percent argument_composer = ArgumentComposer() argument_composer.maximum_duration = "PT0H" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), legacy_mode=True) - self.assertTrue(runtime.patch_installer.write_installer_perf_logs(True, 1, 1, runtime.maintenance_window, False, Constants.TaskStatus.SUCCEEDED, "")) + runtime.patch_installer.stopwatch.start() + runtime.patch_installer.set_additional_operation_specific_perf_logs(installed_patch_count=1, maintenance_window_exceeded=False) + self.assertTrue(runtime.patch_installer.write_operation_perf_logs(retry_count=1) is None) runtime.stop() def test_raise_if_min_python_version_not_met(self): runtime = RuntimeCompositor(ArgumentComposer().get_composed_arguments(), legacy_mode=True) + runtime.core_exec.check_minimum_environment_requirements_and_report = runtime.backup_check_minimum_environment_requirements_and_report original_version = sys.version_info sys.version_info = (2, 6) # Assert that an exception is raised with self.assertRaises(Exception) as context: - runtime.patch_installer.start_installation() - self.assertEqual(str(context.exception), Constants.PYTHON_NOT_COMPATIBLE_ERROR_MSG.format(sys.version_info)) - + runtime.core_exec.check_minimum_environment_requirements_and_report(patch_operation_requested=Constants.Op.INSTALLATION) + self.assertEqual(str(context.exception), Constants.PatchOperationErrorCodes.CL_PYTHON_TOO_OLD.format(sys.version_info)) # reset sys.version to original sys.version_info = original_version runtime.stop() + if __name__ == '__main__': unittest.main() diff --git 
a/src/core/tests/Test_RebootManager.py b/src/core/tests/Test_RebootManager.py index f3174f3c7..966b09266 100644 --- a/src/core/tests/Test_RebootManager.py +++ b/src/core/tests/Test_RebootManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -28,11 +28,11 @@ def tearDown(self): pass def test_reboot_settings(self): - self.test_reboot_setting('Never', Constants.REBOOT_NEVER) - self.test_reboot_setting('IfRequired', Constants.REBOOT_IF_REQUIRED) - self.test_reboot_setting('Always', Constants.REBOOT_ALWAYS) + self.test_reboot_setting('Never', Constants.RebootSettings.NEVER) + self.test_reboot_setting('IfRequired', Constants.RebootSettings.IF_REQUIRED) + self.test_reboot_setting('Always', Constants.RebootSettings.ALWAYS) - def test_reboot_setting(self, reboot_setting_in_api='Never', reboot_setting_in_code=Constants.REBOOT_NEVER): + def test_reboot_setting(self, reboot_setting_in_api='Never', reboot_setting_in_code=Constants.RebootSettings.NEVER): argument_composer = ArgumentComposer() argument_composer.reboot_setting = reboot_setting_in_api runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.YUM) @@ -45,7 +45,7 @@ def test_reboot_setting_default_config(self): argument_composer.reboot_setting = "" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.YUM) reboot_manager = runtime.reboot_manager - self.assertEqual(reboot_manager.is_setting(Constants.REBOOT_IF_REQUIRED), True) + self.assertEqual(reboot_manager.is_setting(Constants.RebootSettings.IF_REQUIRED), True) runtime.stop() def test_reboot_time_available(self): @@ -98,6 +98,7 @@ def 
test_reboot_always_runs_only_once_if_no_reboot_is_required(self): runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.REQUIRED) runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) runtime.status_handler.is_reboot_pending = False + runtime.package_manager.is_reboot_pending = lambda: False runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.COMPLETED) # no further reboot should be required diff --git a/src/core/tests/Test_StatusHandler.py b/src/core/tests/Test_StatusHandler.py index 09e227db9..4bf66f2a1 100644 --- a/src/core/tests/Test_StatusHandler.py +++ b/src/core/tests/Test_StatusHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -41,7 +41,7 @@ def test_set_package_assessment_status(self): self.runtime.status_handler.set_package_assessment_status(packages, package_versions) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.ASSESSMENT) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), 3) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") @@ -56,7 +56,7 @@ def test_set_package_assessment_status_for_auto_assessment(self): 
self.runtime.status_handler.set_package_assessment_status(packages, package_versions) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.ASSESSMENT) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), 3) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") @@ -70,7 +70,7 @@ def test_set_package_install_status(self): substatus_file_data = [] with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), 3) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") @@ -83,14 +83,14 @@ def test_set_package_install_status_extended(self): self.runtime.status_handler.set_package_install_status(packages, package_versions) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + 
self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["name"], "samba-common-bin") - self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["patchInstallationState"], Constants.PENDING) - self.runtime.status_handler.set_package_install_status("samba-common-bin", "2:4.4.5+dfsg-2ubuntu5.4", Constants.INSTALLED) + self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][1]["patchInstallationState"], Constants.PackageStatus.PENDING) + self.runtime.status_handler.set_package_install_status("samba-common-bin", "2:4.4.5+dfsg-2ubuntu5.4", Constants.PackageStatus.INSTALLED) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "samba-common-bin") - self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["patchInstallationState"], Constants.INSTALLED) + self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["patchInstallationState"], Constants.PackageStatus.INSTALLED) def test_set_package_install_status_classification(self): packages, package_versions = self.runtime.package_manager.get_all_updates() @@ -100,7 +100,7 @@ def test_set_package_install_status_classification(self): substatus_file_data = [] with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) 
self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), 3) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertTrue("Security" in str(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["classifications"])) @@ -118,7 +118,7 @@ def test_set_package_install_status_classification_not_set(self): substatus_file_data = [] with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), 3) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba") self.assertTrue("Other" in str(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["classifications"])) @@ -152,7 +152,7 @@ def test_set_maintenance_window_exceeded(self): def test_add_error(self): # Setting operation to assessment to add all errors under assessment substatus - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) # Unexpected input self.runtime.status_handler.add_error_to_status(None) @@ -162,7 +162,7 @@ def test_add_error(self): substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"]), 0) - self.runtime.status_handler.set_assessment_substatus_json(status=Constants.STATUS_SUCCESS) + self.runtime.status_handler.set_assessment_substatus_json(status=Constants.Status.SUCCESS) # Adding multiple 
exceptions self.runtime.status_handler.add_error_to_status("exception1", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) @@ -185,7 +185,7 @@ def test_add_error(self): self.assertEqual("Success".lower(), str(substatus_file_data["status"]).lower()) self.assertNotEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"], None) self.assertTrue("Adding same exception" not in str(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"])) - self.assertEqual(substatus_file_data["name"], Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.ASSESSMENT) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["code"], 1) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"]), 5) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"][1]["code"], Constants.PatchOperationErrorCodes.OPERATION_FAILED) @@ -193,19 +193,19 @@ def test_add_error(self): self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"][0]["message"], "a"*125 + "...") # Adding installation error - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) self.runtime.status_handler.add_error_to_status("installexception1", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) substatus_file_data = [] with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][1] self.assertNotEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"], None) - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) 
self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["code"], 1) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"]), 1) def test_add_duplicate_error(self): # Setting operation to assessment to add all errors under assessment substatus - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) # Unexpected input self.runtime.status_handler.add_error_to_status(None) @@ -215,7 +215,7 @@ def test_add_duplicate_error(self): substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"]), 0) - self.runtime.status_handler.set_assessment_substatus_json(status=Constants.STATUS_SUCCESS) + self.runtime.status_handler.set_assessment_substatus_json(status=Constants.Status.SUCCESS) # Adding multiple, duplicate exceptions self.runtime.status_handler.add_error_to_status("exception1", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) @@ -238,7 +238,7 @@ def test_add_duplicate_error(self): self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["errors"]["details"][1]["code"], Constants.PatchOperationErrorCodes.DEFAULT_ERROR) def test_add_error_fail(self): - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) import tempfile tempfile_backup = tempfile.NamedTemporaryFile @@ -258,14 +258,14 @@ def test_add_error_fail(self): def test_status_file_initial_load(self): # for non autopatching request, with Reboot started self.runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, 
self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) self.assertTrue(status_handler is not None) # for autopatching request, with reboot started self.runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) self.runtime.status_handler.set_patch_metadata_for_healthstore_substatus_json() self.runtime.execution_config.maintenance_run_id = str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")) - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"] self.assertTrue(len(substatus_file_data) == 1) @@ -273,13 +273,13 @@ def test_status_file_initial_load(self): # for autopatching request, with reboot not started self.runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.COMPLETED) self.runtime.execution_config.maintenance_run_id = str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")) - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as 
file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] self.assertTrue(status_handler is not None) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["shouldReportToHealthStore"], False) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patchVersion"], Constants.PATCH_VERSION_UNKNOWN) - self.assertEqual(substatus_file_data["status"].lower(), Constants.STATUS_SUCCESS.lower()) + self.assertEqual(substatus_file_data["status"].lower(), Constants.Status.SUCCESS.lower()) # fail to load status file self.runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) @@ -287,7 +287,7 @@ def test_status_file_initial_load(self): self.runtime.env_layer.file_system.open = None status_handler_failed = False try: - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) except Exception as error: status_handler_failed = True @@ -296,12 +296,12 @@ def test_status_file_initial_load(self): def test_set_patch_metadata_for_healthstore_substatus_json(self): # setting healthstore properties - self.runtime.status_handler.set_patch_metadata_for_healthstore_substatus_json(status=Constants.STATUS_SUCCESS, patch_version="2020-07-08", report_to_healthstore=True, wait_after_update=True) + self.runtime.status_handler.set_patch_metadata_for_healthstore_substatus_json(status=Constants.Status.SUCCESS, patch_version="2020-07-08", report_to_healthstore=True, wait_after_update=True) with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] 
self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["shouldReportToHealthStore"], True) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patchVersion"], "2020-07-08") - self.assertEqual(substatus_file_data["status"].lower(), Constants.STATUS_SUCCESS.lower()) + self.assertEqual(substatus_file_data["status"].lower(), Constants.Status.SUCCESS.lower()) # using default values self.runtime.status_handler.set_patch_metadata_for_healthstore_substatus_json() @@ -309,7 +309,7 @@ def test_set_patch_metadata_for_healthstore_substatus_json(self): substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["shouldReportToHealthStore"], False) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patchVersion"], Constants.PATCH_VERSION_UNKNOWN) - self.assertEqual(substatus_file_data["status"].lower(), Constants.STATUS_SUCCESS.lower()) + self.assertEqual(substatus_file_data["status"].lower(), Constants.Status.SUCCESS.lower()) def get_status_handler_substatus_maintenance_run_id(self): with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: @@ -319,7 +319,7 @@ def get_status_handler_substatus_maintenance_run_id(self): def test_status_file_maintenance_run_id(self): # Testing None/empty values for maintenance run id self.runtime.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) self.assertTrue(status_handler is not None) # Expect datetime string @@ -343,9 +343,9 
@@ def test_sequence_number_changed_termination_auto_assess_only(self): self.runtime.status_handler.report_sequence_number_changed_termination() with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertTrue(substatus_file_data["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertTrue(substatus_file_data["name"] == Constants.OpSummary.ASSESSMENT) formatted_message = json.loads(substatus_file_data['formattedMessage']['message']) - self.assertTrue(formatted_message["errors"]["details"][0]["code"] == Constants.PatchOperationErrorCodes.NEWER_OPERATION_SUPERSEDED) + self.assertTrue(formatted_message["errors"]["details"][0]["code"] == Constants.PatchOperationErrorCodes.CL_NEWER_OPERATION_SUPERSEDED) self.assertEqual(formatted_message["startedBy"], Constants.PatchAssessmentSummaryStartedBy.PLATFORM) def test_set_patch_metadata_for_healthstore_substatus_json_auto_assess_transitioning(self): @@ -361,7 +361,7 @@ def test_set_configure_patching_substatus_json_auto_assess_transitioning(self): def test_set_current_operation_auto_assess_non_assessment(self): self.runtime.execution_config.exec_auto_assess_only = True self.assertRaises(Exception, - lambda: self.runtime.status_handler.set_current_operation(Constants.INSTALLATION)) + lambda: self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION)) def test_sort_packages_by_classification_and_state(self): with self.runtime.env_layer.file_system.open("../../extension/tests/helpers/PatchOrderAssessmentSummary.json", 'r') as file_handle: @@ -403,7 +403,7 @@ def test_sort_packages_by_classification_and_state(self): def test_if_status_file_resets_on_load_if_malformed(self): # Mock complete status file with malformed json - sample_json = '[{"version": 1.0, "timestampUTC": "2023-05-13T07:38:07Z", "statusx": {"name": "Azure Patch Management", "operation": "Installation", 
"status": "success", "code": 0, "formattedMessage": {"lang": "en-US", "message": ""}, "substatusx": []}}]' + sample_json = '[{"version": 1.0, "timestampUTC": "2023-05-13T07:38:07Z", "statusx": {"name": "Azure Guest Patching Service", "operation": "Installation", "status": "success", "code": 0, "formattedMessage": {"lang": "en-US", "message": ""}, "substatusx": []}}]' file_path = self.runtime.execution_config.status_folder example_file1 = os.path.join(file_path, '123.complete.status') self.runtime.execution_config.complete_status_file_path = example_file1 @@ -411,11 +411,11 @@ def test_if_status_file_resets_on_load_if_malformed(self): with open(example_file1, 'w') as f: f.write(sample_json) - status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) # Mock complete status file with malformed json and being called in the load_status_file_components, and it will recreate a good complete_status_file with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0] - self.assertEqual(substatus_file_data["status"]["name"], "Azure Patch Management") + self.assertEqual(substatus_file_data["status"]["name"], "Azure Guest Patching Service") self.assertEqual(substatus_file_data["status"]["operation"], "Installation") self.assertIsNotNone(substatus_file_data["status"]["substatus"]) self.assertEqual(len(substatus_file_data["status"]["substatus"]), 0) @@ -440,9 +440,9 @@ def test_assessment_packages_map(self): patch_count_for_test = 5 expected_patch_id = 'python-samba0_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04' - status_handler = StatusHandler(self.runtime.env_layer, 
self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.vm_cloud_type) - self.runtime.execution_config.operation = Constants.ASSESSMENT - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + status_handler = StatusHandler(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, Constants.APT, self.runtime.cloud_type) + self.runtime.execution_config.operation = Constants.Op.ASSESSMENT + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) test_packages, test_package_versions = self.__set_up_packages_func(patch_count_for_test) status_handler.set_package_assessment_status(test_packages, test_package_versions, 'Critical') @@ -450,7 +450,7 @@ def test_assessment_packages_map(self): with self.runtime.env_layer.file_system.open(self.runtime.execution_config.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertTrue(substatus_file_data["name"] == Constants.PATCH_ASSESSMENT_SUMMARY) + self.assertTrue(substatus_file_data["name"] == Constants.OpSummary.ASSESSMENT) formatted_message = json.loads(substatus_file_data['formattedMessage']['message']) self.assertEqual(len(formatted_message['patches']), patch_count_for_test) self.assertEqual(formatted_message['patches'][0]['classifications'], ['Critical']) @@ -465,8 +465,8 @@ def test_installation_packages_map(self): patch_id_critical = 'python-samba0_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04' expected_value_critical = {'version': '2:4.4.5+dfsg-2ubuntu5.4', 'classifications': ['Critical'], 'name': 'python-samba0', 'patchId': 'python-samba0_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04', 'patchInstallationState': 'Installed'} - self.runtime.execution_config.operation = Constants.INSTALLATION - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.execution_config.operation = 
Constants.Op.INSTALLATION + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) patch_count_for_test = 50 test_packages, test_package_versions = self.__set_up_packages_func(patch_count_for_test) @@ -499,7 +499,7 @@ def test_load_status_and_set_package_install_status(self): test_packages, test_package_versions = self.__set_up_packages_func(patch_count_for_test) file_path = self.runtime.execution_config.status_folder example_file1 = os.path.join(file_path, '123.complete.status') - sample_json = [{"version": 1.0, "timestampUTC": "2023-06-17T02:06:19Z", "status": {"name": "Azure Patch Management", "operation": "Installation", "status": "success", "code": 0, "formattedMessage": {"lang": "en-US", "message": ""}, "substatus": [{"name": "PatchInstallationSummary", "status": "transitioning", "code": 0, "formattedMessage": {"lang": "en-US", "message": "{\"installationActivityId\": \"c365ab46-a12a-4388-853b-5240a0702124\", \"rebootStatus\": \"NotNeeded\", \"maintenanceWindowExceeded\": false, \"notSelectedPatchCount\": 0, \"excludedPatchCount\": 0, \"pendingPatchCount\": 0, \"installedPatchCount\": 5, \"failedPatchCount\": 0, \"patches\": [{\"patchId\": \"python-samba0_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba0\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Other\"], \"patchInstallationState\": \"Pending\"}, {\"patchId\": \"python-samba1_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba1\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Security\"], \"patchInstallationState\": \"Failed\"}, {\"patchId\": \"python-samba2_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba2\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Other\"], \"patchInstallationState\": \"Not_Selected\"}, {\"patchId\": \"python-samba3_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba3\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": 
[\"Other\"], \"patchInstallationState\": \"Pending\"}, {\"patchId\": \"python-samba4_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba4\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Unclassified\"], \"patchInstallationState\": \"Failed\"}], \"startTime\": \"2023-06-17T02:06:19.480634Z\", \"lastModifiedTime\": \"2023-06-17T02:06:19Z\", \"maintenanceRunId\": \"\", \"errors\": {\"code\": 0, \"details\": [], \"message\": \"0 error/s reported.\"}}"}}]}}] + sample_json = [{"version": 1.0, "timestampUTC": "2023-06-17T02:06:19Z", "status": {"name": "Azure Guest Patching Service", "operation": "Installation", "status": "success", "code": 0, "formattedMessage": {"lang": "en-US", "message": ""}, "substatus": [{"name": "PatchInstallationSummary", "status": "transitioning", "code": 0, "formattedMessage": {"lang": "en-US", "message": "{\"installationActivityId\": \"c365ab46-a12a-4388-853b-5240a0702124\", \"rebootStatus\": \"NotNeeded\", \"maintenanceWindowExceeded\": false, \"notSelectedPatchCount\": 0, \"excludedPatchCount\": 0, \"pendingPatchCount\": 0, \"installedPatchCount\": 5, \"failedPatchCount\": 0, \"patches\": [{\"patchId\": \"python-samba0_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba0\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Other\"], \"patchInstallationState\": \"Pending\"}, {\"patchId\": \"python-samba1_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba1\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Security\"], \"patchInstallationState\": \"Failed\"}, {\"patchId\": \"python-samba2_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba2\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Other\"], \"patchInstallationState\": \"Not_Selected\"}, {\"patchId\": \"python-samba3_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba3\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Other\"], 
\"patchInstallationState\": \"Pending\"}, {\"patchId\": \"python-samba4_2:4.4.5+dfsg-2ubuntu5.4_Ubuntu_16.04\", \"name\": \"python-samba4\", \"version\": \"2:4.4.5+dfsg-2ubuntu5.4\", \"classifications\": [\"Unclassified\"], \"patchInstallationState\": \"Failed\"}], \"startTime\": \"2023-06-17T02:06:19.480634Z\", \"lastModifiedTime\": \"2023-06-17T02:06:19Z\", \"maintenanceRunId\": \"\", \"errors\": {\"code\": 0, \"details\": [], \"message\": \"0 error/s reported.\"}}"}}]}}] with open(example_file1, 'w') as f: f.write(json.dumps(sample_json)) self.runtime.status_handler.status_file_path = example_file1 @@ -509,7 +509,7 @@ def test_load_status_and_set_package_install_status(self): self.runtime.status_handler.set_package_install_status(test_packages, test_package_versions, 'Installed', 'Critical') with self.runtime.env_layer.file_system.open(self.runtime.status_handler.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), patch_count_for_test) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba0") self.assertTrue('Critical' in str(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["classifications"])) @@ -523,7 +523,7 @@ def test_load_status_and_set_package_install_status(self): self.runtime.status_handler.set_package_install_status_classification(test_packages, test_package_versions, "Critical") with self.runtime.env_layer.file_system.open(self.runtime.status_handler.status_file_path, 'r') as file_handle: substatus_file_data = json.load(file_handle)[0]["status"]["substatus"][0] - self.assertEqual(substatus_file_data["name"], 
Constants.PATCH_INSTALLATION_SUMMARY) + self.assertEqual(substatus_file_data["name"], Constants.OpSummary.INSTALLATION) self.assertEqual(len(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"]), patch_count_for_test) self.assertEqual(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["name"], "python-samba0") self.assertTrue('Critical' in str(json.loads(substatus_file_data["formattedMessage"]["message"])["patches"][0]["classifications"])) diff --git a/src/core/tests/Test_Stopwatch.py b/src/core/tests/Test_Stopwatch.py index 83ea3540f..5d85c3ec3 100644 --- a/src/core/tests/Test_Stopwatch.py +++ b/src/core/tests/Test_Stopwatch.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -91,8 +91,6 @@ def test_set_task_details(self): self.assertTrue(stopwatch.end_time is not None) self.assertTrue(stopwatch.time_taken_in_secs is not None) self.assertTrue(stopwatch.task_details is None) - stopwatch.set_task_details("test") - self.assertTrue(stopwatch.task_details is not None) # test start Stopwatch twice def test_started_already(self): diff --git a/src/core/tests/Test_TelemetryWriter.py b/src/core/tests/Test_TelemetryWriter.py index 3c0363f1d..1df90ba25 100644 --- a/src/core/tests/Test_TelemetryWriter.py +++ b/src/core/tests/Test_TelemetryWriter.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -41,13 +41,13 @@ def mock_os_path_exists(self, filepath): return True def mock_get_file_size(self, file_path): - return Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS + 10 + return Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + 10 def mock_os_listdir(self, file_path): return ['testevent1.json', 'testevent2.json', 'testevent3.json', 'testevent4.json'] def test_write_event(self): - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] telemetry_event_counter_in_first_test_event = None with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, latest_event_file), 'r+') as f: @@ -58,7 +58,7 @@ def test_write_event(self): telemetry_event_counter_in_first_test_event = text_found.group(1) if text_found else None f.close() - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] telemetry_event_counter_in_second_test_event = None with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, latest_event_file), 'r+') as f: @@ -76,8 +76,8 @@ def test_write_event(self): def 
test_write_multiple_events_in_same_file(self): time_backup = time.time time.time = self.mock_time - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^' + str(self.mock_time()) + '0+.json$', pos_json)][-1] with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, latest_event_file), 'r+') as f: events = json.load(f) @@ -90,25 +90,25 @@ def test_write_multiple_events_in_same_file(self): def test_write_event_msg_size_limit(self): # Assuming 1 char is 1 byte message = "a"*3074 - self.runtime.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Error, "Test Task") + self.runtime.telemetry_writer.write_event(message, Constants.EventLevel.Error, "Test Task") latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, latest_event_file), 'r+') as f: events = json.load(f) self.assertTrue(events is not None) self.assertEqual(events[-1]["TaskName"], "Test Task") self.assertTrue(len(events[-1]["Message"]) < len(message.encode('utf-8'))) - chars_dropped = len(message.encode('utf-8')) - Constants.TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS + Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + Constants.TELEMETRY_EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS + chars_dropped = len(message.encode('utf-8')) - 
Constants.TelemetryConfig.MSG_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + Constants.TelemetryConfig.EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS self.assertTrue("a"*(len(message.encode('utf-8')) - chars_dropped) + ". [{0} chars dropped]".format(chars_dropped) in events[-1]["Message"]) f.close() # TODO: The following 3 tests cause widespread test suite failures (on master), so leaving it out. And tracking in: Task 10912099: [Bug] Bug in telemetry writer - overwriting prior events in fast execution # def test_write_event_size_limit(self): # # will not write to telemetry if event size exceeds limit - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") # old_events = os.listdir(self.runtime.telemetry_writer.events_folder_path) # message = "a" * 3074 # task_name = "b" * 5000 - # self.runtime.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Error, task_name) + # self.runtime.telemetry_writer.write_event(message, Constants.EventLevel.Error, task_name) # new_events = os.listdir(self.runtime.telemetry_writer.events_folder_path) # self.assertEqual(old_events, new_events) # latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] @@ -119,7 +119,7 @@ def test_write_event_msg_size_limit(self): # f.close() # # def test_write_to_new_file_if_event_file_limit_reached(self): - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") # first_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if 
re.search('^[0-9]+.json$', pos_json)][-1] # os_path_exists_backup = os.path.exists # os.path.exists = self.mock_os_path_exists @@ -129,7 +129,7 @@ def test_write_event_msg_size_limit(self): # # forcing wait of 1 sec to ensure new file is created, since we have mocked time.sleep in RuntimeComposer # time.sleep = self.runtime.backup_time_sleep # - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") # event_files = os.listdir(self.runtime.telemetry_writer.events_folder_path) # self.assertTrue(len(event_files) > 1) # second_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] @@ -139,49 +139,49 @@ def test_write_event_msg_size_limit(self): # # def test_delete_older_events(self): # # deleting older event files before adding new one - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task3") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task3") # old_event_files = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)] - # telemetry_dir_size_backup = 
Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - # Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 1030 - # telemetry_event_size_backup = Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS - # Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 1024 + # telemetry_dir_size_backup = Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS + # Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = 1030 + # telemetry_event_size_backup = Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + # Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = 1024 # - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task4") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task4") # new_event_files = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)] # self.assertEqual(len(new_event_files), 1) # self.assertTrue(old_event_files[0] not in new_event_files and old_event_files[1] not in new_event_files and old_event_files[2] not in new_event_files) - # Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup - # Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup + # Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup + # Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup # # # error while deleting event files where the directory size exceeds limit even after deletion attempts - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") - # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task3") + # 
self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") + # self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task3") # old_event_files = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)] - # telemetry_dir_size_backup = Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - # Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 500 - # telemetry_event_size_backup = Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS - # Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 400 + # telemetry_dir_size_backup = Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS + # Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = 500 + # telemetry_event_size_backup = Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + # Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = 400 # os_remove_backup = os.remove # os.remove = self.mock_os_remove # - # self.assertRaises(Exception, lambda: self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task4")) + # self.assertRaises(Exception, lambda: self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task4")) # - # Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup - # Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup + # Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup + # Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup # os.remove = os_remove_backup def test_write_event_max_event_count_throttle_reached(self): - event_count_max_throttle_backup = Constants.TELEMETRY_MAX_EVENT_COUNT_THROTTLE - 
Constants.TELEMETRY_MAX_EVENT_COUNT_THROTTLE = 5 + event_count_max_throttle_backup = Constants.TelemetryConfig.MAX_EVENT_COUNT_THROTTLE + Constants.TelemetryConfig.MAX_EVENT_COUNT_THROTTLE = 5 self.runtime.telemetry_writer.event_count = 1 self.runtime.telemetry_writer.start_time_for_event_count_throttle_check = datetime.datetime.utcnow() - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task3") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task3") event_file_task3 = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, event_file_task3), 'r+') as f: events = json.load(f) @@ -189,7 +189,7 @@ def test_write_event_max_event_count_throttle_reached(self): self.assertTrue("Test Task3" in events[-1]['TaskName']) f.close() - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task4") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task4") event_file_task4 = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] with open(os.path.join(self.runtime.telemetry_writer.events_folder_path, 
event_file_task4), 'r+') as f: events = json.load(f) @@ -198,29 +198,29 @@ def test_write_event_max_event_count_throttle_reached(self): f.close() self.assertTrue(self.runtime.telemetry_writer.event_count == 2) - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task5") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task5") self.assertTrue(self.runtime.telemetry_writer.event_count == 3) - max_time_for_event_count_throttle_backup = Constants.TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE - Constants.TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = 0 - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task6") + max_time_for_event_count_throttle_backup = Constants.TelemetryConfig.MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE + Constants.TelemetryConfig.MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = 0 + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task6") self.assertTrue(self.runtime.telemetry_writer.event_count == 2) - Constants.TELEMETRY_MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = max_time_for_event_count_throttle_backup + Constants.TelemetryConfig.MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = max_time_for_event_count_throttle_backup - Constants.TELEMETRY_MAX_EVENT_COUNT_THROTTLE = event_count_max_throttle_backup + Constants.TelemetryConfig.MAX_EVENT_COUNT_THROTTLE = event_count_max_throttle_backup def test_events_deleted_outside_of_extension_while_extension_is_running(self): backup_os_listdir = os.listdir os.listdir = self.mock_os_listdir - self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + self.runtime.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test 
Task") os.listdir = backup_os_listdir def test_write_event_with_buffer_true_and_then_flush(self): - self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.EventLevel.Verbose, Constants.BufferMessage.TRUE) - self.runtime.telemetry_writer.write_event_with_buffer("Message 2", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 2", Constants.EventLevel.Verbose, Constants.BufferMessage.TRUE) - self.runtime.telemetry_writer.write_event_with_buffer("Message 3", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 3", Constants.EventLevel.Verbose, Constants.BufferMessage.FLUSH) latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if @@ -233,7 +233,7 @@ def test_write_event_with_buffer_true_and_then_flush(self): self.assertTrue(text_found.string.startswith("Message 1 | Message 2 | Message 3")) def test_write_event_with_buffer_only_flush(self): - self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.EventLevel.Verbose, Constants.BufferMessage.FLUSH) latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if @@ -246,7 +246,7 @@ def test_write_event_with_buffer_only_flush(self): self.assertTrue(text_found.string.startswith("Message 1")) def test_write_event_with_buffer_false(self): - self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.EventLevel.Verbose, Constants.BufferMessage.FALSE) latest_event_file = [pos_json for pos_json in 
os.listdir(self.runtime.telemetry_writer.events_folder_path) if @@ -259,13 +259,13 @@ def test_write_event_with_buffer_false(self): self.assertTrue(text_found.string.startswith("Message 1")) def test_write_event_with_buffer_true_and_then_flush_but_different_telemetry_event_level(self): - self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.EventLevel.Verbose, Constants.BufferMessage.TRUE) - self.runtime.telemetry_writer.write_event_with_buffer("Message 2", Constants.TelemetryEventLevel.Informational, + self.runtime.telemetry_writer.write_event_with_buffer("Message 2", Constants.EventLevel.Info, Constants.BufferMessage.FLUSH) - # As the messages are with different TelemetryEventLevel, they will be written separately + # As the messages are with different EventLevel, they will be written separately # even though flush is used. latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if re.search('^[0-9]+.json$', pos_json)][-1] @@ -277,10 +277,10 @@ def test_write_event_with_buffer_true_and_then_flush_but_different_telemetry_eve self.assertTrue(text_found.string.startswith("Message 2")) def test_write_event_with_buffer_true_and_empty_string_and_then_flush_with_non_empty_string(self): - self.runtime.telemetry_writer.write_event_with_buffer("", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("", Constants.EventLevel.Verbose, Constants.BufferMessage.TRUE) - self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.TelemetryEventLevel.Verbose, + self.runtime.telemetry_writer.write_event_with_buffer("Message 1", Constants.EventLevel.Verbose, Constants.BufferMessage.FLUSH) latest_event_file = [pos_json for pos_json in os.listdir(self.runtime.telemetry_writer.events_folder_path) if diff --git 
a/src/core/tests/Test_UbuntuProClient.py b/src/core/tests/Test_UbuntuProClient.py index 6f6dd5b9e..aa92efb99 100644 --- a/src/core/tests/Test_UbuntuProClient.py +++ b/src/core/tests/Test_UbuntuProClient.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/tests/Test_YumPackageManager.py b/src/core/tests/Test_YumPackageManager.py index e8c2c13f1..bfb8df1b3 100644 --- a/src/core/tests/Test_YumPackageManager.py +++ b/src/core/tests/Test_YumPackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -197,7 +197,7 @@ def test_install_package_success(self): self.assertIsNotNone(package_filter) # test for successfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.INSTALLED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.INSTALLED) def test_install_package_failure(self): """Unit test for install package failure""" @@ -209,7 +209,7 @@ def test_install_package_failure(self): self.assertIsNotNone(package_filter) # test for unsuccessfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy', '3.13.1-102.el7_3.16', 
simulate=True), Constants.FAILED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.FAILED) def test_install_package_obsoleted(self): """Unit test for install package failure""" @@ -221,7 +221,7 @@ def test_install_package_obsoleted(self): self.assertIsNotNone(package_filter) # test for unsuccessfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('rdma.noarch', '7.3_4.7_rc2-6.el7_3', simulate=True), Constants.INSTALLED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('rdma.noarch', '7.3_4.7_rc2-6.el7_3', simulate=True), Constants.PackageStatus.INSTALLED) def test_install_package_replaced(self): """Unit test for install package failure""" @@ -233,7 +233,7 @@ def test_install_package_replaced(self): self.assertIsNotNone(package_filter) # test for unsuccessfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('python-rhsm.x86_64', '1.19.10-1.el7_4', simulate=True), Constants.INSTALLED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('python-rhsm.x86_64', '1.19.10-1.el7_4', simulate=True), Constants.PackageStatus.INSTALLED) def test_get_product_name(self): """Unit test for retrieving product Name""" @@ -484,121 +484,121 @@ def test_disable_auto_os_updates_with_uninstalled_services(self): # no services are installed on the machine. expected o/p: function will complete successfully. 
Backup file will be created with default values, no auto OS update configuration settings will be updated as there are none self.runtime.set_legacy_test_type('SadPath') package_manager = self.container.get('package_manager') - package_manager.disable_auto_os_update() - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) # validating backup for yum-cron self.assertTrue(Constants.YumAutoOSUpdateServices.YUM_CRON in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_download_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_apply_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_enable_on_reboot_identifier_text], False) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_installation_state_identifier_text], False) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_download_updates_identifier_text], "") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_apply_updates_identifier_text], "") + 
self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_enable_on_reboot_identifier_text], False) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_installation_state_identifier_text], False) # validating backup for dnf-automatic self.assertTrue(Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_download_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_apply_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_enable_on_reboot_identifier_text], False) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_installation_state_identifier_text], False) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_download_updates_identifier_text], "") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_apply_updates_identifier_text], "") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_enable_on_reboot_identifier_text], False) + 
self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_installation_state_identifier_text], False) # validating backup for packagekit self.assertTrue(Constants.YumAutoOSUpdateServices.PACKAGEKIT in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_download_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_apply_updates_identifier_text], "") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_enable_on_reboot_identifier_text], False) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_installation_state_identifier_text], False) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_download_updates_identifier_text], "") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_apply_updates_identifier_text], "") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_enable_on_reboot_identifier_text], False) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_installation_state_identifier_text], False) def test_disable_auto_os_updates_with_installed_services(self): # all services are installed and contain valid configurations. 
expected o/p All services will be disabled and backup file should reflect default settings for all self.runtime.set_legacy_test_type('HappyPath') package_manager = self.container.get('package_manager') - package_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") + package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") yum_cron_os_patch_configuration_settings = 'apply_updates = yes\ndownload_updates = yes\n' - self.runtime.write_to_file(package_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) - package_manager.dnf_automatic_configuration_file_path = os.path.join(self.runtime.execution_config.config_folder, "automatic.conf") + package_manager.patch_mode_manager.dnf_automatic_configuration_file_path = os.path.join(self.runtime.execution_config.config_folder, "automatic.conf") dnf_automatic_os_patch_configuration_settings = 'apply_updates = yes\ndownload_updates = yes\n' - self.runtime.write_to_file(package_manager.dnf_automatic_configuration_file_path, dnf_automatic_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.dnf_automatic_configuration_file_path, dnf_automatic_os_patch_configuration_settings) - package_manager.packagekit_configuration_file_path = os.path.join(self.runtime.execution_config.config_folder, "PackageKit.conf") + package_manager.patch_mode_manager.packagekit_configuration_file_path = os.path.join(self.runtime.execution_config.config_folder, "PackageKit.conf") packagekit_os_patch_configuration_settings = 'WritePreparedUpdates = true\nGetPreparedUpdates = true\n' - self.runtime.write_to_file(package_manager.packagekit_configuration_file_path, 
packagekit_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.packagekit_configuration_file_path, packagekit_os_patch_configuration_settings) - package_manager.disable_auto_os_update() - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) # validating backup for yum-cron self.assertTrue(Constants.YumAutoOSUpdateServices.YUM_CRON in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_download_updates_identifier_text], "yes") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_apply_updates_identifier_text], "yes") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_enable_on_reboot_identifier_text], True) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.yum_cron_installation_state_identifier_text], True) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_download_updates_identifier_text], "yes") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_apply_updates_identifier_text], "yes") + 
self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_enable_on_reboot_identifier_text], True) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.YUM_CRON][package_manager.patch_mode_manager.yum_cron_installation_state_identifier_text], True) # validating backup for dnf-automatic self.assertTrue(Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_download_updates_identifier_text], "yes") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_apply_updates_identifier_text], "yes") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_enable_on_reboot_identifier_text], True) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.dnf_automatic_installation_state_identifier_text], True) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_download_updates_identifier_text], "yes") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_apply_updates_identifier_text], "yes") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_enable_on_reboot_identifier_text], True) + 
self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.DNF_AUTOMATIC][package_manager.patch_mode_manager.dnf_automatic_installation_state_identifier_text], True) # validating backup for packagekit self.assertTrue(Constants.YumAutoOSUpdateServices.PACKAGEKIT in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_download_updates_identifier_text], "true") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_apply_updates_identifier_text], "true") - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_enable_on_reboot_identifier_text], True) - self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.packagekit_installation_state_identifier_text], True) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_download_updates_identifier_text], "true") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_apply_updates_identifier_text], "true") + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_enable_on_reboot_identifier_text], True) + self.assertEqual(image_default_patch_configuration_backup[Constants.YumAutoOSUpdateServices.PACKAGEKIT][package_manager.patch_mode_manager.packagekit_installation_state_identifier_text], True) def test_disable_auto_os_update_failure(self): - # disable with non existing log file + # disable with non-existing log file package_manager = self.container.get('package_manager') - 
self.assertRaises(Exception, package_manager.disable_auto_os_update) - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + self.assertRaises(Exception, package_manager.patch_mode_manager.disable_auto_os_update) + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) def test_update_image_default_patch_mode(self): package_manager = self.container.get('package_manager') - package_manager.os_patch_configuration_settings_file_path = package_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") - # disable apply_udpates when enabled by default + # disable apply_updates when enabled by default yum_cron_os_patch_configuration_settings = 'apply_updates = yes\ndownload_updates = yes\n' - self.runtime.write_to_file(package_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting(package_manager.yum_cron_apply_updates_identifier_text, "no", package_manager.yum_cron_config_pattern_match_text) - yum_cron_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.yum_cron_configuration_settings_file_path) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting(package_manager.patch_mode_manager.yum_cron_apply_updates_identifier_text, "no", package_manager.patch_mode_manager.yum_cron_config_pattern_match_text) + yum_cron_os_patch_configuration_settings_file_path_read = 
self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path) self.assertTrue(yum_cron_os_patch_configuration_settings_file_path_read is not None) self.assertTrue('apply_updates = no' in yum_cron_os_patch_configuration_settings_file_path_read) self.assertTrue('download_updates = yes' in yum_cron_os_patch_configuration_settings_file_path_read) # disable download_updates when enabled by default yum_cron_os_patch_configuration_settings = 'apply_updates = yes\ndownload_updates = yes\n' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting(package_manager.yum_cron_download_updates_identifier_text, "no", package_manager.yum_cron_config_pattern_match_text) - yum_cron_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting(package_manager.patch_mode_manager.yum_cron_download_updates_identifier_text, "no", package_manager.patch_mode_manager.yum_cron_config_pattern_match_text) + yum_cron_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(yum_cron_os_patch_configuration_settings_file_path_read is not None) self.assertTrue('apply_updates = yes' in yum_cron_os_patch_configuration_settings_file_path_read) self.assertTrue('download_updates = no' in yum_cron_os_patch_configuration_settings_file_path_read) # disable apply_updates when default patch mode settings file is empty yum_cron_os_patch_configuration_settings = 
'' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting(package_manager.yum_cron_apply_updates_identifier_text, "no", package_manager.yum_cron_config_pattern_match_text) - yum_cron_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting(package_manager.patch_mode_manager.yum_cron_apply_updates_identifier_text, "no", package_manager.patch_mode_manager.yum_cron_config_pattern_match_text) + yum_cron_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(yum_cron_os_patch_configuration_settings_file_path_read is not None) self.assertTrue('download_updates' not in yum_cron_os_patch_configuration_settings_file_path_read) self.assertTrue('apply_updates = no' in yum_cron_os_patch_configuration_settings_file_path_read) def test_update_image_default_patch_mode_raises_exception(self): package_manager = self.container.get('package_manager') - package_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") + package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path = os.path.join(self.runtime.execution_config.config_folder, "yum-cron.conf") yum_cron_os_patch_configuration_settings = 'apply_updates = yes\ndownload_updates = yes\n' - self.runtime.write_to_file(package_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) + 
self.runtime.write_to_file(package_manager.patch_mode_manager.yum_cron_configuration_settings_file_path, yum_cron_os_patch_configuration_settings) self.runtime.env_layer.file_system.write_with_retry = self.mock_write_with_retry_raise_exception - self.assertRaises(Exception, package_manager.update_os_patch_configuration_sub_setting) + self.assertRaises(Exception, package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting) def test_is_reboot_pending_return_true_when_exception_raised(self): package_manager = self.container.get('package_manager') diff --git a/src/core/tests/Test_ZypperPackageManager.py b/src/core/tests/Test_ZypperPackageManager.py index ffadb910d..6ecaa4f4c 100644 --- a/src/core/tests/Test_ZypperPackageManager.py +++ b/src/core/tests/Test_ZypperPackageManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -227,7 +227,7 @@ def test_install_package_success(self): self.assertIsNotNone(package_manager) # test for successfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy', '3.13.1-102.el7_3.16', simulate=True), Constants.INSTALLED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.INSTALLED) def test_install_package_failure(self): self.runtime.set_legacy_test_type('FailInstallPath') @@ -236,7 +236,7 @@ def test_install_package_failure(self): self.assertIsNotNone(package_manager) # test for unsuccessfully installing a package - self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', 
'3.13.1-102.el7_3.16', simulate=True), Constants.FAILED) + self.assertEqual(package_manager.install_update_and_dependencies_and_get_status('selinux-policy.noarch', '3.13.1-102.el7_3.16', simulate=True), Constants.PackageStatus.FAILED) def test_get_process_tree_from_package_manager_output_success(self): self.runtime.set_legacy_test_type('HappyPath') @@ -377,53 +377,53 @@ def test_disable_auto_os_updates_with_uninstalled_services(self): # no services are installed on the machine. expected o/p: function will complete successfully. Backup file will be created with default values, no auto OS update configuration settings will be updated as there are none self.runtime.set_legacy_test_type('SadPath') package_manager = self.container.get('package_manager') - package_manager.disable_auto_os_update() - self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) # validating backup for yast2-online-update-configuration - self.assertTrue(package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT], "") - self.assertEqual(image_default_patch_configuration_backup[package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT], False) + 
self.assertTrue(package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup) + self.assertEqual(image_default_patch_configuration_backup[package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT], "") + self.assertEqual(image_default_patch_configuration_backup[package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT], False) def test_disable_auto_os_updates_with_installed_services(self): # all services are installed and contain valid configurations. expected o/p All services will be disabled and backup file should reflect default settings for all self.runtime.set_legacy_test_type('HappyPath') package_manager = self.container.get('package_manager') - package_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = os.path.join(self.runtime.execution_config.config_folder, "automatic_online_update") + package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = os.path.join(self.runtime.execution_config.config_folder, "automatic_online_update") yast2_online_update_configuration_os_patch_configuration_settings = 'AOU_ENABLE_CRONJOB="true"' - self.runtime.write_to_file(package_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH, yast2_online_update_configuration_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH, yast2_online_update_configuration_os_patch_configuration_settings) - package_manager.disable_auto_os_update() - 
self.assertTrue(package_manager.image_default_patch_configuration_backup_exists()) + package_manager.patch_mode_manager.disable_auto_os_update() + self.assertTrue(package_manager.patch_mode_manager.image_default_patch_configuration_backup_exists()) image_default_patch_configuration_backup = json.loads(self.runtime.env_layer.file_system.read_with_retry(package_manager.image_default_patch_configuration_backup_path)) self.assertTrue(image_default_patch_configuration_backup is not None) # validating backup for yast2-online-update-configuration - self.assertTrue(package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup) - self.assertEqual(image_default_patch_configuration_backup[package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT], "true") - self.assertEqual(image_default_patch_configuration_backup[package_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT], True) + self.assertTrue(package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION in image_default_patch_configuration_backup) + self.assertEqual(image_default_patch_configuration_backup[package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT], "true") + self.assertEqual(image_default_patch_configuration_backup[package_manager.patch_mode_manager.ZypperAutoOSUpdateServices.YAST2_ONLINE_UPDATE_CONFIGURATION][package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.INSTALLATION_STATE_IDENTIFIER_TEXT], True) def test_update_image_default_patch_mode(self): package_manager = self.container.get('package_manager') - 
package_manager.os_patch_configuration_settings_file_path = package_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = os.path.join(self.runtime.execution_config.config_folder, "automatic_online_update") + package_manager.patch_mode_manager.os_patch_configuration_settings_file_path = package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH = os.path.join(self.runtime.execution_config.config_folder, "automatic_online_update") # disable apply_updates when enabled by default yast2_online_update_configuration_os_patch_configuration_settings = 'AOU_ENABLE_CRONJOB="true"' - self.runtime.write_to_file(package_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH, yast2_online_update_configuration_os_patch_configuration_settings) + self.runtime.write_to_file(package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH, yast2_online_update_configuration_os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting(package_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT, "false", package_manager.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT) - yast2_online_update_configuration_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting(package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT, "false", package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT) + yast2_online_update_configuration_os_patch_configuration_settings_file_path_read = 
self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.OS_PATCH_CONFIGURATION_SETTINGS_FILE_PATH) self.assertTrue(yast2_online_update_configuration_os_patch_configuration_settings_file_path_read is not None) self.assertTrue('AOU_ENABLE_CRONJOB="false"' in yast2_online_update_configuration_os_patch_configuration_settings_file_path_read) # disable apply_updates when default patch mode settings file is empty yast2_online_update_configuration_os_patch_configuration_settings = '' - self.runtime.write_to_file(package_manager.os_patch_configuration_settings_file_path, yast2_online_update_configuration_os_patch_configuration_settings) - package_manager.update_os_patch_configuration_sub_setting(package_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT, "false", package_manager.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT) - yast2_online_update_configuration_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.os_patch_configuration_settings_file_path) + self.runtime.write_to_file(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path, yast2_online_update_configuration_os_patch_configuration_settings) + package_manager.patch_mode_manager.update_os_patch_configuration_sub_setting(package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.APPLY_UPDATES_IDENTIFIER_TEXT, "false", package_manager.patch_mode_manager.YastOnlineUpdateConfigurationConstants.AUTO_UPDATE_CONFIG_PATTERN_MATCH_TEXT) + yast2_online_update_configuration_os_patch_configuration_settings_file_path_read = self.runtime.env_layer.file_system.read_with_retry(package_manager.patch_mode_manager.os_patch_configuration_settings_file_path) self.assertTrue(yast2_online_update_configuration_os_patch_configuration_settings_file_path_read is not None) self.assertTrue('AOU_ENABLE_CRONJOB="false"' 
in yast2_online_update_configuration_os_patch_configuration_settings_file_path_read) @@ -435,7 +435,7 @@ def is_string_in_status_file(self, str_to_find): def test_package_manager_with_retries(self): package_manager = self.container.get('package_manager') # Setting operation to assessment to add all errors under assessment substatus - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) # Wrap count in a mutable container to modify in mocked method to keep track of retries counter = [0] @@ -465,7 +465,7 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): # Should reach max retries - 1 and then succeed, per the code above self.assertEqual(counter[0], package_manager.package_manager_max_retries - 1) - self.assertFalse(self.is_string_in_status_file('Unexpected return code (4) from package manager on command: sudo zypper refresh')) + self.assertFalse(self.is_string_in_status_file("Unexpected return code from package manager. [Code=4][Command=sudo zypper refresh]")) # Case 2: UnalignedPath to HappyPath (retry a few times and then success) counter = [0] @@ -481,7 +481,7 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): # Should reach max retries - 1 and then succeed, per the code above self.assertEqual(counter[0], package_manager.package_manager_max_retries - 1) - self.assertTrue(self.is_string_in_status_file('Unexpected return code (7) from package manager on command: sudo zypper refresh')) + self.assertTrue(self.is_string_in_status_file('Unexpected return code from package manager. 
[Code=7][Command=sudo zypper refresh]')) # Case 3: NonexistentErrorCodePath to HappyPath (should not retry since error code is not supported) counter = [0] @@ -495,8 +495,8 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): self.fail('Package manager should fail without retrying') except Exception as error: self.assertEqual(counter[0], 1) # invoke should only be called once - self.assertTrue(self.is_string_in_status_file('Unexpected return code (999999) from package manager on command: sudo zypper refresh')) - self.assertTrue('Unexpected return code (999999) from package manager on command: sudo zypper refresh' in repr(error)) + self.assertTrue(self.is_string_in_status_file('Unexpected return code from package manager. [Code=999999][Command=sudo zypper refresh]')) + #self.assertTrue('Unexpected return code from package manager. [Code=999999][Command=sudo zypper refresh]' in repr(error)) # Case 4: SadPath (retry and ultimately fail) # Set counter to max retries already so it does not hit the condition to enable HappyPath @@ -511,15 +511,15 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): except Exception as error: # Should reach max retries * 2 and fail (since it started at max retries) self.assertEqual(counter[0], package_manager.package_manager_max_retries * 2) - self.assertTrue(self.is_string_in_status_file('Unexpected return code (7) from package manager on command: sudo zypper refresh')) - self.assertTrue('Unexpected return code (7) from package manager on command: sudo zypper refresh' in repr(error)) + self.assertTrue(self.is_string_in_status_file('Unexpected return code from package manager. [Code=7][Command=sudo zypper refresh]')) + self.assertTrue('Unexpected return code from package manager. 
[Code=7][Command=sudo zypper refresh]' in repr(error)) package_manager.env_layer.run_command_output = backup_mocked_method def test_package_manager_no_repos(self): package_manager = self.container.get('package_manager') # Setting operation to assessment to add all errors under assessment substatus - self.runtime.status_handler.set_current_operation(Constants.ASSESSMENT) + self.runtime.status_handler.set_current_operation(Constants.Op.ASSESSMENT) cmd_to_run = 'sudo zypper refresh' # Wrap count in a mutable container to modify in mocked method to keep track of retries @@ -570,16 +570,16 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): try: package_manager.invoke_package_manager(cmd_to_run) except Exception as error: - # Should try twice - once to fail and refresh repo, twice to ultimately fail with same error code (non-retriable) + # Should try twice - once to fail and refresh repo, twice to ultimately fail with same error code (non-retryable) self.assertEqual(counter[0], 2) - self.assertTrue(self.is_string_in_status_file('Unexpected return code (6) from package manager on command: sudo zypper refresh')) - self.assertTrue('Unexpected return code (6) from package manager on command: sudo zypper refresh' in repr(error)) + self.assertTrue(self.is_string_in_status_file("Unexpected return code from package manager. [Code=6][Command=sudo zypper refresh --services]")) + self.assertTrue("Unexpected return code from package manager. 
[Code=6][Command=sudo zypper refresh]" in repr(error)) package_manager.env_layer.run_command_output = backup_mocked_method def test_package_manager_exit_err_commit(self): package_manager = self.container.get('package_manager') - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) # Test command modifications with --replacefiles cmd_to_run = 'sudo zypper --non-interactive update samba-libs=4.15.4+git.327.37e0a40d45f-3.57.1' @@ -623,7 +623,7 @@ def mock_run_command_output(cmd, no_output=False, chk_err=False): def test_package_manager_exit_reboot_required(self): # AnotherSadPath returns code 102 for this command package_manager = self.container.get('package_manager') - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) self.runtime.set_legacy_test_type('AnotherSadPath') cmd = "sudo LANG=en_US.UTF8 zypper --non-interactive patch --category security --dry-run" @@ -638,7 +638,7 @@ def test_package_manager_exit_reboot_required(self): def test_package_manager_exit_repeat_operation(self): # SadPath returns code 103 for this command package_manager = self.container.get('package_manager') - self.runtime.status_handler.set_current_operation(Constants.INSTALLATION) + self.runtime.status_handler.set_current_operation(Constants.Op.INSTALLATION) self.runtime.set_legacy_test_type('SadPath') # Should not set reboot flag (as it is a dry run) diff --git a/src/core/tests/__init__.py b/src/core/tests/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/tests/__init__.py +++ b/src/core/tests/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/core/tests/library/ArgumentComposer.py b/src/core/tests/library/ArgumentComposer.py index 550f225b5..e4701f5e6 100644 --- a/src/core/tests/library/ArgumentComposer.py +++ b/src/core/tests/library/ArgumentComposer.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,6 +14,7 @@ # # Requires Python 2.7+ +""" Argument Composer - helps encapsulate argument composition for Core from default settings that can be customized as desired prior to composition """ import base64 import datetime import json @@ -23,11 +24,9 @@ class ArgumentComposer(object): - """ Helps encapsulate argument composition for Core from default settings that can be customized as desired prior to composition """ - - def __init__(self): + def __init__(self, cloud_type=Constants.CloudType.AZURE): # Constants - self.__EXEC = "MsftLinuxPatchCore.py" + self.__EXEC = "AzGPSLinuxPatchCore.py" self.__TESTS_FOLDER = "tests" self.__SCRATCH_FOLDER = "scratch" self.__ARG_TEMPLATE = "{0} {1} {2} {3} \'{4}\' {5} \'{6}\' {7} {8}" @@ -50,7 +49,8 @@ def __init__(self): Constants.AzGPSPaths.EULA_SETTINGS = os.path.join(scratch_folder, "patch.eula.settings") # config settings - self.operation = Constants.INSTALLATION + self.cloud_type = cloud_type + self.operation = Constants.Op.INSTALLATION if cloud_type == Constants.CloudType.AZURE else Constants.Op.ASSESSMENT self.activity_id = 'c365ab46-a12a-4388-853b-5240a0702124' 
self.start_time = str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")) self.maximum_duration = 'PT2H' @@ -64,7 +64,7 @@ def __init__(self): self.assessment_mode = None self.maximum_assessment_interval = "PT3H" - self.exec_auto_assess_only = False + self.exec_auto_assess_only = bool(cloud_type == Constants.CloudType.ARC) # REAL environment settings self.emulator_enabled = False @@ -85,6 +85,7 @@ def get_composed_arguments(self, env_settings={}): environment_settings[key] = env_settings[key] config_settings = { + "cloudType": self.cloud_type, "operation": self.operation, "activityId": self.activity_id, "startTime": self.start_time, diff --git a/src/core/tests/library/LegacyEnvLayerExtensions.py b/src/core/tests/library/LegacyEnvLayerExtensions.py index d03167224..43bc57856 100644 --- a/src/core/tests/library/LegacyEnvLayerExtensions.py +++ b/src/core/tests/library/LegacyEnvLayerExtensions.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -63,7 +63,10 @@ def run_command_output(self, cmd, no_output=False, chk_err=True): code = 0 if self.legacy_test_type == 'HappyPath': - if cmd.find("cat /proc/cpuinfo | grep name") > -1: + if cmd.find("sudo timeout 10 id && echo True || echo False") > -1: + code = 0 + output = "uid=0(root) gid=0(root) groups=0(root)\nTrue" + elif cmd.find("cat /proc/cpuinfo | grep name") > -1: code = 0 output = "model name : Intel(R) Core(TM) i7-6700 CPU @ 3.40GHz\n" + \ "model name : Intel(R) Core(TM) i7-6700 CPU @ 3.40GHz\n" + \ diff --git a/src/core/tests/library/RuntimeCompositor.py b/src/core/tests/library/RuntimeCompositor.py index dd27ab335..2b82a0098 100644 --- a/src/core/tests/library/RuntimeCompositor.py +++ 
b/src/core/tests/library/RuntimeCompositor.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -27,27 +27,29 @@ from core.tests.library.LegacyEnvLayerExtensions import LegacyEnvLayerExtensions from core.src.bootstrap.Bootstrapper import Bootstrapper from core.src.bootstrap.Constants import Constants +from core.src.core_logic.CoreExecutionEngine import CoreExecutionEngine -# Todo: find a different way to import these try: - import urllib2 as urlreq # Python 2.x -except: - import urllib.request as urlreq # Python 3.x + import urllib.request as urlreq # Python 3.x +except ImportError: + import urllib2 as urlreq # Python 2.x try: - from StringIO import StringIO # for Python 2 + from io import StringIO # Python 3.x except ImportError: - from io import StringIO # for Python 3 + from StringIO import StringIO # Python 2.x class RuntimeCompositor(object): - def __init__(self, argv=Constants.DEFAULT_UNSPECIFIED_VALUE, legacy_mode=False, package_manager_name=Constants.APT, vm_cloud_type=Constants.VMCloudType.AZURE): + def __init__(self, argv=Constants.DEFAULT_UNSPECIFIED_VALUE, legacy_mode=False, package_manager_name=Constants.APT, cloud_type=Constants.CloudType.AZURE): # Init data - self.current_env = Constants.DEV - os.environ[Constants.LPE_ENV_VARIABLE] = self.current_env - self.argv = argv if argv != Constants.DEFAULT_UNSPECIFIED_VALUE else ArgumentComposer().get_composed_arguments() - self.vm_cloud_type = vm_cloud_type - Constants.SystemPaths.SYSTEMD_ROOT = os.getcwd() # mocking to pass a basic systemd check in Windows + print("[-- RUNTIME COMPOSITOR DIALTONE --]") + self.current_env = Constants.ExecEnv.DEV + os.environ[Constants.AZGPS_LPE_ENVIRONMENT_VAR] = 
self.current_env + self.argv = argv if argv != Constants.DEFAULT_UNSPECIFIED_VALUE else ArgumentComposer(cloud_type).get_composed_arguments() + self.cloud_type = cloud_type + Constants.SystemPaths.SYSTEMD_ROOT = os.getcwd() # mocking to pass a basic systemd check in Windows + Constants.MAX_PATCH_OPERATION_RETRY_COUNT = 1 self.is_github_runner = os.getenv('RUNNER_TEMP', None) is not None if self.is_github_runner: @@ -66,9 +68,6 @@ def mkdtemp_runner(): # Adapted bootstrapper bootstrapper = Bootstrapper(self.argv, capture_stdout=False) - # Overriding sudo status check - Bootstrapper.check_sudo_status = self.check_sudo_status - # Reconfigure env layer for legacy mode tests self.env_layer = bootstrapper.env_layer if legacy_mode: @@ -85,11 +84,13 @@ def mkdtemp_runner(): bootstrapper.telemetry_writer = self.telemetry_writer bootstrapper.composite_logger.telemetry_writer = self.telemetry_writer - self.lifecycle_manager, self.status_handler = bootstrapper.build_core_components(self.container) + self.lifecycle_manager, self.status_handler, self.execution_config = bootstrapper.get_service_components() # Business logic components - self.execution_config = self.container.get('execution_config') self.legacy_env_layer_extensions.set_temp_folder_path(self.execution_config.temp_folder) + self.patch_mode_manager = self.container.get('patch_mode_manager') + self.sources_manager = self.container.get('sources_manager') + self.health_manager = self.container.get('health_manager') self.package_manager = self.container.get('package_manager') self.backup_get_current_auto_os_patch_state = None self.reconfigure_package_manager() @@ -100,7 +101,12 @@ def mkdtemp_runner(): self.patch_assessor = self.container.get('patch_assessor') self.patch_installer = self.container.get('patch_installer') self.maintenance_window = self.container.get('maintenance_window') - self.vm_cloud_type = bootstrapper.configuration_factory.vm_cloud_type + self.cloud_type = bootstrapper.configuration_factory.cloud_type 
+ self.core_exec = self.container.get('core_execution_engine') + + self.backup_check_minimum_environment_requirements_and_report = self.core_exec.check_minimum_environment_requirements_and_report + self.core_exec.check_minimum_environment_requirements_and_report = self.mock_check_minimum_environment_requirements_and_report + # Extension handler dependency self.write_ext_state_file(self.lifecycle_manager.ext_state_file_path, self.execution_config.sequence_number, datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ"), self.execution_config.operation) @@ -116,6 +122,7 @@ def mkdtemp_runner(): self.configure_patching_processor.auto_assess_service_manager.remove_service = self.mock_remove_service self.backup_remove_timer = self.configure_patching_processor.auto_assess_timer_manager.remove_timer self.configure_patching_processor.auto_assess_timer_manager.remove_timer = self.mock_remove_timer + print("[-- RUNTIME IS READY FOR TEST --]\n") def stop(self): self.file_logger.close(message_at_close="") @@ -149,20 +156,20 @@ def start_reboot(self, message="Test initiated reboot mock"): self.status_handler.set_installation_reboot_status(Constants.RebootStatus.STARTED) def reconfigure_package_manager(self): - self.backup_get_current_auto_os_patch_state = self.package_manager.get_current_auto_os_patch_state - self.package_manager.get_current_auto_os_patch_state = self.get_current_auto_os_patch_state + self.backup_get_current_auto_os_patch_state = self.package_manager.patch_mode_manager.get_current_auto_os_patch_state + self.package_manager.patch_mode_manager.get_current_auto_os_patch_state = self.get_current_auto_os_patch_state def mock_sleep(self, seconds): pass - def check_sudo_status(self, raise_if_not_sudo=True): - return True + def mock_check_minimum_environment_requirements_and_report(self, patch_operation_requested): + pass def get_current_auto_os_patch_state(self): return Constants.AutomaticOSPatchStates.DISABLED def mock_urlopen(self, url, data=None, 
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, cafile=None, capath=None, cadefault=False, context=None): - if self.vm_cloud_type == Constants.VMCloudType.AZURE: + if self.cloud_type == Constants.CloudType.AZURE: resp = urlreq.addinfourl(StringIO("mock file"), "mock message", "mockurl") resp.code = 200 resp.msg = "OK" diff --git a/src/core/tests/library/__init__.py b/src/core/tests/library/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/core/tests/library/__init__.py +++ b/src/core/tests/library/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/__init__.py b/src/extension/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/__init__.py +++ b/src/extension/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/ActionHandler.py b/src/extension/src/ActionHandler.py index c27918311..ea1d0a1bd 100644 --- a/src/extension/src/ActionHandler.py +++ b/src/extension/src/ActionHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -169,9 +169,7 @@ def __log_telemetry_info(self, telemetry_supported, events_folder_previously_exi else: telemetry_info += "[AgentVer=Unknown][GoalStateVer=Unknown]" - if telemetry_supported is True: - self.logger.log("{0} {1}".format(Constants.TELEMETRY_AT_AGENT_COMPATIBLE_MSG, telemetry_info)) - else: + if not telemetry_supported: error_msg = "{0} {1}".format(Constants.TELEMETRY_AT_AGENT_NOT_COMPATIBLE_ERROR_MSG, telemetry_info) self.logger.log_error(error_msg) @@ -181,14 +179,14 @@ def install(self): install_command_handler = InstallCommandHandler(self.logger, self.ext_env_handler) exit_code_from_executing_install = install_command_handler.execute_handler_action() if exit_code_from_executing_install == Constants.ExitCode.Okay or exit_code_from_executing_install is None: - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Success.lower()) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.SUCCESS.lower()) else: - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension install", code=exit_code_from_executing_install) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension install", code=exit_code_from_executing_install) return exit_code_from_executing_install except Exception as error: self.logger.log_error("Error occurred during extension install. 
[Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension install", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension install", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: @@ -219,7 +217,7 @@ def update(self): # b) after all artifacts from the preceding versions have been deleted error_msg = "No earlier versions for the extension found on the machine. So, could not copy any references to the current version." self.logger.log_error(error_msg) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message=error_msg, code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message=error_msg, code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed # identify the version preceding current @@ -230,7 +228,7 @@ def update(self): error_msg = "Could not find path where preceding extension version artifacts are stored. Hence, cannot copy the required artifacts to the latest version. "\ "[Preceding extension version path={0}]".format(str(preceding_version_path)) self.logger.log_error(error_msg) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message=error_msg, code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message=error_msg, code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed self.logger.log("Preceding version path. 
[Path={0}]".format(str(preceding_version_path))) @@ -242,12 +240,12 @@ def update(self): self.ext_env_handler.delete_temp_folder() self.logger.log("All update actions from extension handler completed.") - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Success.lower()) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.SUCCESS.lower()) return Constants.ExitCode.Okay except Exception as error: self.logger.log_error("Error occurred during extension update. [Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension update", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension update", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: @@ -279,14 +277,14 @@ def copy_config_files(self, src, dst, raise_if_not_copied=False): for file_to_copy in files_to_copy: for i in range(0, Constants.MAX_IO_RETRIES): try: - self.logger.log("Copying file. [Source={0}] [Destination={1}]".format(str(file_to_copy), str(dst))) + self.logger.log("Copying file. [Source={0}][Destination={1}]".format(str(file_to_copy), str(dst))) shutil.copy(file_to_copy, dst) break except Exception as error: if i < Constants.MAX_IO_RETRIES - 1: time.sleep(i + 1) else: - error_msg = "Failed to copy file after {0} tries. [Source={1}] [Destination={2}] [Exception={3}]".format(Constants.MAX_IO_RETRIES, str(file_to_copy), str(dst), repr(error)) + error_msg = "Failed to copy file after {0} tries. 
[Source={1}][Destination={2}][Exception={3}]".format(Constants.MAX_IO_RETRIES, str(file_to_copy), str(dst), repr(error)) self.logger.log_error(error_msg) if raise_if_not_copied: raise Exception(error_msg) @@ -300,12 +298,12 @@ def uninstall(self): # Delete temp_folder self.ext_env_handler.delete_temp_folder() - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Success.lower()) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.SUCCESS.lower()) return Constants.ExitCode.Okay except Exception as error: self.logger.log_error("Error occurred during extension uninstall. [Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension uninstall", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension uninstall", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: @@ -317,12 +315,12 @@ def enable(self): enable_command_handler = EnableCommandHandler(self.logger, self.telemetry_writer, self.utility, self.env_health_manager, self.runtime_context_handler, self.ext_env_handler, self.ext_config_settings_handler, self.core_state_handler, self.ext_state_handler, self.ext_output_status_handler, self.process_handler, self.cmd_exec_start_time) exit_code_returned_from_executing_enable = enable_command_handler.execute_handler_action() if exit_code_returned_from_executing_enable is not None and exit_code_returned_from_executing_enable != Constants.ExitCode.Okay: - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension enable", code=exit_code_returned_from_executing_enable) + self.ext_output_status_handler.write_status_file("", 
self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension enable", code=exit_code_returned_from_executing_enable) return Constants.ExitCode.Okay if exit_code_returned_from_executing_enable is None else exit_code_returned_from_executing_enable except Exception as error: self.logger.log_error("Error occurred during extension enable. [Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension enable", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension enable", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: self.tear_down() @@ -356,12 +354,12 @@ def disable(self): # End of temporary auto-assessment disablement self.logger.log("Extension disabled successfully") - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Success.lower()) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.SUCCESS.lower()) return Constants.ExitCode.Okay except Exception as error: self.logger.log_error("Error occurred during extension disable. 
[Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension disable", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension disable", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: self.tear_down() @@ -375,12 +373,12 @@ def reset(self): # Clear temp folder self.ext_env_handler.delete_temp_folder_contents() - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Success.lower()) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.SUCCESS.lower()) return Constants.ExitCode.Okay except Exception as error: self.logger.log_error("Error occurred during extension reset. [Error={0}]".format(repr(error))) - self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.Error.lower(), message="Error occurred during extension reset", code=Constants.ExitCode.HandlerFailed) + self.ext_output_status_handler.write_status_file("", self.seq_no, status=Constants.Status.ERROR.lower(), message="Error occurred during extension reset", code=Constants.ExitCode.HandlerFailed) return Constants.ExitCode.HandlerFailed finally: self.tear_down() diff --git a/src/extension/src/MsftLinuxPatchExtShim.sh b/src/extension/src/AzGPSLinuxPatchExtShim.sh similarity index 93% rename from src/extension/src/MsftLinuxPatchExtShim.sh rename to src/extension/src/AzGPSLinuxPatchExtShim.sh index 714fea85d..3aaeeba5a 100644 --- a/src/extension/src/MsftLinuxPatchExtShim.sh +++ b/src/extension/src/AzGPSLinuxPatchExtShim.sh @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,10 +17,10 @@ # Requires Python 2.7+ # Keeping the default command -COMMAND="MsftLinuxPatchExt.py" +COMMAND="AzGPSLinuxPatchExt.py" PYTHON="" -USAGE="$(basename "$0") [-h] [-i|--install] [-u|--uninstall] [-d|--disable] [-e|--enable] [-p|--update] [-r|--reset] +USAGE="$(basename "$0") [-h][-i|--install][-u|--uninstall][-d|--disable][-e|--enable][-p|--update][-r|--reset] Program to find the installed python on the box and invoke a Python extension script. where: -h|--help show this help text diff --git a/src/extension/src/Constants.py b/src/extension/src/Constants.py index a9f57c844..92e131eb0 100644 --- a/src/extension/src/Constants.py +++ b/src/extension/src/Constants.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -27,14 +27,12 @@ def __iter__(self): if item == self.__dict__[item]: yield item - # Extension version (todo: move to a different file) - EXT_VERSION = "1.6.48" + AZGPS_LPE_VERSION = "[%exec_ver%]" # Runtime environments TEST = 'Test' DEV = 'Dev' PROD = 'Prod' # Azure Native Patch Management - UNKNOWN_ENV = 'Unknown' # Non-functional code placeholder prior to compile # File Constants HANDLER_ENVIRONMENT_FILE = 'HandlerEnvironment.json' @@ -44,8 +42,8 @@ def __iter__(self): HANDLER_ENVIRONMENT_FILE_PATH = os.getcwd() CONFIG_SETTINGS_FILE_EXTENSION = '.settings' STATUS_FILE_EXTENSION = '.status' - CORE_CODE_FILE_NAME = 'MsftLinuxPatchCore.py' - CORE_AUTO_ASSESS_SH_FILE_NAME = "MsftLinuxPatchAutoAssess.sh" + CORE_CODE_FILE_NAME = 'AzGPSLinuxPatchCore.py' + CORE_AUTO_ASSESS_SH_FILE_NAME = "AzGPSLinuxPatchAutoAssess.sh" LOG_FILE_EXTENSION = '.log' LOG_FILES_TO_RETAIN = 15 MAX_LOG_FILES_ALLOWED = 40 @@ -59,29 +57,31 @@ def __iter__(self): ENABLE_MAX_RUNTIME = 3 DISABLE_MAX_RUNTIME = 13 - # Telemetry Settings - # Note: these limits are based on number of characters as confirmed with agent team - TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS = 3072 - TELEMETRY_EVENT_SIZE_LIMIT_IN_CHARS = 6144 - TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 4194304 - TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 41943040 - TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS = 25 # buffer for the chars dropped text added at the end of the truncated telemetry message + class TelemetryConfig(EnumBackport): + """ Telemetry limits that are imposed by the Azure Linux Agent """ + MSG_SIZE_LIMIT_IN_CHARS = 3072 + EVENT_SIZE_LIMIT_IN_CHARS = 6144 + EVENT_FILE_SIZE_LIMIT_IN_CHARS = 4194304 + DIR_SIZE_LIMIT_IN_CHARS = 41943040 + 
BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS = 25 # buffer for the chars dropped text added at the end of the truncated telemetry message + EVENT_COUNTER_MSG_SIZE_LIMIT_IN_CHARS = 15 # buffer for telemetry event counter text added at the end of every message sent to telemetry + MAX_EVENT_COUNT_THROTTLE = 72 # increased by Agent team for AzGPS in 2023 (up from 60) + MAX_TIME_IN_SECONDS_FOR_EVENT_COUNT_THROTTLE = 60 - TELEMETRY_ENABLED_AT_EXTENSION = True TELEMETRY_AT_AGENT_NOT_COMPATIBLE_ERROR_MSG = "The minimum Azure Linux Agent version prerequisite for Linux patching was not met. Please update the Azure Linux Agent on this machine following instructions here: http://aka.ms/UpdateLinuxAgent" - TELEMETRY_AT_AGENT_COMPATIBLE_MSG = "The minimum Azure Linux Agent version prerequisite for Linux patching was met." AZURE_GUEST_AGENT_EXTENSION_SUPPORTED_FEATURES_ENV_VAR = 'AZURE_GUEST_AGENT_EXTENSION_SUPPORTED_FEATURES' TELEMETRY_EXTENSION_PIPELINE_SUPPORTED_KEY = 'ExtensionTelemetryPipeline' # Telemetry Event Level - class TelemetryEventLevel(EnumBackport): - Critical = "Critical" + class EventLevel(EnumBackport): + # Critical = "Critical" # unused by AzGPS Error = "Error" Warning = "Warning" - Verbose = "Verbose" - Informational = "Informational" - LogAlways = "LogAlways" + Info = "Informational" + Debug = "Debug" + Verbose = "Verbose" # do not log to telemetry - AzGPS override + # LogAlways = "LogAlways" # unused by AzGPS TELEMETRY_TASK_NAME = "Handler" @@ -93,15 +93,19 @@ class TelemetryEventLevel(EnumBackport): # Re-try limit for verifying core process has started successfully MAX_PROCESS_STATUS_CHECK_RETRIES = 5 - # Operations - NOOPERATION = "NoOperation" - PATCH_NOOPERATION_SUMMARY = "PatchNoOperationSummary" - ASSESSMENT = "Assessment" - PATCH_ASSESSMENT_SUMMARY = "PatchAssessmentSummary" - INSTALLATION = "Installation" - PATCH_INSTALLATION_SUMMARY = "PatchInstallationSummary" - CONFIGURE_PATCHING = "ConfigurePatching" - CONFIGURE_PATCHING_SUMMARY = 
"ConfigurePatchingSummary" + class Op(EnumBackport): + NO_OPERATION = "NoOperation" # only used in handler + ASSESSMENT = "Assessment" + INSTALLATION = "Installation" + CONFIGURE_PATCHING = "ConfigurePatching" + CONFIGURE_PATCHING_AUTO_ASSESSMENT = "ConfigurePatching_AutoAssessment" + + class OpSummary(EnumBackport): + NO_OPERATION = "PatchNoOperationSummary" # only used in handler + """ CONFIGURE_PATCHING = "ConfigurePatchingSummary" + ASSESSMENT = "PatchAssessmentSummary" + INSTALLATION = "PatchInstallationSummary" + PATCH_METADATA_FOR_HEALTHSTORE = "PatchMetadataForHealthStore" """ # Handler actions ENABLE = "Enable" @@ -121,7 +125,7 @@ class PatchOperationTopLevelErrorCode(EnumBackport): class PatchOperationErrorCodes(EnumBackport): # todo: finalize these error codes - PACKAGE_MANAGER_FAILURE = "PACKAGE_MANAGER_FAILURE" + PACKAGE_MANAGER_FAILURE = "CL_PACKAGE_MANAGER_FAILURE" OPERATION_FAILED = "OPERATION_FAILED" DEFAULT_ERROR = "ERROR" # default error code @@ -198,10 +202,10 @@ class CoreStateFields(EnumBackport): # Status values class Status(EnumBackport): - Transitioning = "Transitioning" - Error = "Error" - Success = "Success" - Warning = "Warning" + TRANSITIONING = "Transitioning" + ERROR = "Error" + SUCCESS = "Success" + WARNING = "Warning" class ExitCode(EnumBackport): Okay = 0 diff --git a/src/extension/src/EnableCommandHandler.py b/src/extension/src/EnableCommandHandler.py index 22dd404fb..cea8720fd 100644 --- a/src/extension/src/EnableCommandHandler.py +++ b/src/extension/src/EnableCommandHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -59,9 +59,9 @@ def execute_handler_action(self): operation = config_settings.__getattribute__(self.config_public_settings.operation) # Allow only certain operations - if operation not in [Constants.NOOPERATION, Constants.ASSESSMENT, Constants.INSTALLATION, Constants.CONFIGURE_PATCHING]: + if operation not in [Constants.Op.NO_OPERATION, Constants.Op.ASSESSMENT, Constants.Op.INSTALLATION, Constants.Op.CONFIGURE_PATCHING]: self.logger.log_error("Requested operation is not supported by the extension") - self.ext_output_status_handler.write_status_file(operation, self.seq_no, status=Constants.Status.Error.lower(), message="Requested operation {0} is not supported by the extension".format(str(operation)), code=Constants.ExitCode.OperationNotSupported) + self.ext_output_status_handler.write_status_file(operation, self.seq_no, status=Constants.Status.ERROR.lower(), message="Requested operation {0} is not supported by the extension".format(str(operation)), code=Constants.ExitCode.OperationNotSupported) exit(Constants.ExitCode.OperationNotSupported) prev_patch_max_end_time = self.cmd_exec_start_time + datetime.timedelta(hours=0, minutes=Constants.ENABLE_MAX_RUNTIME) @@ -72,7 +72,7 @@ def execute_handler_action(self): self.ext_env_handler.log_temp_folder_details() # if NoOperation is requested, terminate all running processes from previous operation and update status file - if operation == Constants.NOOPERATION: + if operation == Constants.Op.NO_OPERATION: self.process_nooperation(config_settings, core_state_content) else: # if any of the other operations are requested, verify if request is a new request or a re-enable, by comparing sequence number from the prev request and current one @@ -123,7 
+123,7 @@ def launch_new_process(self, config_settings, create_status_output_file): # create Status file if create_status_output_file: - self.ext_output_status_handler.write_status_file(config_settings.__getattribute__(self.config_public_settings.operation), self.seq_no, status=self.status.Transitioning.lower()) + self.ext_output_status_handler.write_status_file(config_settings.__getattribute__(self.config_public_settings.operation), self.seq_no, status=self.status.TRANSITIONING.lower()) else: self.ext_output_status_handler.update_file(self.seq_no) # launch core code in a process and exit extension handler @@ -133,16 +133,16 @@ def launch_new_process(self, config_settings, create_status_output_file): def process_nooperation(self, config_settings, core_state_content): self.logger.log("NoOperation requested. Terminating older patch operation, if still in progress.") - self.ext_output_status_handler.set_current_operation(Constants.NOOPERATION) + self.ext_output_status_handler.set_current_operation(Constants.Op.NO_OPERATION) activity_id = config_settings.__getattribute__(self.config_public_settings.activity_id) operation = config_settings.__getattribute__(self.config_public_settings.operation) start_time = config_settings.__getattribute__(self.config_public_settings.start_time) try: - self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, seq_no=self.seq_no, status=Constants.Status.Transitioning) + self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, seq_no=self.seq_no, status=Constants.Status.TRANSITIONING) self.runtime_context_handler.terminate_processes_from_previous_operation(self.process_handler, core_state_content) self.utility.delete_file(self.core_state_handler.dir_path, self.core_state_handler.file, raise_if_not_found=False) # ToDo: log prev activity id later - self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, 
seq_no=self.seq_no, status=Constants.Status.Success) + self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, seq_no=self.seq_no, status=Constants.Status.SUCCESS) self.logger.log("exiting extension handler") exit(Constants.ExitCode.Okay) except Exception as error: @@ -152,5 +152,5 @@ def process_nooperation(self, config_settings, core_state_content): self.ext_output_status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.OPERATION_FAILED) else: self.ext_output_status_handler.add_error_to_status("Error executing NoOperation due to last reported error.", Constants.PatchOperationErrorCodes.OPERATION_FAILED) - self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, seq_no=self.seq_no, status=Constants.Status.Error) + self.ext_output_status_handler.set_nooperation_substatus_json(operation, activity_id, start_time, seq_no=self.seq_no, status=Constants.Status.ERROR) diff --git a/src/extension/src/EnvHealthManager.py b/src/extension/src/EnvHealthManager.py index 714945bd9..57f0b6036 100644 --- a/src/extension/src/EnvHealthManager.py +++ b/src/extension/src/EnvHealthManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,35 +20,25 @@ class EnvHealthManager(object): def __init__(self, env_layer): self.env_layer = env_layer - def check_sudo_status(self, raise_if_not_sudo=True): - """ Checks if we can invoke sudo successfully. """ + def check_sudo_status(self): + """ + Checks if we can invoke sudo successfully. 
+ Reference output: tools/references/cmd_output_references/sudo_output_expected.txt + """ + error_details = None try: print("Performing sudo status check... This should complete within 10 seconds.") - return_code, output = self.env_layer.run_command_output("timeout 10 sudo id && echo True || echo False", False, False) - # output should look like either this (bad): - # [sudo] password for username: - # False - # or this (good): - # uid=0(root) gid=0(root) groups=0(root) - # True + return_code, output = self.env_layer.run_command_output("sudo timeout 10 id && echo True || echo False", False, False) output_lines = output.splitlines() - if len(output_lines) < 2: - raise Exception("Unexpected sudo check result. Output: " + " ".join(output.split("\n"))) - - if output_lines[1] == "True": + if len(output_lines) >= 2 and output_lines[1] == "True": return True - elif output_lines[1] == "False": - if raise_if_not_sudo: - raise Exception("Unable to invoke sudo successfully. Output: " + " ".join(output.split("\n"))) - return False else: - raise Exception("Unexpected sudo check result. Output: " + " ".join(output.split("\n"))) + error_details = "[Output={0}]".format(" | ".join(output.split("\n"))) except Exception as exception: - print("Sudo status check failed. Please ensure the computer is configured correctly for sudo invocation. " + - "Exception details: " + str(exception)) - if raise_if_not_sudo: - raise + error_details = str("[Error={0}]".format(str(exception))) + + raise Exception("Sudo status check failed. Please ensure the computer is configured correctly for sudo invocation. " + str(error_details)) def ensure_tty_not_required(self): """ Checks current tty settings in /etc/sudoers and disables it within the current user context, if required. Sudo commands don't execute if tty is required. 
""" @@ -63,10 +53,10 @@ def disable_tty_for_current_user(self): """ Sets requiretty to False in the custom sudoers file for linuxpatchextension""" try: disable_tty_for_current_user_config = "Defaults:" + self.env_layer.get_current_user() + " !" + self.env_layer.require_tty_setting + "\n" - print("Disabling tty for current user in custom sudoers for the extension [FileName={0}] [ConfigAdded={1}]".format(str(self.env_layer.etc_sudoers_linux_patch_extension_file_path), disable_tty_for_current_user_config)) + print("Disabling tty for current user in custom sudoers for the extension [FileName={0}][ConfigAdded={1}]".format(str(self.env_layer.etc_sudoers_linux_patch_extension_file_path), disable_tty_for_current_user_config)) self.env_layer.file_system.write_with_retry(self.env_layer.etc_sudoers_linux_patch_extension_file_path, disable_tty_for_current_user_config, mode='w+') print("tty for current user disabled") except Exception as error: - print("Error occurred while disabling tty for current user. [FileName={0}] [Error={1}]".format(str(self.env_layer.etc_sudoers_linux_patch_extension_file_path), repr(error))) + print("Error occurred while disabling tty for current user. [FileName={0}][Error={1}]".format(str(self.env_layer.etc_sudoers_linux_patch_extension_file_path), repr(error))) raise diff --git a/src/extension/src/EnvLayer.py b/src/extension/src/EnvLayer.py index de85ffd60..520ba9099 100644 --- a/src/extension/src/EnvLayer.py +++ b/src/extension/src/EnvLayer.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -162,7 +162,7 @@ def is_tty_required_in_sudoers(self): return tty_set_to_required except Exception as error: - print("Error occurred while fetching data from [FilePath={0}] [Exception={1}]".format(str(self.etc_sudoers_file_path), repr(error))) + print("Error occurred while fetching data from [FilePath={0}][Exception={1}]".format(str(self.etc_sudoers_file_path), repr(error))) raise def is_tty_defaults_set(self, setting_substr_without_requiretty): @@ -186,7 +186,7 @@ def is_tty_disabled_in_linux_patch_extension_sudoers(self): return True return False except Exception as error: - print("Error occurred while fetching data from [FilePath={0}] [Exception={1}]".format(str(self.etc_sudoers_file_path), repr(error))) + print("Error occurred while fetching data from [FilePath={0}][Exception={1}]".format(str(self.etc_sudoers_file_path), repr(error))) raise @staticmethod diff --git a/src/extension/src/HandlerManifest.json b/src/extension/src/HandlerManifest.json index 3f08d65ed..6c5babd55 100644 --- a/src/extension/src/HandlerManifest.json +++ b/src/extension/src/HandlerManifest.json @@ -2,12 +2,12 @@ { "version": 1.0, "handlerManifest": { - "disableCommand": "MsftLinuxPatchExtShim.sh -d", - "enableCommand": "MsftLinuxPatchExtShim.sh -e", - "installCommand": "MsftLinuxPatchExtShim.sh -i", - "uninstallCommand": "MsftLinuxPatchExtShim.sh -u", - "updateCommand": "MsftLinuxPatchExtShim.sh -p", - "resetStateCommand": "MsftLinuxPatchExtShim.sh -r", + "disableCommand": "AzGPSLinuxPatchExtShim.sh -d", + "enableCommand": "AzGPSLinuxPatchExtShim.sh -e", + "installCommand": "AzGPSLinuxPatchExtShim.sh -i", + "uninstallCommand": "AzGPSLinuxPatchExtShim.sh -u", + "updateCommand": "AzGPSLinuxPatchExtShim.sh -p", + 
"resetStateCommand": "AzGPSLinuxPatchExtShim.sh -r", "rebootAfterInstall": false, "reportHeartbeat": false, "updateMode": "UpdateWithoutInstall" diff --git a/src/extension/src/InstallCommandHandler.py b/src/extension/src/InstallCommandHandler.py index f8c1b22ec..6fddac3ee 100644 --- a/src/extension/src/InstallCommandHandler.py +++ b/src/extension/src/InstallCommandHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -63,7 +63,7 @@ def validate_key(self, key, config_type, data_type, is_required, file): if is_required: # Required key doesn't exist in config file if key not in config_type: - error_msg = "Config not found in file. [Config={0}] [File={1}]".format(key, file) + error_msg = "Config not found in file. [Config={0}][File={1}]".format(key, file) self.logger.log_error_and_raise_new_exception(error_msg, Exception) # Required key doesn't have value elif data_type is not bool and not config_type[key]: diff --git a/src/extension/src/ProcessHandler.py b/src/extension/src/ProcessHandler.py index aed742bc6..854741f8d 100644 --- a/src/extension/src/ProcessHandler.py +++ b/src/extension/src/ProcessHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -163,7 +163,7 @@ def __check_process_state(self, process, seq_no): self.logger.log("Exception from process.communicate() while getting output from core process. 
Exception:{0}".format(repr(error))) self.logger.log("Process not running for [sequence={0}]".format(seq_no)) - self.logger.log("Output and error for the inactive process: [Output={0}] [Error={1}]".format(str(output), str(unused_err))) + self.logger.log("Output and error for the inactive process: [Output={0}][Error={1}]".format(str(output), str(unused_err))) return did_process_start @@ -201,8 +201,8 @@ def kill_process(self, pid): self.logger.log("Terminating process: [PID={0}]".format(str(pid))) os.kill(pid, signal.SIGTERM) except OSError as error: - self.logger.log_error("Error terminating process. [Process ID={0}] [Error={1}]".format(pid, repr(error))) - self.ext_output_status_handler.add_error_to_status("Error terminating process. [Process ID={0}] [Error={1}]".format(pid, repr(error)), Constants.PatchOperationErrorCodes.DEFAULT_ERROR) + self.logger.log_error("Error terminating process. [Process ID={0}][Error={1}]".format(pid, repr(error))) + self.ext_output_status_handler.add_error_to_status("Error terminating process. [Process ID={0}][Error={1}]".format(pid, repr(error)), Constants.PatchOperationErrorCodes.DEFAULT_ERROR) if Constants.ERROR_ADDED_TO_STATUS not in repr(error): error.args = (error.args, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) raise diff --git a/src/extension/src/RuntimeContextHandler.py b/src/extension/src/RuntimeContextHandler.py index 9e513ab7d..d3b081d09 100644 --- a/src/extension/src/RuntimeContextHandler.py +++ b/src/extension/src/RuntimeContextHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/TelemetryWriter.py b/src/extension/src/TelemetryWriter.py index af7747eac..b0e8a9501 100644 --- a/src/extension/src/TelemetryWriter.py +++ b/src/extension/src/TelemetryWriter.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -40,7 +40,7 @@ def __init__(self, logger, env_layer): def __new_event_json(self, event_level, message, task_name): return { - "Version": Constants.EXT_VERSION, + "Version": Constants.AZGPS_LPE_VERSION, "Timestamp": str(datetime.datetime.utcnow()), "TaskName": task_name, "EventLevel": event_level, @@ -53,14 +53,14 @@ def __new_event_json(self, event_level, message, task_name): def __ensure_message_restriction_compliance(self, full_message): """ Removes line breaks, tabs and restricts message to a byte limit """ try: - message_size_limit_in_chars = Constants.TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS + message_size_limit_in_chars = Constants.TelemetryConfig.MSG_SIZE_LIMIT_IN_CHARS formatted_message = re.sub(r"\s+", " ", str(full_message)) if len(formatted_message.encode('utf-8')) > message_size_limit_in_chars: self.logger.log_telemetry_module("Data sent to telemetry will be truncated as it exceeds size limit. 
[Message={0}]".format(str(formatted_message))) formatted_message = formatted_message.encode('utf-8') - chars_dropped = len(formatted_message) - message_size_limit_in_chars + Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS - return formatted_message[:message_size_limit_in_chars - Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS].decode('utf-8') + '. [{0} chars dropped]'.format(chars_dropped) + chars_dropped = len(formatted_message) - message_size_limit_in_chars + Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + return formatted_message[:message_size_limit_in_chars - Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS].decode('utf-8') + '. [{0} chars dropped]'.format(chars_dropped) return formatted_message @@ -90,17 +90,17 @@ def __get_agent_supports_telemetry_from_env_var(self): def __events_folder_exists(self): return self.events_folder_path is not None and os.path.exists(self.events_folder_path) - def write_event(self, message, event_level=Constants.TelemetryEventLevel.Informational, task_name=Constants.TELEMETRY_TASK_NAME): + def write_event(self, message, event_level=Constants.EventLevel.Info, task_name=Constants.TELEMETRY_TASK_NAME): """ Creates and writes event to event file after validating none of the telemetry size restrictions are breached """ try: - if not self.is_telemetry_supported() or not Constants.TELEMETRY_ENABLED_AT_EXTENSION: + if not self.is_telemetry_supported(): return self.__delete_older_events() task_name = self.__task_name if task_name == Constants.TELEMETRY_TASK_NAME else task_name event = self.__new_event_json(event_level, message, task_name) - if len(json.dumps(event)) > Constants.TELEMETRY_EVENT_SIZE_LIMIT_IN_CHARS: + if len(json.dumps(event)) > Constants.TelemetryConfig.EVENT_SIZE_LIMIT_IN_CHARS: self.logger.log_telemetry_module_error("Cannot send data to telemetry as it exceeded the acceptable data size. 
[Data not sent={0}]".format(json.dumps(message))) else: self.__write_event_using_temp_file(self.events_folder_path, event) @@ -111,7 +111,7 @@ def write_event(self, message, event_level=Constants.TelemetryEventLevel.Informa def __delete_older_events(self): """ Delete older events until the at least one new event file can be added as per the size restrictions """ try: - if self.__get_events_dir_size() < Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() < Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS - Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: # Not deleting any existing event files as the event directory does not exceed max limit. At least one new event file can be added. Not printing this statement as it will add repetitive logs return @@ -121,7 +121,7 @@ def __delete_older_events(self): for event_file in event_files: try: - if self.__get_events_dir_size() < Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() < Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS - Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: # Not deleting any more event files as the event directory has sufficient space to add at least one new event file. Not printing this statement as it will add repetitive logs break @@ -129,9 +129,9 @@ def __delete_older_events(self): os.remove(event_file) self.logger.log_telemetry_module("Deleted event file. [File={0}]".format(repr(event_file))) except Exception as e: - self.logger.log_telemetry_module_error("Error deleting event file. [File={0}] [Exception={1}]".format(repr(event_file), repr(e))) + self.logger.log_telemetry_module_error("Error deleting event file. 
[File={0}][Exception={1}]".format(repr(event_file), repr(e))) - if self.__get_events_dir_size() >= Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS: + if self.__get_events_dir_size() >= Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS: self.logger.log_telemetry_module_error("Older event files were not deleted. Current event will not be sent to telemetry as events directory size exceeds maximum limit") raise @@ -147,7 +147,7 @@ def __write_event_using_temp_file(self, folder_path, data, mode='w'): if os.path.exists(file_path): file_size = self.get_file_size(file_path) # if file_size exceeds max limit, sleep for 1 second, so the event can be written to a new file since the event file name is a timestamp - if file_size >= Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS: + if file_size >= Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS: time.sleep(1) file_path = self.__get_event_file_path(folder_path) else: @@ -159,7 +159,7 @@ def __write_event_using_temp_file(self, folder_path, data, mode='w'): tempname = tf.name shutil.move(tempname, file_path) except Exception as error: - self.logger.log_telemetry_module_error("Unable to write to telemetry. [Event File={0}] [Error={1}].".format(str(file_path), repr(error))) + self.logger.log_telemetry_module_error("Unable to write to telemetry. [Event File={0}][Error={1}].".format(str(file_path), repr(error))) raise def set_operation_id(self, operation_id): @@ -194,7 +194,7 @@ def get_agent_version(self): return self.__extract_agent_version_from_string(r'WALinuxAgent-\S+ running', out) # Command failed, so log error and debugging information - self.logger.log_telemetry_module_error('Failed to execute command to get guest agent version. [Code={0}] [Out={1}]'.format(str(code), str(out))) + self.logger.log_telemetry_module_error('Failed to execute command to get guest agent version. 
[Code={0}][Out={1}]'.format(str(code), str(out))) return None def get_goal_state_agent_version(self): @@ -210,7 +210,7 @@ def get_goal_state_agent_version(self): return self.__extract_agent_version_from_string(r'Goal state agent: \S+', out) # Command failed, so log error and debugging information - self.logger.log_telemetry_module_error('Failed to execute command to get guest agent goal state version. [Cmd={0}] [Code={1}] [Out={2}]'.format(cmd, str(code), str(out))) + self.logger.log_telemetry_module_error('Failed to execute command to get guest agent goal state version. [Cmd={0}][Code={1}][Out={2}]'.format(cmd, str(code), str(out))) return None def __extract_agent_version_from_string(self, pattern, string): @@ -219,14 +219,14 @@ def __extract_agent_version_from_string(self, pattern, string): regex = re.compile(pattern) version_str_search = regex.search(string) if version_str_search is None: - self.logger.log_telemetry_module_error('Failed to extract agent version substring from agent version command output. [Input={0}] [Pattern={1}]'.format(string, pattern)) + self.logger.log_telemetry_module_error('Failed to extract agent version substring from agent version command output. [Input={0}][Pattern={1}]'.format(string, pattern)) return None # Extract the version string regex = re.compile(r'(\d+[.]*)+') version_search = regex.search(version_str_search.group()) if version_search is None: - self.logger.log_telemetry_module_error('Failed to extract agent version from agent version command output. [Input={0}] [Pattern={1}]'.format(string, pattern)) + self.logger.log_telemetry_module_error('Failed to extract agent version from agent version command output. [Input={0}][Pattern={1}]'.format(string, pattern)) return None return version_search.group() @@ -251,7 +251,7 @@ def __fetch_events_from_previous_file(self, file_path): if error.errno == errno.ENOENT: return [] else: - self.logger.log_telemetry_module_error("Error occurred while fetching contents from existing event file. 
[File={0}] [Error={1}].".format(repr(file_path), repr(error))) + self.logger.log_telemetry_module_error("Error occurred while fetching contents from existing event file. [File={0}][Error={1}].".format(repr(file_path), repr(error))) raise def is_telemetry_supported(self): diff --git a/src/extension/src/Utility.py b/src/extension/src/Utility.py index 837c5591a..b81898eb3 100644 --- a/src/extension/src/Utility.py +++ b/src/extension/src/Utility.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -39,10 +39,10 @@ def delete_file(self, dir_path, file, raise_if_not_found=True): os.remove(file_path) return True except Exception as e: - error_msg = "Trial {0}: Could not delete file. [File={1}] [Exception={2}]".format(retry+1, file, repr(e)) + error_msg = "Trial {0}: Could not delete file. [File={1}][Exception={2}]".format(retry+1, file, repr(e)) self.logger.log_warning(error_msg) - error_msg = "Failed to delete file after {0} tries. [File={1}] [Exception={2}]".format(self.retry_count, file, error_msg) + error_msg = "Failed to delete file after {0} tries. [File={1}][Exception={2}]".format(self.retry_count, file, error_msg) self.logger.log_error(error_msg) else: error_msg = "File Not Found: [File={0}] in [path={1}]".format(file, dir_path) diff --git a/src/extension/src/__init__.py b/src/extension/src/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/src/__init__.py +++ b/src/extension/src/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/__main__.py b/src/extension/src/__main__.py index b178f21b7..547f8962d 100644 --- a/src/extension/src/__main__.py +++ b/src/extension/src/__main__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -44,7 +44,7 @@ def main(argv): exit_code = None try: # initializing action handler - # args will have values install, uninstall, etc, as given in MsftLinuxPatchExtShim.sh in the operation var + # args will have values install, uninstall, etc, as given in AzGPSLinuxPatchExtShim.sh in the operation var cmd_exec_start_time = datetime.datetime.utcnow() utility = Utility(logger) runtime_context_handler = RuntimeContextHandler(logger) @@ -67,7 +67,7 @@ def main(argv): exit_code = Constants.ExitCode.Okay if exit_code_from_handler_actions is None else exit_code_from_handler_actions else: error_cause = "No configuration provided in HandlerEnvironment" if ext_env_handler.handler_environment_json is None else "Path to config folder not specified in HandlerEnvironment" - error_msg = "Error processing file. [File={0}] [Error={1}]".format(Constants.HANDLER_ENVIRONMENT_FILE, error_cause) + error_msg = "Error processing file. 
[File={0}][Error={1}]".format(Constants.HANDLER_ENVIRONMENT_FILE, error_cause) raise Exception(error_msg) except Exception as error: logger.log_error(repr(error)) diff --git a/src/extension/src/file_handlers/CoreStateHandler.py b/src/extension/src/file_handlers/CoreStateHandler.py index d5652f290..303f2eadd 100644 --- a/src/extension/src/file_handlers/CoreStateHandler.py +++ b/src/extension/src/file_handlers/CoreStateHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/file_handlers/ExtConfigSettingsHandler.py b/src/extension/src/file_handlers/ExtConfigSettingsHandler.py index d858bcb5f..ec99bb371 100644 --- a/src/extension/src/file_handlers/ExtConfigSettingsHandler.py +++ b/src/extension/src/file_handlers/ExtConfigSettingsHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -92,7 +92,7 @@ def __get_seq_no_from_config_settings(self): if re.match('^\d+' + self.file_ext + '$', file): cur_seq_no = int(os.path.basename(file).split('.')[0]) file_modified_time = os.path.getmtime(os.path.join(self.config_folder, file)) - self.logger.log("Sequence number being considered and the corresponding file modified time. [Sequence No={0}] [Modified={1}]".format(str(cur_seq_no), str(file_modified_time))) + self.logger.log("Sequence number being considered and the corresponding file modified time. 
[Sequence No={0}][Modified={1}]".format(str(cur_seq_no), str(file_modified_time))) if freshest_time is None: freshest_time = file_modified_time seq_no = cur_seq_no @@ -140,7 +140,7 @@ def read_file(self, seq_no): config_invalid_due_to = "no content found in the file" if config_settings_json is None else "settings not in expected format" raise Exception("Config Settings json file invalid due to " + config_invalid_due_to) except Exception as error: - error_msg = "Error processing config settings file. [Sequence Number={0}] [Exception= {1}]".format(seq_no, repr(error)) + error_msg = "Error processing config settings file. [Sequence Number={0}][Exception= {1}]".format(seq_no, repr(error)) self.logger.log_error(error_msg) raise diff --git a/src/extension/src/file_handlers/ExtEnvHandler.py b/src/extension/src/file_handlers/ExtEnvHandler.py index 9cbec33cf..b3052a2ce 100644 --- a/src/extension/src/file_handlers/ExtEnvHandler.py +++ b/src/extension/src/file_handlers/ExtEnvHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/file_handlers/ExtOutputStatusHandler.py b/src/extension/src/file_handlers/ExtOutputStatusHandler.py index 5a77d6d4c..eb7a061c6 100644 --- a/src/extension/src/file_handlers/ExtOutputStatusHandler.py +++ b/src/extension/src/file_handlers/ExtOutputStatusHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -26,7 +26,7 @@ "version": 1.0, "timestampUTC": "2019-07-20T12:12:14Z", "status": { - "name": "Azure Patch Management", + "name": "Azure Guest Patching Service", "operation": "Assessment / Installation / NoOperation / ConfigurePatching", "status": "transitioning / error / success / warning", "code": 0, @@ -62,7 +62,7 @@ def __init__(self, logger, utility, json_file_handler, dir_path): #ToDo: move it to some other location, since seq no is not available at load # self.read_file() - def write_status_file(self, operation, seq_no, status=Constants.Status.Transitioning.lower(), message="", code=Constants.ExitCode.Okay): + def write_status_file(self, operation, seq_no, status=Constants.Status.TRANSITIONING.lower(), message="", code=Constants.ExitCode.Okay): if seq_no is None: self.logger.log_error("Cannot write status file since sequence number is not available. [Sequence={0}]".format(str(seq_no))) return @@ -81,7 +81,7 @@ def __new_basic_status_json(self, operation, status, message="", code=Constants. 
self.file_keys.version: 1.0, self.file_keys.timestamp_utc: str(self.utility.get_str_from_datetime(datetime.datetime.utcnow())), self.file_keys.status: { - self.file_keys.status_name: "Azure Patch Management", + self.file_keys.status_name: "Azure Guest Patching Service", self.file_keys.status_operation: str(operation), self.file_keys.status_status: status.lower(), self.file_keys.status_code: code, @@ -108,7 +108,7 @@ def read_file(self, seq_no): for i in range(0, len(status_json[0]['status']['substatus'])): name = status_json[0]['status']['substatus'][i]['name'] - if name == Constants.PATCH_NOOPERATION_SUMMARY: # if it exists, it must be to spec, or an exception will get thrown + if name == Constants.OpSummary.NO_OPERATION: # if it exists, it must be to spec, or an exception will get thrown message = status_json[0]['status']['substatus'][i]['formattedMessage']['message'] self.__nooperation_summary_json = json.loads(message) errors = self.__nooperation_summary_json['errors'] @@ -128,7 +128,7 @@ def update_key_value_safely(self, status_json, key, value_to_update, parent_key= else: self.logger.log_error("Error updating config value in status file. 
[Config={0}]".format(key)) - def update_file(self, seq_no, status=Constants.Status.Transitioning.lower(), code=Constants.ExitCode.Okay, message=""): + def update_file(self, seq_no, status=Constants.Status.TRANSITIONING.lower(), code=Constants.ExitCode.Okay, message=""): """ Reseting status, code and message with latest timestamp, while retaining all other values""" try: file_name = self.__get_status_file_name(seq_no) @@ -151,13 +151,13 @@ def update_file(self, seq_no, status=Constants.Status.Transitioning.lower(), cod def __get_status_file_name(self, seq_no): return str(seq_no) + self.file_ext - def set_nooperation_substatus_json(self, operation, activity_id, start_time, seq_no, status=Constants.Status.Transitioning, code=Constants.ExitCode.Okay): + def set_nooperation_substatus_json(self, operation, activity_id, start_time, seq_no, status=Constants.Status.TRANSITIONING, code=Constants.ExitCode.Okay): """ Prepare the nooperation substatus json including the message containing nooperation summary """ # Wrap patches into nooperation summary self.__nooperation_summary_json = self.new_nooperation_summary_json(activity_id, start_time) # Wrap nooperation summary into nooperation substatus - self.__nooperation_substatus_json = self.new_substatus_json_for_operation(Constants.PATCH_NOOPERATION_SUMMARY, status, code, json.dumps(self.__nooperation_summary_json)) + self.__nooperation_substatus_json = self.new_substatus_json_for_operation(Constants.OpSummary.NO_OPERATION, status, code, json.dumps(self.__nooperation_summary_json)) # Update status on disk self.write_status_file(operation, seq_no, status=status) @@ -209,7 +209,7 @@ def add_error_to_status(self, message, error_code=Constants.PatchOperationErrorC "message": str(formatted_message) } - if self.__current_operation == Constants.NOOPERATION: + if self.__current_operation == Constants.Op.NO_OPERATION: if self.__try_add_error(self.__nooperation_errors, error_detail): self.__nooperation_total_error_count += 1 else: diff 
--git a/src/extension/src/file_handlers/ExtStateHandler.py b/src/extension/src/file_handlers/ExtStateHandler.py index 02f8d3b42..a05a8938a 100644 --- a/src/extension/src/file_handlers/ExtStateHandler.py +++ b/src/extension/src/file_handlers/ExtStateHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/file_handlers/JsonFileHandler.py b/src/extension/src/file_handlers/JsonFileHandler.py index 4508d1b65..7dc02bfcd 100644 --- a/src/extension/src/file_handlers/JsonFileHandler.py +++ b/src/extension/src/file_handlers/JsonFileHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -38,13 +38,13 @@ def get_json_file_content(self, file_name, dir_path, raise_if_not_found=False): file_contents = file_handle.read() return json.loads(file_contents) except ValueError as e: - error_msg = "Incorrect file format. [File={0}] [Location={1}] [Exception={2}]".format(file_name, str(file_path), repr(e)) + error_msg = "Incorrect file format. [File={0}][Location={1}][Exception={2}]".format(file_name, str(file_path), repr(e)) self.logger.log_warning(error_msg) except Exception as e: - error_msg = "Trial {0}: Could not read file. [File={1}] [Location={2}] [Exception={3}]".format(retry + 1, file_name, str(file_path), repr(e)) + error_msg = "Trial {0}: Could not read file. 
[File={1}][Location={2}][Exception={3}]".format(retry + 1, file_name, str(file_path), repr(e)) self.logger.log_warning(error_msg) - error_msg = "Failed to read file after {0} tries. [File={1}] [Location={2}] [Exception={3}]".format(self.retry_count, file_name, str(file_path), error_msg) + error_msg = "Failed to read file after {0} tries. [File={1}][Location={2}][Exception={3}]".format(self.retry_count, file_name, str(file_path), error_msg) self.logger.log_warning(error_msg) if raise_if_not_found: self.logger.log_error("Extension cannot continue without this file. [File={0}]".format(file_name)) @@ -68,7 +68,7 @@ def write_to_json_file(self, dir_path, file_name, content): if os.path.exists(dir_path): file_path = os.path.join(dir_path, file_name) error_message = "" - self.logger.log("Writing JSON file. [File={0}] [Content={1}]".format(file_name, str(content))) + self.logger.log("Writing JSON file. [File={0}][Content={1}]".format(file_name, str(content))) for retry in range(0, self.retry_count): try: time.sleep(retry) @@ -76,10 +76,10 @@ def write_to_json_file(self, dir_path, file_name, content): json.dump(content, json_file, default=self.json_default_converter) return except Exception as error: - error_message = "Trial {0}: Could not write to file. [File={1}] [Location={2}] [Exception={3}]".format(retry+1, file_name, str(file_path), error) + error_message = "Trial {0}: Could not write to file. [File={1}][Location={2}][Exception={3}]".format(retry+1, file_name, str(file_path), error) self.logger.log_warning(error_message) - error_msg = "Failed to write to file after {0} tries. [File={1}] [Location={2}] [Exception={3}]".format(self.retry_count, file_name, str(file_path), error_message) + error_msg = "Failed to write to file after {0} tries. 
[File={1}][Location={2}][Exception={3}]".format(self.retry_count, file_name, str(file_path), error_message) self.logger.log_error_and_raise_new_exception(error_msg, Exception) else: error_msg = "Directory Not Found: [Directory={0}]".format(dir_path) diff --git a/src/extension/src/file_handlers/__init__.py b/src/extension/src/file_handlers/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/src/file_handlers/__init__.py +++ b/src/extension/src/file_handlers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/local_loggers/FileLogger.py b/src/extension/src/local_loggers/FileLogger.py index ce7018a56..98bd38add 100644 --- a/src/extension/src/local_loggers/FileLogger.py +++ b/src/extension/src/local_loggers/FileLogger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -28,7 +28,7 @@ def __init__(self, log_folder, log_file): self.log_file_path = os.path.join(log_folder, log_file) self.log_file_handle = open(self.log_file_path, "a") except Exception as error: - sys.stdout.write("FileLogger - Error opening file. [File={0}] [Exception={1}]".format(self.log_file_path, repr(error))) + sys.stdout.write("FileLogger - Error opening file. 
[File={0}][Exception={1}]".format(self.log_file_path, repr(error))) # Retaining 10 most recent log files, deleting others self.delete_older_log_files(log_folder) @@ -83,7 +83,7 @@ def write(self, message, fail_silently=True): except ValueError as error: sys.stdout.write("FileLogger - [Error={0}]".format(repr(error))) except Exception as error: - sys.stdout.write("FileLogger - Error opening file. [File={0}] [Exception={1}]".format(self.log_file_path, repr(error))) + sys.stdout.write("FileLogger - Error opening file. [File={0}][Exception={1}]".format(self.log_file_path, repr(error))) def flush(self): if self.log_file_handle is not None: diff --git a/src/extension/src/local_loggers/Logger.py b/src/extension/src/local_loggers/Logger.py index 272851234..780d2e6ff 100644 --- a/src/extension/src/local_loggers/Logger.py +++ b/src/extension/src/local_loggers/Logger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -36,7 +36,7 @@ def log(self, message): """log output""" message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None: - self.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Informational) + self.telemetry_writer.write_event(message, Constants.EventLevel.Info) for line in message.splitlines(): # allows the extended file logger to strip unnecessary white space if self.file_logger is not None: self.file_logger.write("\n" + line) @@ -48,7 +48,7 @@ def log_error(self, message): message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) message = 
(self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None: - self.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Error) + self.telemetry_writer.write_event(message, Constants.EventLevel.Error) if self.file_logger is not None: self.file_logger.write("\n" + self.ERROR + " " + message) else: @@ -65,7 +65,7 @@ def log_warning(self, message): message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) message = (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None: - self.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Warning) + self.telemetry_writer.write_event(message, Constants.EventLevel.Warning) if self.file_logger is not None: self.file_logger.write("\n" + self.WARNING + " " + message) else: @@ -76,7 +76,7 @@ def log_debug(self, message): message = self.__remove_substring_from_message(message, Constants.ERROR_ADDED_TO_STATUS) message = message.strip() if self.telemetry_writer is not None and self.telemetry_writer.events_folder_path is not None: - self.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event(message, Constants.EventLevel.Verbose) if self.current_env in (Constants.DEV, Constants.TEST): print(self.current_env + ": " + message) # send to standard output if dev or test env if self.file_logger is not None: diff --git a/src/extension/src/local_loggers/StdOutFileMirror.py b/src/extension/src/local_loggers/StdOutFileMirror.py index 65adc30ea..b647f7741 100644 --- a/src/extension/src/local_loggers/StdOutFileMirror.py +++ b/src/extension/src/local_loggers/StdOutFileMirror.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/src/local_loggers/__init__.py b/src/extension/src/local_loggers/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/src/local_loggers/__init__.py +++ b/src/extension/src/local_loggers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_ActionHandler.py b/src/extension/tests/Test_ActionHandler.py index d494c1fa0..623038165 100644 --- a/src/extension/tests/Test_ActionHandler.py +++ b/src/extension/tests/Test_ActionHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -397,9 +397,9 @@ def test_write_basic_status(self): self.action_handler.write_basic_status(Constants.INSTALL) self.assertTrue(os.path.exists(os.path.join(self.ext_env_handler.status_folder, '6789.status'))) status_json = self.action_handler.ext_output_status_handler.read_file(self.action_handler.seq_no) - self.assertEqual(status_json[0]["status"]["name"], "Azure Patch Management") + self.assertEqual(status_json[0]["status"]["name"], "Azure Guest Patching Service") self.assertEqual(status_json[0]["status"]["operation"], "") - self.assertEqual(status_json[0]["status"]["status"], Constants.Status.Transitioning.lower()) + self.assertEqual(status_json[0]["status"]["status"], Constants.Status.TRANSITIONING.lower()) self.assertEqual(status_json[0]["status"]["code"], 0) self.assertEqual(status_json[0]["status"]["formattedMessage"]["message"], "") self.assertEqual(status_json[0]["status"]["substatus"], []) @@ -412,9 +412,9 @@ def test_write_basic_status(self): self.action_handler.write_basic_status(Constants.ENABLE) self.assertTrue(os.path.exists(os.path.join(self.ext_env_handler.status_folder, '1234.status'))) status_json = self.action_handler.ext_output_status_handler.read_file(self.action_handler.seq_no) - self.assertEqual(status_json[0]["status"]["name"], "Azure Patch Management") + self.assertEqual(status_json[0]["status"]["name"], "Azure Guest Patching Service") self.assertEqual(status_json[0]["status"]["operation"], "Installation") - self.assertEqual(status_json[0]["status"]["status"], Constants.Status.Transitioning.lower()) + self.assertEqual(status_json[0]["status"]["status"], Constants.Status.TRANSITIONING.lower()) self.assertEqual(status_json[0]["status"]["code"], 0) 
self.assertEqual(status_json[0]["status"]["formattedMessage"]["message"], "") self.assertEqual(status_json[0]["status"]["substatus"], []) @@ -544,9 +544,9 @@ def test_status_file_on_enable_success(self): self.action_handler.enable() self.assertTrue(os.path.exists(os.path.join(self.ext_env_handler.status_folder, '1234.status'))) status_json = self.action_handler.ext_output_status_handler.read_file(self.action_handler.seq_no) - self.assertEqual(status_json[0]["status"]["name"], "Azure Patch Management") + self.assertEqual(status_json[0]["status"]["name"], "Azure Guest Patching Service") self.assertEqual(status_json[0]["status"]["operation"], "Installation") - self.assertEqual(status_json[0]["status"]["status"], Constants.Status.Transitioning.lower()) + self.assertEqual(status_json[0]["status"]["status"], Constants.Status.TRANSITIONING.lower()) self.assertEqual(status_json[0]["status"]["code"], 0) self.assertEqual(status_json[0]["status"]["formattedMessage"]["message"], "") self.assertEqual(status_json[0]["status"]["substatus"], []) @@ -592,9 +592,9 @@ def validate_status_file_on_success(self, seq_no): # validate status file self.assertTrue(os.path.exists(os.path.join(self.ext_env_handler.status_folder, str(seq_no) + '.status'))) status_json = self.action_handler.ext_output_status_handler.read_file(seq_no) - self.assertEqual(status_json[0]["status"]["name"], "Azure Patch Management") + self.assertEqual(status_json[0]["status"]["name"], "Azure Guest Patching Service") self.assertEqual(status_json[0]["status"]["operation"], "") - self.assertEqual(status_json[0]["status"]["status"], Constants.Status.Success.lower()) + self.assertEqual(status_json[0]["status"]["status"], Constants.Status.SUCCESS.lower()) self.assertEqual(status_json[0]["status"]["code"], 0) self.assertEqual(status_json[0]["status"]["formattedMessage"]["message"], "") self.assertEqual(status_json[0]["status"]["substatus"], []) @@ -602,9 +602,9 @@ def validate_status_file_on_success(self, seq_no): def 
validate_status_file_on_failure(self, seq_no, message, code=Constants.ExitCode.HandlerFailed): self.assertTrue(os.path.exists(os.path.join(self.ext_env_handler.status_folder, str(seq_no) + '.status'))) status_json = self.action_handler.ext_output_status_handler.read_file(seq_no) - self.assertEqual(status_json[0]["status"]["name"], "Azure Patch Management") + self.assertEqual(status_json[0]["status"]["name"], "Azure Guest Patching Service") self.assertEqual(status_json[0]["status"]["operation"], "") - self.assertEqual(status_json[0]["status"]["status"], Constants.Status.Error.lower()) + self.assertEqual(status_json[0]["status"]["status"], Constants.Status.ERROR.lower()) self.assertEqual(status_json[0]["status"]["code"], code) self.assertEqual(status_json[0]["status"]["formattedMessage"]["message"], message) self.assertEqual(status_json[0]["status"]["substatus"], []) diff --git a/src/extension/tests/Test_CoreStateHandler.py b/src/extension/tests/Test_CoreStateHandler.py index ec60d90cb..e9faed205 100644 --- a/src/extension/tests/Test_CoreStateHandler.py +++ b/src/extension/tests/Test_CoreStateHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_EnableCommandHandler.py b/src/extension/tests/Test_EnableCommandHandler.py index cd530e800..219f66979 100644 --- a/src/extension/tests/Test_EnableCommandHandler.py +++ b/src/extension/tests/Test_EnableCommandHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -139,7 +139,7 @@ def test_process_nooperation_enable_request(self): # update operation to 'NoOperation' since it is set to Assessment in the original helper file with open(new_settings_file, 'r+') as f: config_settings = json.load(f) - config_settings[self.constants.RUNTIME_SETTINGS][0][self.constants.HANDLER_SETTINGS][self.constants.PUBLIC_SETTINGS][self.constants.ConfigPublicSettingsFields.operation] = self.constants.NOOPERATION + config_settings[self.constants.RUNTIME_SETTINGS][0][self.constants.HANDLER_SETTINGS][self.constants.PUBLIC_SETTINGS][self.constants.ConfigPublicSettingsFields.operation] = self.constants.Op.NO_OPERATION f.seek(0) # rewind json.dump(config_settings, f) f.truncate() @@ -164,7 +164,7 @@ def test_process_configure_patching_request(self): # update operation to 'ConfigurePatching' since it is set to Assessment in the original helper file with open(new_settings_file, 'r+') as f: config_settings = json.load(f) - config_settings[self.constants.RUNTIME_SETTINGS][0][self.constants.HANDLER_SETTINGS][self.constants.PUBLIC_SETTINGS][self.constants.ConfigPublicSettingsFields.operation] = self.constants.CONFIGURE_PATCHING + config_settings[self.constants.RUNTIME_SETTINGS][0][self.constants.HANDLER_SETTINGS][self.constants.PUBLIC_SETTINGS][self.constants.ConfigPublicSettingsFields.operation] = self.constants.Op.CONFIGURE_PATCHING f.seek(0) # rewind json.dump(config_settings, f) f.truncate() @@ -175,9 +175,9 @@ def test_process_configure_patching_request(self): self.assertEqual(sys_exit.exception.code, Constants.ExitCode.Okay) status_json = self.ext_output_status_handler.read_file('12') parent_key = Constants.StatusFileFields.status - 
self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_name], "Azure Patch Management") - self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_operation], Constants.CONFIGURE_PATCHING) - self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_status], Constants.Status.Transitioning.lower()) + self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_name], "Azure Guest Patching Service") + self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_operation], Constants.Op.CONFIGURE_PATCHING) + self.assertEqual(status_json[0][parent_key][Constants.StatusFileFields.status_status], Constants.Status.TRANSITIONING.lower()) def test_process_invalid_request(self): # setup to mock environment when enable is triggered with an invalid request diff --git a/src/extension/tests/Test_EnvHealthManager.py b/src/extension/tests/Test_EnvHealthManager.py index 3d5da01a9..4f2fcc91d 100644 --- a/src/extension/tests/Test_EnvHealthManager.py +++ b/src/extension/tests/Test_EnvHealthManager.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_ExtConfigSettingsHandler.py b/src/extension/tests/Test_ExtConfigSettingsHandler.py index 7aef552f6..172f92258 100644 --- a/src/extension/tests/Test_ExtConfigSettingsHandler.py +++ b/src/extension/tests/Test_ExtConfigSettingsHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_ExtEnvHandler.py b/src/extension/tests/Test_ExtEnvHandler.py index bead11701..c9c57ae17 100644 --- a/src/extension/tests/Test_ExtEnvHandler.py +++ b/src/extension/tests/Test_ExtEnvHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_ExtOutputStatusHandler.py b/src/extension/tests/Test_ExtOutputStatusHandler.py index d96a6d27d..4be8d9129 100644 --- a/src/extension/tests/Test_ExtOutputStatusHandler.py +++ b/src/extension/tests/Test_ExtOutputStatusHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -46,15 +46,15 @@ def test_create_status_file(self): dir_path = tempfile.mkdtemp() operation = "Assessment" ext_status_handler = ExtOutputStatusHandler(self.logger, self.utility, self.json_file_handler, dir_path) - ext_status_handler.write_status_file(operation, file_name, self.status.Transitioning.lower()) + ext_status_handler.write_status_file(operation, file_name, self.status.TRANSITIONING.lower()) with open(dir_path + "\\" + file_name + ext_status_handler.file_ext) as status_file: content = json.load(status_file) parent_key = self.status_file_fields.status self.assertTrue(content is not None) - self.assertEqual(content[0][parent_key][self.status_file_fields.status_name], "Azure Patch Management") + self.assertEqual(content[0][parent_key][self.status_file_fields.status_name], "Azure Guest Patching Service") self.assertEqual(content[0][parent_key][self.status_file_fields.status_operation], operation) - self.assertEqual(content[0][parent_key][self.status_file_fields.status_status], self.status.Transitioning.lower()) + self.assertEqual(content[0][parent_key][self.status_file_fields.status_status], self.status.TRANSITIONING.lower()) shutil.rmtree(dir_path) def test_read_file(self): @@ -63,12 +63,12 @@ def test_read_file(self): operation = "Assessment" ext_output_status_handler = ExtOutputStatusHandler(self.logger, self.utility, self.json_file_handler, dir_path) - ext_output_status_handler.write_status_file(operation, file_name, self.status.Transitioning.lower()) + ext_output_status_handler.write_status_file(operation, file_name, self.status.TRANSITIONING.lower()) status_json = ext_output_status_handler.read_file(file_name) parent_key = self.status_file_fields.status - 
self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_name], "Azure Patch Management") + self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_name], "Azure Guest Patching Service") self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_operation], operation) - self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_status], self.status.Transitioning.lower()) + self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_status], self.status.TRANSITIONING.lower()) shutil.rmtree(dir_path) def test_update_file(self): @@ -80,7 +80,7 @@ def test_update_file(self): operation = "Assessment" ext_status_handler = ExtOutputStatusHandler(self.logger, self.utility, self.json_file_handler, dir_path) - ext_status_handler.write_status_file(operation, file_name, self.status.Success.lower()) + ext_status_handler.write_status_file(operation, file_name, self.status.SUCCESS.lower()) stat_file_name = os.stat(os.path.join(dir_path, file_name + ".status")) prev_modified_time = stat_file_name.st_mtime @@ -94,19 +94,19 @@ def test_update_file(self): modified_time = stat_file_name.st_mtime self.assertNotEqual(prev_modified_time, modified_time) # Fails here on GitHub updated_status_json = ext_status_handler.read_file(file_name) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_status], self.status.Transitioning.lower()) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_name], "Azure Patch Management") + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_status], self.status.TRANSITIONING.lower()) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_name], "Azure Guest Patching Service") 
self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_operation], "Assessment") self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_code], Constants.ExitCode.Okay) self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_formatted_message][self.status_file_fields.status_formatted_message_message], "") - ext_status_handler.update_file(file_name, Constants.Status.Success.lower(), Constants.ExitCode.Okay, "Test message") + ext_status_handler.update_file(file_name, Constants.Status.SUCCESS.lower(), Constants.ExitCode.Okay, "Test message") stat_file_name = os.stat(os.path.join(dir_path, file_name + ".status")) modified_time = stat_file_name.st_mtime self.assertNotEqual(prev_modified_time, modified_time) updated_status_json = ext_status_handler.read_file(file_name) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_status], self.status.Success.lower()) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_name], "Azure Patch Management") + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_status], self.status.SUCCESS.lower()) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_name], "Azure Guest Patching Service") self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_operation], "Assessment") self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_code], Constants.ExitCode.Okay) self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_formatted_message][self.status_file_fields.status_formatted_message_message], "Test message") @@ -116,16 +116,16 @@ def 
test_add_error_to_status(self): file_name = "test" dir_path = tempfile.mkdtemp() ext_output_status_handler = ExtOutputStatusHandler(self.logger, self.utility, self.json_file_handler, dir_path) - ext_output_status_handler.set_current_operation(Constants.NOOPERATION) + ext_output_status_handler.set_current_operation(Constants.Op.NO_OPERATION) self.logger.file_logger = FileLogger(dir_path, "test.log") ext_output_status_handler.read_file(file_name) # Unexpected input self.assertTrue(ext_output_status_handler.add_error_to_status(None) is None) ext_output_status_handler.add_error_to_status("Adding test exception", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - ext_output_status_handler.set_nooperation_substatus_json(Constants.NOOPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.Success.lower()) + ext_output_status_handler.set_nooperation_substatus_json(Constants.Op.NO_OPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.SUCCESS.lower()) updated_status_json = ext_output_status_handler.read_file(file_name) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.PATCH_NOOPERATION_SUMMARY) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.OpSummary.NO_OPERATION) self.assertNotEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"], None) self.assertEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["code"], 1) self.assertEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["details"][0]["code"], Constants.PatchOperationErrorCodes.DEFAULT_ERROR) @@ -136,9 +136,9 @@ def test_add_error_to_status(self): 
ext_output_status_handler.add_error_to_status("exception4", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) ext_output_status_handler.add_error_to_status("exception5", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) ext_output_status_handler.add_error_to_status("exception6", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - ext_output_status_handler.set_nooperation_substatus_json(Constants.NOOPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.Success.lower()) + ext_output_status_handler.set_nooperation_substatus_json(Constants.Op.NO_OPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.SUCCESS.lower()) updated_status_json = ext_output_status_handler.read_file(file_name) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.PATCH_NOOPERATION_SUMMARY) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.OpSummary.NO_OPERATION) self.assertNotEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"], None) self.assertEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["code"], 1) self.assertEqual(len(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["details"]), 5) @@ -150,7 +150,7 @@ def test_add__duplicate_error_to_status(self): file_name = "test" dir_path = tempfile.mkdtemp() ext_output_status_handler = ExtOutputStatusHandler(self.logger, self.utility, self.json_file_handler, dir_path) - ext_output_status_handler.set_current_operation(Constants.NOOPERATION) + ext_output_status_handler.set_current_operation(Constants.Op.NO_OPERATION) self.logger.file_logger = FileLogger(dir_path, "test.log") 
ext_output_status_handler.read_file(file_name) # Unexpected input @@ -163,9 +163,9 @@ def test_add__duplicate_error_to_status(self): ext_output_status_handler.add_error_to_status("exception3", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) ext_output_status_handler.add_error_to_status("exception2: extra details", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) ext_output_status_handler.add_error_to_status("exception2", Constants.PatchOperationErrorCodes.DEFAULT_ERROR) - ext_output_status_handler.set_nooperation_substatus_json(Constants.NOOPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.Success.lower()) + ext_output_status_handler.set_nooperation_substatus_json(Constants.Op.NO_OPERATION, activity_id="", start_time="", seq_no=file_name, status=self.status.SUCCESS.lower()) updated_status_json = ext_output_status_handler.read_file(file_name) - self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.PATCH_NOOPERATION_SUMMARY) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_substatus][0][self.status_file_fields.status_name], Constants.OpSummary.NO_OPERATION) self.assertNotEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"], None) self.assertEqual(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["code"], 1) self.assertEqual(len(json.loads(updated_status_json[0]["status"]["substatus"][0]["formattedMessage"]["message"])["errors"]["details"]), 3) diff --git a/src/extension/tests/Test_ExtStateHandler.py b/src/extension/tests/Test_ExtStateHandler.py index 29632b79f..a7f262600 100644 --- a/src/extension/tests/Test_ExtStateHandler.py +++ b/src/extension/tests/Test_ExtStateHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_FileLogger.py b/src/extension/tests/Test_FileLogger.py index f5159d6c6..125506a15 100644 --- a/src/extension/tests/Test_FileLogger.py +++ b/src/extension/tests/Test_FileLogger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_HandlerManifest.py b/src/extension/tests/Test_HandlerManifest.py index 28c6c7b35..90959a286 100644 --- a/src/extension/tests/Test_HandlerManifest.py +++ b/src/extension/tests/Test_HandlerManifest.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -38,11 +38,11 @@ def test_handler_manifest_json(self): handler_json = json.loads(file_contents) self.assertEqual(len(handler_json), 1) self.assertEqual(handler_json[0]['version'], 1.0) - self.assertEqual(handler_json[0]['handlerManifest']['disableCommand'], "MsftLinuxPatchExtShim.sh -d") - self.assertEqual(handler_json[0]['handlerManifest']['enableCommand'], "MsftLinuxPatchExtShim.sh -e") - self.assertEqual(handler_json[0]['handlerManifest']['uninstallCommand'], "MsftLinuxPatchExtShim.sh -u") - self.assertEqual(handler_json[0]['handlerManifest']['installCommand'], "MsftLinuxPatchExtShim.sh -i") - self.assertEqual(handler_json[0]['handlerManifest']['updateCommand'], "MsftLinuxPatchExtShim.sh -p") + self.assertEqual(handler_json[0]['handlerManifest']['disableCommand'], "AzGPSLinuxPatchExtShim.sh -d") + self.assertEqual(handler_json[0]['handlerManifest']['enableCommand'], "AzGPSLinuxPatchExtShim.sh -e") + self.assertEqual(handler_json[0]['handlerManifest']['uninstallCommand'], "AzGPSLinuxPatchExtShim.sh -u") + self.assertEqual(handler_json[0]['handlerManifest']['installCommand'], "AzGPSLinuxPatchExtShim.sh -i") + self.assertEqual(handler_json[0]['handlerManifest']['updateCommand'], "AzGPSLinuxPatchExtShim.sh -p") self.assertEqual(handler_json[0]['handlerManifest']['rebootAfterInstall'], False) self.assertEqual(handler_json[0]['handlerManifest']['reportHeartbeat'], False) self.handler_manifest_file_handle.close() diff --git a/src/extension/tests/Test_InstallCommandHandler.py b/src/extension/tests/Test_InstallCommandHandler.py index d19fe68d4..6114b7122 100644 --- a/src/extension/tests/Test_InstallCommandHandler.py +++ b/src/extension/tests/Test_InstallCommandHandler.py @@ -4,7 +4,7 @@ # you may 
not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_JsonFileHandler.py b/src/extension/tests/Test_JsonFileHandler.py index 1a6f489cd..b32303e36 100644 --- a/src/extension/tests/Test_JsonFileHandler.py +++ b/src/extension/tests/Test_JsonFileHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_Logger.py b/src/extension/tests/Test_Logger.py index 424aaf481..b3a36359d 100644 --- a/src/extension/tests/Test_Logger.py +++ b/src/extension/tests/Test_Logger.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_ProcessHandler.py b/src/extension/tests/Test_ProcessHandler.py index f346c2d91..e5fa54928 100644 --- a/src/extension/tests/Test_ProcessHandler.py +++ b/src/extension/tests/Test_ProcessHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_RuntimeContextHandler.py b/src/extension/tests/Test_RuntimeContextHandler.py index 93b928344..00919fce8 100644 --- a/src/extension/tests/Test_RuntimeContextHandler.py +++ b/src/extension/tests/Test_RuntimeContextHandler.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/Test_TelemetryWriter.py b/src/extension/tests/Test_TelemetryWriter.py index fa91c4232..22d713d42 100644 --- a/src/extension/tests/Test_TelemetryWriter.py +++ b/src/extension/tests/Test_TelemetryWriter.py @@ -31,7 +31,7 @@ def mock_os_path_exists(self, filepath): return True def mock_get_file_size(self, file_path): - return Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS + 10 + return Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + 10 def mock_os_listdir(self, file_path): return ['testevent1.json', 'testevent2.json', 'testevent3.json', 'testevent4.json'] @@ -40,14 +40,14 @@ def test_write_event(self): if self.runtime.is_github_runner: return - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") with open(os.path.join(self.telemetry_writer.events_folder_path, os.listdir(self.telemetry_writer.events_folder_path)[0]), 'r+') as f: events = json.load(f) 
self.assertTrue(events is not None) self.assertEqual(events[0]["TaskName"], "Test Task") f.close() - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") if len(os.listdir(self.telemetry_writer.events_folder_path)) > 1: with open(os.path.join(self.telemetry_writer.events_folder_path, os.listdir(self.telemetry_writer.events_folder_path)[1]), 'r+') as f: events = json.load(f) @@ -68,8 +68,8 @@ def test_write_multiple_events_in_same_file(self): time_backup = time.time time.time = self.mock_time - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") with open(os.path.join(self.telemetry_writer.events_folder_path, os.listdir(self.telemetry_writer.events_folder_path)[0]), 'r+') as f: events = json.load(f) self.assertTrue(events is not None) @@ -82,13 +82,13 @@ def test_write_multiple_events_in_same_file(self): def test_write_event_msg_size_limit(self): # Assuming 1 char is 1 byte message = "a"*3074 - self.telemetry_writer.write_event(message, Constants.TelemetryEventLevel.Error, "Test Task") + self.telemetry_writer.write_event(message, Constants.EventLevel.Error, "Test Task") with open(os.path.join(self.telemetry_writer.events_folder_path, os.listdir(self.telemetry_writer.events_folder_path)[0]), 'r+') as f: events = json.load(f) self.assertTrue(events is not None) self.assertEqual(events[0]["TaskName"], "Test Task") self.assertTrue(len(events[0]["Message"]) < len(message.encode('utf-8'))) 
- chars_dropped = len(message.encode('utf-8')) - Constants.TELEMETRY_MSG_SIZE_LIMIT_IN_CHARS + Constants.TELEMETRY_BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS + chars_dropped = len(message.encode('utf-8')) - Constants.TelemetryConfig.MSG_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.BUFFER_FOR_DROPPED_COUNT_MSG_IN_CHARS self.assertEqual(events[0]["Message"], "a"*(len(message.encode('utf-8')) - chars_dropped) + ". [{0} chars dropped]".format(chars_dropped)) f.close() @@ -96,18 +96,18 @@ def test_write_event_size_limit(self): # will not write to telemetry if event size exceeds limit message = "a"*3074 task_name = "b"*5000 - self.telemetry_writer.write_event(task_name, message, Constants.TelemetryEventLevel.Error) + self.telemetry_writer.write_event(task_name, message, Constants.EventLevel.Error) self.assertTrue(len(os.listdir(self.telemetry_writer.events_folder_path)) == 0) # TODO: The following test is failing almost consistently commenting it out to be tracked in: Task 10912099: [Bug] Bug in telemetry writer - overwriting prior events in fast execution # def test_write_to_new_file_if_event_file_limit_reached(self): - # self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + # self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") # os_path_exists_backup = os.path.exists # os.path.exists = self.mock_os_path_exists # telemetry_get_event_file_size_backup = self.telemetry_writer.get_file_size # self.telemetry_writer.get_file_size = self.mock_get_file_size # - # self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") + # self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") # events = os.listdir(self.telemetry_writer.events_folder_path) # self.assertEqual(len(events), 2) # os.path.exists = os_path_exists_backup @@ -118,44 +118,44 @@ def 
test_delete_older_events(self): return # deleting older event files before adding new one - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task3") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task3") old_events = os.listdir(self.telemetry_writer.events_folder_path) - telemetry_dir_size_backup = Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 1030 - telemetry_event_size_backup = Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 1024 + telemetry_dir_size_backup = Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = 1030 + telemetry_event_size_backup = Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = 1024 - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task4") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task4") new_events = os.listdir(self.telemetry_writer.events_folder_path) self.assertEqual(len(new_events), 1) self.assertTrue(old_events[0] not in new_events) # Fails here on GitHub - Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup + 
Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup + Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup # error while deleting event files where the directory size exceeds limit even after deletion attempts - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task2") - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task3") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task2") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task3") old_events = os.listdir(self.telemetry_writer.events_folder_path) - telemetry_dir_size_backup = Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = 500 - telemetry_event_size_backup = Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = 400 + telemetry_dir_size_backup = Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = 500 + telemetry_event_size_backup = Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS + Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = 400 os_remove_backup = os.remove os.remove = self.mock_os_remove - self.assertRaises(Exception, lambda: self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task4")) + self.assertRaises(Exception, lambda: self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task4")) - 
Constants.TELEMETRY_DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup - Constants.TELEMETRY_EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup + Constants.TelemetryConfig.DIR_SIZE_LIMIT_IN_CHARS = telemetry_dir_size_backup + Constants.TelemetryConfig.EVENT_FILE_SIZE_LIMIT_IN_CHARS = telemetry_event_size_backup os.remove = os_remove_backup def test_events_deleted_outside_of_extension_while_extension_is_running(self): backup_os_listdir = os.listdir os.listdir = self.mock_os_listdir - self.telemetry_writer.write_event("testing telemetry write to file", Constants.TelemetryEventLevel.Error, "Test Task") + self.telemetry_writer.write_event("testing telemetry write to file", Constants.EventLevel.Error, "Test Task") os.listdir = backup_os_listdir diff --git a/src/extension/tests/Test_Utility.py b/src/extension/tests/Test_Utility.py index e77b1ac57..a9be1aadd 100644 --- a/src/extension/tests/Test_Utility.py +++ b/src/extension/tests/Test_Utility.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/__init__.py b/src/extension/tests/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/tests/__init__.py +++ b/src/extension/tests/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/helpers/VirtualTerminal.py b/src/extension/tests/helpers/VirtualTerminal.py index e1e6851a9..afcfa869b 100644 --- a/src/extension/tests/helpers/VirtualTerminal.py +++ b/src/extension/tests/helpers/VirtualTerminal.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/extension/tests/helpers/__init__.py b/src/extension/tests/helpers/__init__.py index a1eee39e6..2e3211aae 100644 --- a/src/extension/tests/helpers/__init__.py +++ b/src/extension/tests/helpers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/tools/Package-All.py b/src/tools/Package-All.py index 20bd80fef..b4c3ac4ed 100644 --- a/src/tools/Package-All.py +++ b/src/tools/Package-All.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,10 @@ # # Requires Python 2.7+ -""" Merges individual python modules from src to the MsftLinuxPatchExt files in the out directory. +""" Merges individual python modules from src to the AzGPSLinuxPatchExt files in the out directory. Relative source and destination paths for the extension are auto-detected if the optional src parameter is not present. -How to use: python Package.py """ +How to use: python Package-All.py +Note: Package-All.py internally invokes Package-Core.py to generate AzGPSLinuxPatchCore.py """ from __future__ import print_function import sys @@ -26,6 +27,7 @@ from shutil import copyfile from shutil import make_archive import subprocess +import xml.etree.ElementTree as et # imports in VERY_FIRST_IMPORTS, order should be kept VERY_FIRST_IMPORTS = [ @@ -60,15 +62,16 @@ def write_merged_code(code, merged_file_full_path): def insert_copyright_notice(merged_file_full_path, merged_file_name): - notice = '# --------------------------------------------------------------------------------------------------------------------\n' + notice = '# coding=utf-8\n' + notice += '# --------------------------------------------------------------------------------------------------------------------\n' notice += '# \n' - notice += '# Copyright 2020 Microsoft Corporation\n' \ + notice += '# Copyright ' + str(datetime.date.today().year) + ' Microsoft Corporation\n' \ '#\n' \ '# Licensed under the Apache License, Version 2.0 (the "License");\n' \ '# you may not use this file except in compliance with the License.\n' \ '# You may obtain a copy of the License at\n' \ '#\n' \ - '# http://www.apache.org/licenses/LICENSE-2.0\n' \ + '# https://www.apache.org/licenses/LICENSE-2.0\n' \ '#\n' \ '# 
Unless required by applicable law or agreed to in writing, software\n' \ '# distributed under the License is distributed on an "AS IS" BASIS,\n' \ @@ -105,7 +108,7 @@ def prepend_content_to_file(content, file_name): os.rename(temp_file, file_name) -def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment): +def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment, new_version): try: print('\n\n=============================== GENERATING ' + merged_file_name + '... =============================================================\n') @@ -145,8 +148,9 @@ def generate_compiled_script(source_code_path, merged_file_full_path, merged_fil print('========== Set Copyright, Version and Environment. Also enforce UNIX-style line endings.\n') insert_copyright_notice(merged_file_full_path, merged_file_name) timestamp = datetime.datetime.utcnow().strftime("%y%m%d-%H%M") - replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name.split('.')[0]) - replace_text_in_file(merged_file_full_path, '[%exec_sub_ver%]', timestamp) + replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name) + replace_text_in_file(merged_file_full_path, '[%exec_ver%]', str(new_version)) + replace_text_in_file(merged_file_full_path, '[%exec_build_timestamp%]', timestamp) replace_text_in_file(merged_file_full_path, 'Constants.UNKNOWN_ENV', environment) replace_text_in_file(merged_file_full_path, '\r\n', '\n') @@ -190,11 +194,22 @@ def main(argv): exec_core_build_path = os.path.join(working_directory, 'tools', 'Package-Core.py') subprocess.call('python ' + exec_core_build_path, shell=True) + # Get version from manifest for code + new_version = None + manifest_xml_file_path = os.path.join(working_directory, 'extension', 'src', 'manifest.xml') + manifest_tree = et.parse(manifest_xml_file_path) + manifest_root = manifest_tree.getroot() + for i in range(0, len(manifest_root)): + if 'Version' in 
str(manifest_root[i]): + new_version = manifest_root[i].text + if new_version is None: + raise Exception("Unable to determine target version.") + # Generated compiled scripts at the destination - merged_file_details = [('MsftLinuxPatchExt.py', 'Constants.PROD')] + merged_file_details = [('AzGPSLinuxPatchExt.py', 'Constants.ExecEnv.PROD')] for merged_file_detail in merged_file_details: merged_file_destination = os.path.join(working_directory, 'out', merged_file_detail[0]) - generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1]) + generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1], new_version) # GENERATING EXTENSION print('\n\n=============================== GENERATING LinuxPatchExtension.zip... =============================================================\n') @@ -214,7 +229,7 @@ def main(argv): # Copy extension files print('\n========== Copying extension files + enforcing UNIX style line endings.\n') - ext_files = ['HandlerManifest.json', 'manifest.xml', 'MsftLinuxPatchExtShim.sh'] + ext_files = ['HandlerManifest.json', 'manifest.xml', 'AzGPSLinuxPatchExtShim.sh'] for ext_file in ext_files: ext_file_src = os.path.join(working_directory, 'extension', 'src', ext_file) ext_file_destination = os.path.join(working_directory, 'out', ext_file) diff --git a/src/tools/Package-Core.py b/src/tools/Package-Core.py index bae1a0337..450999116 100644 --- a/src/tools/Package-Core.py +++ b/src/tools/Package-Core.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,9 @@ # # Requires Python 2.7+ -""" Merges individual python modules from src to the PatchMicrosoftOMSLinuxComputer.py and MsftLinuxPatchCore.py files in the out directory. -Relative source and destination paths for the patch runbook are auto-detected if the optional src parameter is not present. -How to use: python Package.py """ +""" Merges individual python modules from src to the AzGPSLinuxPatchCore.py files in the out directory. +Relative source and destination paths for the extension Core are auto-detected if the optional src parameter is not present. +How to use: python Package-Core.py """ from __future__ import print_function @@ -25,6 +25,7 @@ import os import errno import datetime +import xml.etree.ElementTree as et # imports in VERY_FIRST_IMPORTS, order should be kept @@ -62,15 +63,16 @@ def write_merged_code(code, merged_file_full_path): def insert_copyright_notice(merged_file_full_path, merged_file_name): - notice = '# --------------------------------------------------------------------------------------------------------------------\n' + notice = '# coding=utf-8\n' + notice += '# --------------------------------------------------------------------------------------------------------------------\n' notice += '# \n' - notice += '# Copyright 2020 Microsoft Corporation\n' \ + notice += '# Copyright ' + str(datetime.date.today().year) + ' Microsoft Corporation\n' \ '#\n' \ '# Licensed under the Apache License, Version 2.0 (the "License");\n' \ '# you may not use this file except in compliance with the License.\n' \ '# You may obtain a copy of the License at\n' \ '#\n' \ - '# http://www.apache.org/licenses/LICENSE-2.0\n' \ + '# https://www.apache.org/licenses/LICENSE-2.0\n' 
\ '#\n' \ '# Unless required by applicable law or agreed to in writing, software\n' \ '# distributed under the License is distributed on an "AS IS" BASIS,\n' \ @@ -107,7 +109,7 @@ def prepend_content_to_file(content, file_name): os.rename(temp_file, file_name) -def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment): +def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment, new_version): try: print('\n\n=============================== GENERATING ' + merged_file_name + '... =============================================================\n') @@ -128,7 +130,7 @@ def generate_compiled_script(source_code_path, merged_file_full_path, merged_fil continue elif 'external_dependencies' in file_path: continue - elif os.path.basename(file_path) in ('PackageManager.py', 'Constants.py', 'LifecycleManager.py', 'SystemctlManager.py'): + elif os.path.basename(file_path) in ('PatchOperator.py', 'PackageManager.py', 'Constants.py', 'LifecycleManager.py', 'SystemctlManager.py'): modules_to_be_merged.insert(0, file_path) else: if len(modules_to_be_merged) > 0 and '__main__.py' in modules_to_be_merged[-1]: @@ -149,8 +151,9 @@ def generate_compiled_script(source_code_path, merged_file_full_path, merged_fil print('========== Set Copyright, Version and Environment. 
Also enforce UNIX-style line endings.\n') insert_copyright_notice(merged_file_full_path, merged_file_name) timestamp = datetime.datetime.utcnow().strftime("%y%m%d-%H%M") - replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name.split('.')[0]) - replace_text_in_file(merged_file_full_path, '[%exec_sub_ver%]', timestamp) + replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name) + replace_text_in_file(merged_file_full_path, '[%exec_ver%]', str(new_version)) + replace_text_in_file(merged_file_full_path, '[%exec_build_timestamp%]', timestamp) replace_text_in_file(merged_file_full_path, '\r\n', '\n') print("========== Merged core code was saved to:\n{0}\n".format(merged_file_full_path)) @@ -217,11 +220,22 @@ def main(argv): if e.errno != errno.EEXIST: raise + # Get version from manifest for code + new_version = None + manifest_xml_file_path = os.path.join(working_directory, 'extension', 'src', 'manifest.xml') + manifest_tree = et.parse(manifest_xml_file_path) + manifest_root = manifest_tree.getroot() + for i in range(0, len(manifest_root)): + if 'Version' in str(manifest_root[i]): + new_version = manifest_root[i].text + if new_version is None: + raise Exception("Unable to determine target version.") + # Generated compiled scripts at the destination - merged_file_details = [('MsftLinuxPatchCore.py', 'Constants.PROD')] + merged_file_details = [('AzGPSLinuxPatchCore.py', 'Constants.ExecEnv.PROD')] for merged_file_detail in merged_file_details: merged_file_destination = os.path.join(working_directory, 'out', merged_file_detail[0]) - generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1]) + generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1], new_version) # add all dependencies under core/src/external_dependencies to destination directory external_dependencies_destination = os.path.join(merge_file_directory, 
'external_dependencies') diff --git a/src/tools/references/cmd_output_references/sudo_output_expected.txt b/src/tools/references/cmd_output_references/sudo_output_expected.txt new file mode 100644 index 000000000..466ea1b19 --- /dev/null +++ b/src/tools/references/cmd_output_references/sudo_output_expected.txt @@ -0,0 +1,10 @@ +Command: +sudo timeout 10 id && echo True || echo False + +Good output: +uid=0(root) gid=0(root) groups=0(root) +True + +Bad output: +[sudo] password for username: +False \ No newline at end of file