diff --git a/src/core/src/package_managers/AptitudePackageManager.py b/src/core/src/package_managers/AptitudePackageManager.py index 9f1a3eb8..9b451d5c 100644 --- a/src/core/src/package_managers/AptitudePackageManager.py +++ b/src/core/src/package_managers/AptitudePackageManager.py @@ -18,8 +18,8 @@ import json import os import re +import shutil import sys -import uuid from core.src.package_managers.PackageManager import PackageManager from core.src.bootstrap.Constants import Constants @@ -33,25 +33,34 @@ class AptitudePackageManager(PackageManager): def __init__(self, env_layer, execution_config, composite_logger, telemetry_writer, status_handler): super(AptitudePackageManager, self).__init__(env_layer, execution_config, composite_logger, telemetry_writer, status_handler) - # Accept EULA (End User License Agreement) as per the EULA settings set by user - optional_accept_eula_in_cmd = "ACCEPT_EULA=Y" if execution_config.accept_package_eula else "" + # Apt constants config + self.APT_SOURCES_LIST_PATH = '/etc/apt/sources.list' + self.APT_SOURCES_DIR_PATH = '/etc/apt/sources.list.d/' + self.APT_SOURCES_DIR_LIST_EXT = 'list' + self.APT_SOURCES_DIR_SRC_EXT = 'sources' + + # Support to get packages and their dependencies + custom_source_timestamp = self.env_layer.datetime.timestamp().replace(":",".") + self.custom_source_list_template = os.path.join(execution_config.temp_folder, 'azgps-src-{0}-.list'.format(str(custom_source_timestamp))) + self.custom_source_parts_dir_template = os.path.join(execution_config.temp_folder, 'azgps-src-{0}-.d'.format(str(custom_source_timestamp))) + self.current_source_parts_dir = self.current_source_list = self.current_source_formula = None + self.current_source_parts_file_name = "azgps-src-parts.sources" # Repo refresh self.cmd_repo_refresh_template = 'sudo apt-get -q update ' - - # Support to get updates and their dependencies - self.cached_customer_source_list_formula = None - self.custom_sources_list = os.path.join(execution_config.temp_folder, 'azgps-patch-custom-{0}.list'.format(str(uuid.uuid4()))) - self.cmd_prep_custom_sources_list_template = 'sudo grep -hR /etc/apt/sources.list /etc/apt/sources.list.d/ > ' + os.path.normpath(self.custom_sources_list) self.cmd_dist_upgrade_simulation_template = 'LANG=en_US.UTF8 sudo apt-get -s dist-upgrade ' # Dist-upgrade simulation template - needs to be replaced before use; sudo is used as sometimes the sources list needs sudo to be readable + # Accept EULA (End User License Agreement) as per the EULA settings set by user + optional_accept_eula_in_cmd = "ACCEPT_EULA=Y" if execution_config.accept_package_eula else "" + + # Package evaluations self.cmd_single_package_check_versions_template = 'apt-cache madison ' self.cmd_single_package_find_install_dpkg_template = 'sudo dpkg -s ' self.cmd_single_package_find_install_apt_template = 'sudo apt list --installed ' self.single_package_upgrade_simulation_cmd = '''DEBIAN_FRONTEND=noninteractive ''' + optional_accept_eula_in_cmd + ''' LANG=en_US.UTF8 apt-get -y --only-upgrade true -s install ''' self.single_package_dependency_resolution_template = 'DEBIAN_FRONTEND=noninteractive ' + optional_accept_eula_in_cmd + ' LANG=en_US.UTF8 apt-get -y --only-upgrade true -s install ' - # Install update + # Package installation # --only-upgrade: upgrade only single package (only if it is installed) self.single_package_upgrade_cmd = '''sudo DEBIAN_FRONTEND=noninteractive LANG=en_US.UTF8 ''' + optional_accept_eula_in_cmd + ''' apt-get -y --only-upgrade true install ''' 
self.install_security_updates_azgps_coordinated_cmd = '''sudo DEBIAN_FRONTEND=noninteractive LANG=en_US.UTF8 ''' + optional_accept_eula_in_cmd + ''' apt-get -y --only-upgrade true dist-upgrade ''' @@ -80,70 +89,184 @@ def __init__(self, env_layer, execution_config, composite_logger, telemetry_writ self.ubuntu_pro_client_all_updates_cached = [] self.ubuntu_pro_client_all_updates_versions_cached = [] - self.package_install_expected_avg_time_in_seconds = 90 # As per telemetry data, the average time to install package is around 81 seconds for apt. + self.package_install_expected_avg_time_in_seconds = 90 # As per telemetry data, the average time to install package is around 81 seconds for apt. # region Sources Management def __get_custom_sources_to_spec(self, max_patch_published_date=str(), base_classification=str()): - # type: (str, str) -> str + # type: (str, str) -> (str, str) """ Prepares the custom sources list for use in a command. Idempotent. """ try: + # basic input validation if max_patch_published_date != str() and len(max_patch_published_date) != 16: raise Exception("[APM] Invalid max patch published date received. [Value={0}]".format(str(max_patch_published_date))) + if base_classification != str() and base_classification != Constants.PackageClassification.SECURITY: + raise Exception("[APM] Invalid classification selection received. [Value={0}]".format(str(base_classification))) + + # utilize caching if data exists and cache is also in the desired state + formula = "{0}-{1}".format(max_patch_published_date if max_patch_published_date != str() else "any", base_classification.lower() if base_classification != str() else "all") + if self.current_source_formula == formula: + return self.current_source_parts_dir, self.current_source_list # no need to refresh repo as state should match + + # utilize caching with refresh repo if data exists but cache is not in the desired state + self.current_source_formula = formula + self.current_source_parts_dir = self.custom_source_parts_dir_template.replace("", formula) + self.current_source_list = self.custom_source_list_template.replace("", formula) + if os.path.exists(self.current_source_list) and os.path.isdir(self.current_source_parts_dir): + self.refresh_repo(source_parts_dir=self.current_source_parts_dir, source_list=self.current_source_list) # refresh repo as requested state has changed + return self.current_source_parts_dir, self.current_source_list + + # Produce data for combination not seen previously in this execution - source list + source_list_content = self.__read_one_line_style_list_format(self.APT_SOURCES_LIST_PATH, max_patch_published_date, base_classification) if os.path.exists(self.APT_SOURCES_LIST_PATH) else str() + self.env_layer.file_system.write_with_retry(self.current_source_list, source_list_content, "w") + self.composite_logger.log_verbose("[APM] Source list content written. [List={0}][Content={1}]".format(self.current_source_list, source_list_content)) + + # Produce data for combination not seen previously in this execution - source parts + source_parts_deb882_style_content, source_parts_list_content = self.__get_consolidated_source_parts_content(max_patch_published_date, base_classification) + if len(source_parts_list_content) > 0: # list(s) in source parts + self.env_layer.file_system.write_with_retry(self.current_source_list, "\n" + source_parts_list_content, "a") + self.composite_logger.log_verbose("[APM] Source parts list content appended. 
[List={0}][Content={1}]".format(self.current_source_list, source_parts_list_content)) + + # Source parts debstyle882-only initialization + current_source_parts_deb882_style_file = os.path.join(self.current_source_parts_dir, self.current_source_parts_file_name) + if os.path.isdir(self.current_source_parts_dir): + shutil.rmtree(self.current_source_parts_dir) + + # Create the folder and write to it only if there is debstyle882 content to be written + if len(source_parts_deb882_style_content) > 0: + os.makedirs(self.current_source_parts_dir) + self.env_layer.file_system.write_with_retry(current_source_parts_deb882_style_file, source_parts_deb882_style_content, "w") + self.composite_logger.log_verbose("[APM] Source parts debstyle882 content written. [Dir={0}][Content={1}]".format(current_source_parts_deb882_style_file, source_parts_deb882_style_content)) - formula = "F-[{0}]-[{1}]".format(max_patch_published_date, base_classification) - if self.cached_customer_source_list_formula == formula: - return self.custom_sources_list - self.cached_customer_source_list_formula = formula + except Exception as error: + self.composite_logger.log_error("[APM] Error in modifying custom sources list. [Error={0}]".format(repr(error))) + return str(), str() # defaults code to safety - command = self.cmd_prep_custom_sources_list_template.replace("", base_classification if base_classification == "security" else "\"\"") - code, out = self.env_layer.run_command_output(command, False, False) - sources_content = self.env_layer.file_system.read_with_retry(self.custom_sources_list) - self.composite_logger.log_debug("[APM] Modified custom sources list with classification. [Code={0}][Out={1}][Content={2}]".format(str(code), str(out), str(sources_content))) # non-zero error code to be investigated + # Refresh repo + self.refresh_repo(source_parts_dir=self.current_source_parts_dir, source_list=self.current_source_list) - if max_patch_published_date != str(): - target = "://snapshot.ubuntu.com/ubuntu/{0}".format(self.max_patch_publish_date) + return self.current_source_parts_dir, self.current_source_list - if "://snapshot.ubuntu.com/ubuntu/" in sources_content: - sources_content_split = sources_content.split(" ") - for i in range(0, len(sources_content_split)): - if "://snapshot.ubuntu.com/ubuntu/" in sources_content_split[i]: - sources_content.replace(sources_content_split[i], target) + def __get_consolidated_source_parts_content(self, max_patch_published_date, base_classification): + # type: (str, str) -> (str, str) + """ Consolidates all list and sources files into a consistent format single source list """ + # read basic sources list - exception being thrown are acceptable + source_parts_list_content = str() + source_parts_deb882_style_content = str() - sources_content = sources_content.replace("://azure.archive.ubuntu.com/ubuntu/", target) - sources_content = sources_content.replace("://in.archive.ubuntu.com/ubuntu/", target) - sources_content = sources_content.replace("://security.ubuntu.com/ubuntu/", target) - sources_content = sources_content.replace("http://snapshot.ubuntu.com/", "https://snapshot.ubuntu.com/") - self.composite_logger.log_debug("[APM] Modified custom sources list with snapshot. 
[Code={0}][Out={1}][Content={2}]".format(str(code), str(out), str(sources_content))) + if os.path.isdir(self.APT_SOURCES_DIR_PATH): + # process files in directory + dir_path = self.APT_SOURCES_DIR_PATH + for file_name in [f for f in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, f))]: + try: + file_path = os.path.join(dir_path, file_name) - self.env_layer.file_system.write_with_retry_using_temp_file(self.custom_sources_list, sources_content) + if file_name.endswith(self.APT_SOURCES_DIR_LIST_EXT): # .list type + source_parts_list_content += "\n" + self.__read_one_line_style_list_format(file_path, max_patch_published_date, base_classification) + elif file_name.endswith(self.APT_SOURCES_DIR_SRC_EXT): # .sources type + source_parts_deb882_style_content += "\n" + self.__read_deb882_style_format(file_path, max_patch_published_date, base_classification) + + except Exception as error: # does not throw to allow patching to happen with functioning sources + self.composite_logger.log_error("[APM] Error while processing consolidated sources list. [File={0}][Error={1}]".format(file_name, repr(error))) + + return source_parts_deb882_style_content.strip(), source_parts_list_content.strip() + + def __read_one_line_style_list_format(self, file_path, max_patch_published_date, base_classification): + # type: (str, str, str) -> str + # Reads a *.list file and returns only lines that are functionally required + # Reference: tools/references/apt/sources.list + self.composite_logger.log_verbose("[APM] Reading source list for consolidation. [FilePath={0}]".format(file_path)) + + std_source_list_content = str() + sources_content = self.env_layer.file_system.read_with_retry(file_path) + self.composite_logger.log_verbose("[APM][Srclist] Input source list file. [FilePath={0}][Content={1}]".format(file_path, sources_content)) + sources_content_lines = sources_content.splitlines() + + for line in sources_content_lines: + if len(line.strip()) != 0 and not line.strip().startswith("#"): + if base_classification == Constants.PackageClassification.SECURITY and "security" not in line: + continue + std_source_list_content += "\n" + self.__apply_max_patch_publish_date(sources_content=line, max_patch_publish_date=max_patch_published_date) + + self.composite_logger.log_verbose("[APM][Srclist] Output source list slice. [FilePath={0}][TransformedContent={1}]".format(file_path, std_source_list_content)) + return std_source_list_content.strip() + + def __read_deb882_style_format(self, file_path, max_patch_published_date, base_classification): + # type: (str, str, str) -> str + # Reference: tools/references/apt/sources.list.d/ubuntu.sources + std_source_parts_content = str() + stanza = str() + + try: + source_parts_content = self.env_layer.file_system.read_with_retry(file_path) + "\n" + self.composite_logger.log_verbose("[APM][Deb882] Input source parts file. 
[FilePath={0}][Content={1}]".format(file_path, source_parts_content)) + source_parts_content_lines = source_parts_content.splitlines() + + for line in source_parts_content_lines: + if line.startswith("#"): # comments + continue + + if len(line.strip()) == 0: # stanza separating line + if stanza != str(): + if base_classification == str() or (base_classification == Constants.PackageClassification.SECURITY and "security" in stanza): + std_source_parts_content += self.__apply_max_patch_publish_date(sources_content=stanza, max_patch_publish_date=max_patch_published_date) + '\n' + stanza = str() + continue + + stanza += line + '\n' - self.refresh_repo(sources=self.custom_sources_list) except Exception as error: - self.composite_logger.log_error("[APM] Error in modifying custom sources list. [Error={0}]".format(repr(error))) - return str() # defaults code to safety + self.composite_logger.log_error("[APM][Deb882] Error while reading DEB882-style format. [File={0}][Error={1}]".format(file_path, repr(error))) + + self.composite_logger.log_verbose("[APM][Deb882] Output source parts slice. [FilePath={0}][TransformedContent={1}]".format(file_path, std_source_parts_content)) + return std_source_parts_content + + def __apply_max_patch_publish_date(self, sources_content, max_patch_publish_date): + # type: (str, str) -> str + if max_patch_publish_date == str(): + return sources_content + + candidates = ["://azure.archive.ubuntu.com/ubuntu/", "archive.ubuntu.com/ubuntu/", "security.ubuntu.com/ubuntu/", "://snapshot.ubuntu.com/ubuntu/"] + target = "https://snapshot.ubuntu.com/ubuntu/{0}".format(max_patch_publish_date) + matched = False + + for candidate in candidates: + if candidate in sources_content: + sources_content_split = sources_content.split(" ") + for i in range(0, len(sources_content_split)): + if candidate in sources_content_split[i]: + sources_content = sources_content.replace(sources_content_split[i], target) + sources_content = sources_content.replace("http://snapshot.ubuntu.com/", "https://snapshot.ubuntu.com/") + matched = True - return self.custom_sources_list + if not matched: + self.composite_logger.log_debug("[APM] Repo unsupported for snapshot. [RepoInfo={0}]".format(sources_content)) - def refresh_repo(self, sources=str()): - self.composite_logger.log("[APM] Refreshing local repo... [Sources={0}]".format(sources if sources != str() else "Default")) - self.invoke_package_manager(self.__generate_command(self.cmd_repo_refresh_template, sources)) + return sources_content + + def refresh_repo(self, source_parts_dir=str(), source_list=str()): + self.composite_logger.log("[APM] Refreshing local repo... [SourcePartsDir={0}][SourceList={1}]".format(source_parts_dir, source_list)) + self.invoke_package_manager(self.__generate_command_with_custom_sources(self.cmd_repo_refresh_template, source_parts_dir, source_list)) @staticmethod - def __generate_command(command_template, new_sources_list=str()): - # type: (str, str) -> str + def __generate_command_with_custom_sources(command_template, source_parts=str(), source_list=str()): + # type: (str, str, str) -> str """ Prepares a standard command to use custom sources. Pre-requisite: Refresh repo post list change. 
""" - return command_template.replace('', ('-oDir::Etc::Sourcelist={0}'.format(str(new_sources_list))) if new_sources_list != str() else str()) + if source_parts == str() and source_list == str(): + return command_template.replace('', str()) + else: + return command_template.replace('', ('-oDir::Etc::SourceParts={0}/ -oDir::Etc::SourceList={1}'.format(str(source_parts) if source_parts != str() else "/dev/null", + str(source_list) if source_list != str() else "/dev/null"))) # endregion Sources Management # region Get Available Updates def invoke_package_manager_advanced(self, command, raise_on_exception=True): """Get missing updates using the command input""" - self.composite_logger.log_debug('\nInvoking package manager using: ' + command) + self.composite_logger.log_verbose('[APM] Invoking package manager. [Command={0}]'.format(command)) code, out = self.env_layer.run_command_output(command, False, False) if code != self.apt_exitcode_ok and self.STR_DPKG_WAS_INTERRUPTED in out: self.composite_logger.log_error('[ERROR] YOU NEED TO TAKE ACTION TO PROCEED. The package manager on this machine is not in a healthy state, and ' 'Patch Management cannot proceed successfully. Before the next Patch Operation, please run the following ' 'command and perform any configuration steps necessary on the machine to return it to a healthy state: ' 'sudo dpkg --configure -a') self.telemetry_writer.write_execution_error(command, code, out) @@ -152,39 +275,28 @@ def invoke_package_manager_advanced(self, command, raise_on_exception=True): if raise_on_exception: raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) elif code != self.apt_exitcode_ok: - self.composite_logger.log('[ERROR] Package manager was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from package manager: " + str(code)) - self.composite_logger.log_warning(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.telemetry_writer.write_execution_error(command, code, out) - error_msg = 'Unexpected return code (' + str(code) + ') from package manager on command: ' + command + self.composite_logger.log_warning('[ERROR] Customer environment error. [Command={0}][Code={1}][Output={2}]'.format(command, str(code), str(out))) + error_msg = "Customer environment error: Investigate and resolve unexpected return code ({0}) from package manager on command: {1}".format(str(code), command) self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) if raise_on_exception: raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) # more known return codes should be added as appropriate else: # verbose diagnostic log - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from package manager: " + str(code)) - self.composite_logger.log_debug(" - Output from package manager: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") + self.composite_logger.log_debug('[APM] Invoked package manager. 
[Command={0}][Code={1}][Output={2}]'.format(command, str(code), str(out))) return out, code def invoke_apt_cache(self, command): """Invoke apt-cache using the command input""" - self.composite_logger.log_debug('Invoking apt-cache using: ' + command) + self.composite_logger.log_verbose('[APM] Invoking apt-cache using: ' + command) code, out = self.env_layer.run_command_output(command, False, False) if code != 0: - self.composite_logger.log('[ERROR] apt-cache was invoked using: ' + command) - self.composite_logger.log_warning(" - Return code from apt-cache: " + str(code)) - self.composite_logger.log_warning(" - Output from apt-cache: \n|\t" + "\n|\t".join(out.splitlines())) - error_msg = 'Unexpected return code (' + str(code) + ') from apt-cache on command: ' + command + self.composite_logger.log_warning('[ERROR] Customer environment error. [Command={0}][Code={1}][Output={2}]'.format(command, str(code), str(out))) + error_msg = "Customer environment error: Investigate and resolve unexpected return code (\'{0}\') from package manager on command: {1}".format(str(code), command) self.status_handler.add_error_to_status(error_msg, Constants.PatchOperationErrorCodes.PACKAGE_MANAGER_FAILURE) raise Exception(error_msg, "[{0}]".format(Constants.ERROR_ADDED_TO_STATUS)) # more known return codes should be added as appropriate else: # verbose diagnostic log - self.composite_logger.log_verbose("\n\n==[SUCCESS]===============================================================") - self.composite_logger.log_debug(" - Return code from apt-cache: " + str(code)) - self.composite_logger.log_debug(" - Output from apt-cache: \n|\t" + "\n|\t".join(out.splitlines())) - self.composite_logger.log_verbose("==========================================================================\n\n") + self.composite_logger.log_verbose('[APM] Invoked apt-cache. [Command={0}][Code={1}][Output={2}]'.format(command, str(code), str(out))) return out # region Classification-based (incl. All) update check @@ -194,7 +306,7 @@ def get_all_updates(self, cached=False): all_updates_versions = [] ubuntu_pro_client_all_updates_query_success = False - self.composite_logger.log_debug("\nDiscovering all packages...") + self.composite_logger.log_verbose("[APM] Discovering all packages...") # use Ubuntu Pro Client cached list when the conditions are met. if self.__pro_client_prereq_met and not len(self.ubuntu_pro_client_all_updates_cached) == 0: all_updates = self.ubuntu_pro_client_all_updates_cached @@ -205,25 +317,27 @@ def get_all_updates(self, cached=False): all_updates_versions = self.all_update_versions_cached if cached and not len(all_updates) == 0: - self.composite_logger.log_debug("Get all updates : [Cached={0}][PackagesCount={1}]]".format(cached, len(all_updates))) + self.composite_logger.log_debug("[APM] Get all updates : [Cached={0}][PackagesCount={1}]]".format(cached, len(all_updates))) return all_updates, all_updates_versions # when cached is False, query both default way and using Ubuntu Pro Client. 
- cmd = self.__generate_command(self.cmd_dist_upgrade_simulation_template, self.__get_custom_sources_to_spec(self.max_patch_publish_date)) + source_parts, source_list = self.__get_custom_sources_to_spec(self.max_patch_publish_date, base_classification=str()) + cmd = self.__generate_command_with_custom_sources(command_template=self.cmd_dist_upgrade_simulation_template, source_parts=source_parts, source_list=source_list) out = self.invoke_package_manager(cmd) self.all_updates_cached, self.all_update_versions_cached = self.extract_packages_and_versions(out) if self.__pro_client_prereq_met: ubuntu_pro_client_all_updates_query_success, self.ubuntu_pro_client_all_updates_cached, self.ubuntu_pro_client_all_updates_versions_cached = self.ubuntu_pro_client.get_all_updates() - - self.composite_logger.log_debug("Get all updates : [DefaultAllPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientAllPackagesCount={2}]".format(len(self.all_updates_cached), ubuntu_pro_client_all_updates_query_success, len(self.ubuntu_pro_client_all_updates_cached))) - - # Get the list of updates that are present in only one of the two lists. - different_updates = list(set(self.all_updates_cached) - set(self.ubuntu_pro_client_all_updates_cached)) + list(set(self.ubuntu_pro_client_all_updates_cached) - set(self.all_updates_cached)) - self.composite_logger.log_debug("Get all updates : [DifferentUpdatesCount={0}][Updates={1}]".format(len(different_updates), different_updates)) - - # Prefer Ubuntu Pro Client output when available. - if ubuntu_pro_client_all_updates_query_success: + pro_client_missed_updates = list(set(self.all_updates_cached) - set(self.ubuntu_pro_client_all_updates_cached)) + all_updates_missed_updates = list(set(self.ubuntu_pro_client_all_updates_cached) - set(self.all_updates_cached)) + self.composite_logger.log_debug("[APM-Pro] Get all updates : [DefaultAllPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientAllPackagesCount={2}]" + .format(len(self.all_updates_cached), ubuntu_pro_client_all_updates_query_success, len(self.ubuntu_pro_client_all_updates_cached))) + if len(pro_client_missed_updates) > 0: # not good, needs investigation + self.composite_logger.log_debug("[APM-Pro][!] Pro client missed updates found. [Count={0}][Updates={1}]".format(len(pro_client_missed_updates), pro_client_missed_updates)) + if len(all_updates_missed_updates) > 0: # interesting, for review + self.composite_logger.log_debug("[APM-Pro] Pro client only updates found. 
[Count={0}][Updates={1}]".format(len(all_updates_missed_updates), all_updates_missed_updates)) + + if ubuntu_pro_client_all_updates_query_success: # this needs to be revisited based on logs above return self.ubuntu_pro_client_all_updates_cached, self.ubuntu_pro_client_all_updates_versions_cached else: return self.all_updates_cached, self.all_update_versions_cached @@ -234,17 +348,24 @@ def get_security_updates(self): ubuntu_pro_client_security_packages = [] ubuntu_pro_client_security_package_versions = [] - self.composite_logger.log("\nDiscovering 'security' packages...") - cmd = self.__generate_command(self.cmd_dist_upgrade_simulation_template, self.__get_custom_sources_to_spec(self.max_patch_publish_date, base_classification="security")) + self.composite_logger.log_verbose("[APM] Discovering 'security' packages...") + source_parts, source_list = self.__get_custom_sources_to_spec(self.max_patch_publish_date, base_classification=Constants.PackageClassification.SECURITY) + cmd = self.__generate_command_with_custom_sources(self.cmd_dist_upgrade_simulation_template, source_parts=source_parts, source_list=source_list) out = self.invoke_package_manager(cmd) security_packages, security_package_versions = self.extract_packages_and_versions(out) + self.composite_logger.log_debug("[APM] Discovered 'security' packages. [Count={0}]".format(len(security_packages))) if self.__pro_client_prereq_met: ubuntu_pro_client_security_updates_query_success, ubuntu_pro_client_security_packages, ubuntu_pro_client_security_package_versions = self.ubuntu_pro_client.get_security_updates() - - self.composite_logger.log_debug("Get Security Updates : [DefaultSecurityPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientSecurityPackagesCount={2}]".format(len(security_packages), ubuntu_pro_client_security_updates_query_success, len(ubuntu_pro_client_security_packages))) - - if ubuntu_pro_client_security_updates_query_success: + pro_client_missed_updates = list(set(security_packages) - set(ubuntu_pro_client_security_packages)) + sec_updates_missed_updates = list(set(ubuntu_pro_client_security_packages) - set(security_packages)) + self.composite_logger.log_debug("[APM-Pro][Sec] Get Security Updates : [DefaultSecurityPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientSecurityPackagesCount={2}]".format(len(security_packages), ubuntu_pro_client_security_updates_query_success, len(ubuntu_pro_client_security_packages))) + if len(pro_client_missed_updates) > 0: # not good, needs investigation + self.composite_logger.log_debug("[APM-Pro][Sec][!] Pro client missed updates found. [Count={0}][Updates={1}]".format(len(pro_client_missed_updates), pro_client_missed_updates)) + if len(sec_updates_missed_updates) > 0: # interesting, for review + self.composite_logger.log_debug("[APM-Pro][Sec] Pro client only updates found. 
[Count={0}][Updates={1}]".format(len(sec_updates_missed_updates), sec_updates_missed_updates)) + + if ubuntu_pro_client_security_updates_query_success: # this needs to be revisited based on logs above return ubuntu_pro_client_security_packages, ubuntu_pro_client_security_package_versions else: return security_packages, security_package_versions @@ -258,7 +379,7 @@ def get_security_esm_updates(self): if self.__pro_client_prereq_met: ubuntu_pro_client_security_esm_updates_query_success, ubuntu_pro_client_security_esm_packages, ubuntu_pro_client_security_package_esm_versions = self.ubuntu_pro_client.get_security_esm_updates() - self.composite_logger.log_debug("Get Security ESM updates : [UbuntuProClientQuerySuccess={0}][UbuntuProClientSecurityEsmPackagesCount={1}]".format(ubuntu_pro_client_security_esm_updates_query_success, len(ubuntu_pro_client_security_esm_packages))) + self.composite_logger.log_debug("[APM-Pro] Get Security ESM updates : [UbuntuProClientQuerySuccess={0}][UbuntuProClientSecurityEsmPackagesCount={1}]".format(ubuntu_pro_client_security_esm_updates_query_success, len(ubuntu_pro_client_security_esm_packages))) return ubuntu_pro_client_security_esm_updates_query_success, ubuntu_pro_client_security_esm_packages, ubuntu_pro_client_security_package_esm_versions def get_other_updates(self): @@ -269,7 +390,7 @@ def get_other_updates(self): other_packages = [] other_package_versions = [] - self.composite_logger.log("\nDiscovering 'other' packages...") + self.composite_logger.log_verbose("[APM] Discovering 'other' packages...") all_packages, all_package_versions = self.get_all_updates(True) security_packages, security_package_versions = self.get_security_updates() @@ -280,8 +401,7 @@ def get_other_updates(self): if self.__pro_client_prereq_met: ubuntu_pro_client_other_updates_query_success, ubuntu_pro_client_other_packages, ubuntu_pro_client_other_package_versions = self.ubuntu_pro_client.get_other_updates() - - self.composite_logger.log_debug("Get Other Updates : [DefaultOtherPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientOtherPackagesCount={2}]".format(len(other_packages), ubuntu_pro_client_other_updates_query_success, len(ubuntu_pro_client_other_packages))) + self.composite_logger.log_debug("[APM-Pro] Get Other Updates : [DefaultOtherPackagesCount={0}][UbuntuProClientQuerySuccess={1}][UbuntuProClientOtherPackagesCount={2}]".format(len(other_packages), ubuntu_pro_client_other_updates_query_success, len(ubuntu_pro_client_other_packages))) if ubuntu_pro_client_other_updates_query_success: return ubuntu_pro_client_other_packages, ubuntu_pro_client_other_package_versions @@ -300,7 +420,7 @@ def extract_packages_and_versions(self, output): # Inst python3-update-manager [1:16.10.7] (1:16.10.8 Ubuntu:16.10/yakkety-updates [all]) [update-manager-core:amd64 ] # Inst update-manager-core [1:16.10.7] (1:16.10.8 Ubuntu:16.10/yakkety-updates [all]) - self.composite_logger.log_debug("\nExtracting package and version data...") + self.composite_logger.log_verbose("[APM] Extracting package and version data...") packages = [] versions = [] @@ -312,7 +432,7 @@ def extract_packages_and_versions(self, output): packages.append(package[0]) versions.append(package[1]) - self.composite_logger.log_debug(" - Extracted package and version data for " + str(len(packages)) + " packages [BASIC].") + self.composite_logger.log_verbose("[APM] Extracted package and version data for " + str(len(packages)) + " packages [BASIC].") # Discovering ESM packages - Distro versions with extended security 
maintenance lines = output.strip().split('\n') @@ -332,7 +452,7 @@ for package in esm_packages: packages.append(package) versions.append(Constants.UA_ESM_REQUIRED) - self.composite_logger.log_debug(" - Extracted package and version data for " + str(len(packages)) + " packages [TOTAL].") + self.composite_logger.log_verbose("[APM] Extracted package and version data for " + str(len(packages)) + " packages [TOTAL].") return packages, versions # endregion @@ -346,7 +466,8 @@ def install_updates_fail_safe(self, excluded_packages): return def install_security_updates_azgps_coordinated(self): - command = self.__generate_command(self.install_security_updates_azgps_coordinated_cmd, self.__get_custom_sources_to_spec(self.max_patch_publish_date, base_classification="security")) + source_parts, source_list = self.__get_custom_sources_to_spec(self.max_patch_publish_date, base_classification=Constants.PackageClassification.SECURITY) + command = self.__generate_command_with_custom_sources(self.install_security_updates_azgps_coordinated_cmd, source_parts=source_parts, source_list=source_list) out, code = self.invoke_package_manager_advanced(command, raise_on_exception=False) return code, out # endregion @@ -368,20 +489,20 @@ def get_all_available_versions_of_package(self, package_name): for line in lines: package_details = line.split(' |') if len(package_details) == 3: - self.composite_logger.log_debug(" - Applicable line: " + str(line)) + self.composite_logger.log_verbose(" - Applicable line: " + str(line)) package_versions.append(package_details[1].strip()) else: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + self.composite_logger.log_verbose(" - Inapplicable line: " + str(line)) return package_versions def is_package_version_installed(self, package_name, package_version): """ Returns true if the specific package version is installed """ - self.composite_logger.log_debug("\nCHECKING PACKAGE INSTALL STATUS FOR: " + str(package_name) + " (" + str(package_version) + ")") + self.composite_logger.log_verbose("\nCHECKING PACKAGE INSTALL STATUS FOR: " + str(package_name) + " (" + str(package_version) + ")") # DEFAULT METHOD - self.composite_logger.log_debug(" - [1/2] Verifying install status with Dpkg.") + self.composite_logger.log_verbose(" - [1/2] Verifying install status with Dpkg.") cmd = self.cmd_single_package_find_install_dpkg_template.replace('', package_name) code, output = self.env_layer.run_command_output(cmd, False, False) lines = output.strip().split('\n') @@ -392,13 +513,13 @@ def is_package_version_installed(self, package_name, package_version): # Use dpkg --info (= dpkg-deb --info) to examine archive files, # and dpkg --contents (= dpkg-deb --contents) to list their contents. # ------------------------------------------ ------------------- - self.composite_logger.log_debug(" - Return code: 1. The package is likely NOT present on the system.") + self.composite_logger.log_verbose(" - Return code: 1. The package is likely NOT present on the system.") for line in lines: if 'not installed' in line and package_name in line: - self.composite_logger.log_debug(" - Discovered to be not installed: " + str(line)) + self.composite_logger.log_verbose(" - Discovered to be not installed: " + str(line)) return False else: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + self.composite_logger.log_verbose(" - Inapplicable line: " + str(line)) self.telemetry_writer.write_event("[Installed check] Return code: 1. 
Unable to verify package not present on the system: " + str(output), Constants.TelemetryEventLevel.Verbose) elif code == 0: # likely found @@ -414,35 +535,35 @@ def is_package_version_installed(self, package_name, package_version): # Version: 5.7.25-0ubuntu0.16.04.2 # Depends: mysql-server-5.7 # ------------------------------------------ -------------------- - self.composite_logger.log_debug(" - Return code: 0. The package is likely present on the system.") + self.composite_logger.log_verbose(" - Return code: 0. The package is likely present on the system.") composite_found_flag = 0 for line in lines: if 'Package: ' in line: if package_name in line: composite_found_flag = composite_found_flag | 1 else: # should never hit for the way this is invoked, hence telemetry - self.composite_logger.log_debug(" - Did not match name: " + str(package_name) + " (" + str(line) + ")") + self.composite_logger.log_verbose(" - Did not match name: " + str(package_name) + " (" + str(line) + ")") self.telemetry_writer.write_event("[Installed check] Name did not match: " + package_name + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.TelemetryEventLevel.Verbose) continue if 'Version: ' in line: if package_version in line: composite_found_flag = composite_found_flag | 2 else: # should never hit for the way this is invoked, hence telemetry - self.composite_logger.log_debug(" - Did not match version: " + str(package_version) + " (" + str(line) + ")") + self.composite_logger.log_verbose(" - Did not match version: " + str(package_version) + " (" + str(line) + ")") self.telemetry_writer.write_event("[Installed check] Version did not match: " + str(package_version) + " (line=" + str(line) + ")(out=" + str(output) + ")", Constants.TelemetryEventLevel.Verbose) continue if 'Status: ' in line: if 'install ok installed' in line: composite_found_flag = composite_found_flag | 4 else: # should never hit for the way this is invoked, hence telemetry - self.composite_logger.log_debug(" - Did not match status: " + str(package_name) + " (" + str(line) + ")") + self.composite_logger.log_verbose(" - Did not match status: " + str(package_name) + " (" + str(line) + ")") self.telemetry_writer.write_event("[Installed check] Status did not match: 'install ok installed' (line=" + str(line) + ")(out=" + str(output) + ")", Constants.TelemetryEventLevel.Verbose) continue if composite_found_flag & 7 == 7: # whenever this becomes true, the exact package version is installed - self.composite_logger.log_debug(" - Package, Version and Status matched. Package is detected as 'Installed'.") + self.composite_logger.log_verbose(" - Package, Version and Status matched. Package is detected as 'Installed'.") return True - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) - self.composite_logger.log_debug(" - Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")") + self.composite_logger.log_verbose(" - Inapplicable line: " + str(line)) + self.composite_logger.log_verbose(" - Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")") self.telemetry_writer.write_event("Install status check did NOT find the package installed: (composite_found_flag=" + str(composite_found_flag) + ")(output=" + output + ")", Constants.TelemetryEventLevel.Verbose) else: # This is not expected to execute. If it does, the details will show up in telemetry. Improve this code with that information. 
self.composite_logger.log_debug(" - Unexpected return code from dpkg: " + str(code) + ". Output: " + str(output)) @@ -452,7 +573,7 @@ def is_package_version_installed(self, package_name, package_version): # Sample output format # Listing... Done # apt/xenial-updates,now 1.2.29 amd64 [installed] - self.composite_logger.log_debug(" - [2/2] Verifying install status with Apt.") + self.composite_logger.log_verbose(" - [2/2] Verifying install status with Apt.") cmd = self.cmd_single_package_find_install_apt_template.replace('', package_name) output = self.invoke_package_manager(cmd) lines = output.strip().split('\n') @@ -460,24 +581,24 @@ def is_package_version_installed(self, package_name, package_version): for line in lines: package_details = line.split(' ') if len(package_details) < 4: - self.composite_logger.log_debug(" - Inapplicable line: " + str(line)) + self.composite_logger.log_verbose(" - Inapplicable line: " + str(line)) else: - self.composite_logger.log_debug(" - Applicable line: " + str(line)) + self.composite_logger.log_verbose(" - Applicable line: " + str(line)) discovered_package_name = package_details[0].split('/')[0] # index out of bounds check is deliberately not being done if discovered_package_name != package_name: - self.composite_logger.log_debug(" - Did not match name: " + discovered_package_name + " (" + package_name + ")") + self.composite_logger.log_verbose(" - Did not match name: " + discovered_package_name + " (" + package_name + ")") continue if package_details[1] != package_version: - self.composite_logger.log_debug(" - Did not match version: " + package_details[1] + " (" + str(package_details[1]) + ")") + self.composite_logger.log_verbose(" - Did not match version: " + package_details[1] + " (" + str(package_details[1]) + ")") continue if 'installed' not in package_details[3]: - self.composite_logger.log_debug(" - Did not find status: " + str(package_details[3] + " (" + str(package_details[3]) + ")")) + self.composite_logger.log_verbose(" - Did not find status: " + str(package_details[3] + " (" + str(package_details[3]) + ")")) continue - self.composite_logger.log_debug(" - Package version specified was determined to be installed.") + self.composite_logger.log_verbose(" - Package version specified was determined to be installed.") self.telemetry_writer.write_event("[Installed check] Fallback code disagreed with dpkg.", Constants.TelemetryEventLevel.Verbose) return True - self.composite_logger.log_debug(" - Package version specified was determined to NOT be installed.") + self.composite_logger.log_verbose(" - Package version specified was determined to NOT be installed.") return False def get_dependent_list(self, packages): @@ -490,7 +611,7 @@ def get_dependent_list(self, packages): cmd = self.single_package_dependency_resolution_template.replace('', package_names) - self.composite_logger.log_debug("\nRESOLVING DEPENDENCIES USING COMMAND: " + str(cmd)) + self.composite_logger.log_verbose("\nRESOLVING DEPENDENCIES USING COMMAND: " + str(cmd)) output = self.invoke_package_manager(cmd) dependencies, dependency_versions = self.extract_packages_and_versions(output) @@ -499,7 +620,7 @@ def get_dependent_list(self, packages): if package in dependencies: dependencies.remove(package) - self.composite_logger.log_debug(str(len(dependencies)) + " dependent packages were found for packages '" + str(packages) + "'.") + self.composite_logger.log_verbose(str(len(dependencies)) + " dependent packages were found for packages '" + str(packages) + "'.") return dependencies def 
get_product_name(self, package_name): @@ -687,7 +808,7 @@ def is_reboot_pending(self): if ubuntu_pro_client_check_success: # Prefer Ubuntu Pro Client reboot status. reported_reboot_status = ubuntu_pro_client_reboot_status - self.composite_logger.log_debug("Reboot required advanced debug flags:[DefaultPendingFileExists={0}][DefaultPendingProcessesExists={1}][UbuntuProClientCheckSuccessful={2}][UbuntuProClientRebootStatus={3}][ReportedRebootStatus={4}][DefaultException={5}]".format(default_pending_file_exists, default_pending_processes_exists, ubuntu_pro_client_check_success, ubuntu_pro_client_reboot_status, reported_reboot_status, default_exception)) + self.composite_logger.log_debug("[APM] Reboot required advanced debug flags: [DefaultPendingFileExists={0}][DefaultPendingProcessesExists={1}][UbuntuProClientCheckSuccessful={2}][UbuntuProClientRebootStatus={3}][ReportedRebootStatus={4}][DefaultException={5}]".format(default_pending_file_exists, default_pending_processes_exists, ubuntu_pro_client_check_success, ubuntu_pro_client_reboot_status, reported_reboot_status, default_exception)) return reported_reboot_status def check_pro_client_prerequisites(self): @@ -699,14 +820,14 @@ def check_pro_client_prerequisites(self): except Exception as error: exception_error = repr(error) - self.composite_logger.log_debug("Ubuntu Pro Client pre-requisite checks:[IsFeatureEnabled={0}][IsOSVersionCompatible={1}][IsPythonCompatible={2}][Error={3}]".format(Constants.UbuntuProClientSettings.FEATURE_ENABLED, self.__get_os_major_version() <= Constants.UbuntuProClientSettings.MAX_OS_MAJOR_VERSION_SUPPORTED, self.__is_minimum_required_python_installed(), exception_error)) + self.composite_logger.log_debug("[APM-Pro] Ubuntu Pro Client pre-requisite checks:[IsFeatureEnabled={0}][IsOSVersionCompatible={1}][IsPythonCompatible={2}][Error={3}]".format(Constants.UbuntuProClientSettings.FEATURE_ENABLED, self.__get_os_major_version() <= Constants.UbuntuProClientSettings.MAX_OS_MAJOR_VERSION_SUPPORTED, self.__is_minimum_required_python_installed(), exception_error)) return self.__pro_client_prereq_met def set_security_esm_package_status(self, operation, packages): """Set the security-ESM classification for the esm packages.""" security_esm_update_query_success, security_esm_updates, security_esm_updates_versions = self.get_security_esm_updates() if self.__pro_client_prereq_met and security_esm_update_query_success and len(security_esm_updates) > 0: - self.telemetry_writer.write_event("set Security-ESM package status:[Operation={0}][Updates={1}]".format(operation, str(security_esm_updates)), Constants.TelemetryEventLevel.Verbose) + self.telemetry_writer.write_event("[APM] Set Security-ESM package status:[Operation={0}][Updates={1}]".format(operation, str(security_esm_updates)), Constants.TelemetryEventLevel.Verbose) if operation == Constants.ASSESSMENT: self.status_handler.set_package_assessment_status(security_esm_updates, security_esm_updates_versions, Constants.PackageClassification.SECURITY_ESM) # If the Ubuntu Pro Client is not attached, set the error with the code UA_ESM_REQUIRED. This will be used in portal to mark the VM as unattached to pro. @@ -715,7 +836,7 @@ def set_security_esm_package_status(self, operation, packages): elif operation == Constants.INSTALLATION: if security_esm_update_query_success: esm_packages_selected_to_install = [package for package in packages if package in security_esm_updates] - self.composite_logger.log_debug("Setting security ESM package status. 
[SelectedEsmPackagesCount={0}]".format(len(esm_packages_selected_to_install))) + self.composite_logger.log_debug("[APM] Setting security ESM package status. [SelectedEsmPackagesCount={0}]".format(len(esm_packages_selected_to_install))) self.status_handler.set_package_install_status_classification(security_esm_updates, security_esm_updates_versions, Constants.PackageClassification.SECURITY_ESM) def __get_os_major_version(self): @@ -761,7 +882,7 @@ def separate_out_esm_packages(self, packages, package_versions): if ua_esm_required_packages_found: self.status_handler.add_error_to_status("{0} patches requires Ubuntu Pro for Infrastructure with Extended Security Maintenance".format(len(ua_esm_required_packages)), Constants.PatchOperationErrorCodes.UA_ESM_REQUIRED) # Set the error status with the esm_package details. Will be used in portal. - self.composite_logger.log_debug("Filter esm packages : [TotalPackagesCount={0}][EsmPackagesCount={1}]".format(len(packages), len(ua_esm_required_packages))) + self.composite_logger.log_debug("[APM] Filter esm packages : [TotalPackagesCount={0}][EsmPackagesCount={1}]".format(len(packages), len(ua_esm_required_packages))) return non_esm_packages, non_esm_package_versions, ua_esm_required_packages, ua_esm_required_package_versions, ua_esm_required_packages_found def get_package_install_expected_avg_time_in_seconds(self): diff --git a/src/core/src/service_interfaces/LifecycleManagerAzure.py b/src/core/src/service_interfaces/LifecycleManagerAzure.py index 8e854b28..d3f7b7ca 100644 --- a/src/core/src/service_interfaces/LifecycleManagerAzure.py +++ b/src/core/src/service_interfaces/LifecycleManagerAzure.py @@ -122,10 +122,10 @@ def execution_start_check(self): self.composite_logger.log_debug("Completed execution start check.") def lifecycle_status_check(self): - self.composite_logger.log_debug("Performing lifecycle status check...") + self.composite_logger.log_verbose("Performing lifecycle status check...") extension_sequence = self.read_extension_sequence() if int(extension_sequence['number']) == int(self.execution_config.sequence_number): - self.composite_logger.log_debug("Extension sequence number verified to have not changed: {0}".format(str(extension_sequence['number']))) + self.composite_logger.log_verbose("Extension sequence number verified to have not changed: {0}".format(str(extension_sequence['number']))) self.update_core_sequence(completed=False) else: self.composite_logger.log_error("Extension goal state has changed. Terminating current sequence: {0}".format(self.execution_config.sequence_number)) @@ -133,7 +133,7 @@ def lifecycle_status_check(self): self.update_core_sequence(completed=True) # forced-to-complete scenario | extension wrapper will be watching for this event self.composite_logger.file_logger.close() self.env_layer.exit(0) - self.composite_logger.log_debug("Completed lifecycle status check.") + self.composite_logger.log_verbose("Completed lifecycle status check.") # End region State checkers # region - Identity diff --git a/src/core/tests/Test_AptitudePackageManagerCustomSources.py b/src/core/tests/Test_AptitudePackageManagerCustomSources.py new file mode 100644 index 00000000..e8b1c300 --- /dev/null +++ b/src/core/tests/Test_AptitudePackageManagerCustomSources.py @@ -0,0 +1,216 @@ +# Copyright 2024 Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Requires Python 2.7+ +import os +import shutil +import unittest +from os import mkdir + +from core.src.bootstrap.Constants import Constants +from core.tests.library.ArgumentComposer import ArgumentComposer +from core.tests.library.RuntimeCompositor import RuntimeCompositor +from core.src.package_managers import AptitudePackageManager + + +class TestAptitudePackageManagerCustomSources(unittest.TestCase): + def setUp(self): + self.argument_composer = ArgumentComposer().get_composed_arguments() + self.runtime = RuntimeCompositor(self.argument_composer, True, Constants.APT) + self.container = self.runtime.container + + def tearDown(self): + self.runtime.stop() + + def test_bad_custom_sources_to_spec_invocation(self): + package_manager = AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + sources_dir, sources_list = package_manager._AptitudePackageManager__get_custom_sources_to_spec(base_classification="other") # invalid call + self.assertEqual(sources_list, str()) + self.assertEqual(sources_dir, str()) + + def test_force_defensive_exception_handling_coverage(self): + package_manager = AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + package_manager._AptitudePackageManager__read_deb882_style_format("fake-path.list", "some-date", "security") + + package_manager = AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + tmp_path = os.path.join(self.runtime.scratch_path, "tmp") + mock_sources_path = self.__prep_scratch_with_sources(include_sources_list=True) + self.__adapt_package_manager_for_mock_sources(package_manager, mock_sources_path) + package_manager._AptitudePackageManager__read_one_line_style_list_format = None + package_manager._AptitudePackageManager__get_consolidated_source_parts_content("some-date", "security") + + def test_sources_list_and_parts_combinations(self): + # Tests 32 combinations of source configuration on disk and desired manipulations + caching + for include_sources_list in [True, False]: + for include_source_parts_list in [True, False]: + for include_source_parts_debstyle in [True, False]: + for include_max_patch_publish_date in [str(), "20240401T160000Z"]: + print("\n\nTesting combination: [SourcesList={0}][SourcePartsList={1}][SourcePartsDebstyle={2}][PublishDate={3}]".format( + include_sources_list, include_source_parts_list, include_source_parts_debstyle, include_max_patch_publish_date)) + self.__lib_test_custom_sources_with(include_sources_list=include_sources_list, + include_source_parts_list=include_source_parts_list, + include_source_parts_debstyle=include_source_parts_debstyle, + include_max_patch_publish_date=include_max_patch_publish_date) + + def __lib_test_custom_sources_with(self, include_sources_list=False, 
include_source_parts_list=False, include_source_parts_debstyle=False, + include_max_patch_publish_date=str()): + # type: (bool, bool, bool, str) -> None + # Provides the base unit for testing various source configurations and assertions on the outcomes + + # Prepare the file system for the test + tmp_path = os.path.join(self.runtime.scratch_path, "tmp") + mock_sources_path = self.__prep_scratch_with_sources(include_sources_list, include_source_parts_list, include_source_parts_debstyle) + + # Instantiate the package manager, and redirect sources in the test environment + package_manager = AptitudePackageManager.AptitudePackageManager(self.runtime.env_layer, self.runtime.execution_config, self.runtime.composite_logger, self.runtime.telemetry_writer, self.runtime.status_handler) + self.__adapt_package_manager_for_mock_sources(package_manager, mock_sources_path) + + # Checks swapping and caching: All -> Security -> All -> All + for i in range(3): + # All + expected_debstyle_entry_count = 2 if include_source_parts_debstyle else 0 # 2 entries in the debstyle mock + expected_sources_list_entry_count = (4 if include_sources_list else 0) + (5 if include_source_parts_list else 0) # 4 in regular file, 5 in mock folder + sources_dir, sources_list = package_manager._AptitudePackageManager__get_custom_sources_to_spec(include_max_patch_publish_date) + self.__check_custom_sources(sources_dir, sources_list, + sources_debstyle_expected=include_source_parts_debstyle, + sources_list_expected=include_sources_list, + security_only=False, + sources_debstyle_entry_count=expected_debstyle_entry_count, + sources_list_entry_count=expected_sources_list_entry_count, + max_patch_publish_date=include_max_patch_publish_date) + + # caching combinatorial exercise + if i >= 1: + continue + + # Security + expected_debstyle_entry_count = 1 if include_source_parts_debstyle else 0 # 1 security entry in the debstyle mock + expected_sources_list_entry_count = (1 if include_sources_list else 0) + (1 if include_source_parts_list else 0) # 1 security entry in regular file, 1 security entry in mock folder + sources_dir, sources_list = package_manager._AptitudePackageManager__get_custom_sources_to_spec(include_max_patch_publish_date, "Security") + self.__check_custom_sources(sources_dir, sources_list, + sources_debstyle_expected=include_source_parts_debstyle, + sources_list_expected=include_sources_list, + security_only=True, + sources_debstyle_entry_count=expected_debstyle_entry_count, + sources_list_entry_count=expected_sources_list_entry_count, + max_patch_publish_date=include_max_patch_publish_date) + + # Clean up file system after the test + self.__clear_mock_sources_path(mock_sources_path) + shutil.rmtree(tmp_path) + mkdir(tmp_path) + + def __check_custom_sources(self, sources_dir, sources_list, sources_debstyle_expected=False, sources_list_expected=False, + security_only=False, sources_debstyle_entry_count=-1, sources_list_entry_count=-1, + max_patch_publish_date=str()): + # type: (str, str, bool, bool, bool, int, int, str) -> None + # Selectively checks assertions and conditions based on the test scenario + + if sources_debstyle_expected: + self.assertTrue(os.path.isdir(sources_dir)) + source_parts_file = os.path.join(sources_dir, "azgps-src-parts.sources") + self.assertTrue(os.path.exists(source_parts_file)) + with self.runtime.env_layer.file_system.open(source_parts_file, 'r') as file_handle: + data = file_handle.read().split("\n\n") + self.assertEqual(len(data), sources_debstyle_entry_count) + for entry in data: + if 
security_only: + self.assertTrue("security" in entry) + if max_patch_publish_date != str(): + self.assertTrue(max_patch_publish_date in entry) + else: + self.assertFalse(os.path.isdir(sources_dir)) + + if sources_list_expected: + self.assertTrue(os.path.exists(sources_list)) + with self.runtime.env_layer.file_system.open(sources_list, 'r') as file_handle: + data = file_handle.readlines() + self.assertEqual(len(data), sources_list_entry_count) + for entry in data: + if security_only: + self.assertTrue("security" in entry) + if max_patch_publish_date != str() and "ppa" not in entry: # exception for unsupported repo + self.assertTrue(max_patch_publish_date in entry) + + # region - Mock sources preparation and clean up + def __prep_scratch_with_sources(self, include_sources_list=True, include_source_parts_list=True, include_source_parts_debstyle=True): + # type: (bool, bool, bool) -> str + # Prepares the file system with input test sources data + timestamp = self.runtime.env_layer.datetime.timestamp().replace(":", ".") + mock_sources_path = os.path.join(self.runtime.scratch_path, "apt-src-" + timestamp) + if os.path.isdir(mock_sources_path): + shutil.rmtree(mock_sources_path) + os.makedirs(mock_sources_path) + os.makedirs(os.path.join(mock_sources_path, "sources.list.d")) + + if include_sources_list: + self.runtime.env_layer.file_system.write_with_retry(os.path.join(mock_sources_path, "sources.list"), + data=self.__get_sources_data_one_line_style_def(), mode="w") + if include_source_parts_list: + self.runtime.env_layer.file_system.write_with_retry(os.path.join(mock_sources_path, "sources.list.d", "azgps-src.list"), + data=self.__get_sources_data_one_line_style_ext(), mode="w") + if include_source_parts_debstyle: + self.runtime.env_layer.file_system.write_with_retry(os.path.join(mock_sources_path, "sources.list.d", "azgps-src.sources"), + data=self.__get_sources_data_debstyle(), mode="w") + + return mock_sources_path + + @staticmethod + def __clear_mock_sources_path(mock_sources_path): + # type: (str) -> None + # Clears out the input test data + shutil.rmtree(mock_sources_path) + + @staticmethod + def __adapt_package_manager_for_mock_sources(package_manager, mock_sources_path): + # type: (object, str) -> None + # Modifies the package manager internals to the mock input data sources + package_manager.APT_SOURCES_LIST_PATH = os.path.join(mock_sources_path, "sources.list") + package_manager.APT_SOURCES_DIR_PATH = os.path.join(mock_sources_path, "sources.list.d") + + @staticmethod + def __get_sources_data_one_line_style_def(): + return "deb http://azure.archive.ubuntu.com/ubuntu/ focal main restricted\n" + \ + "deb http://azure.archive.ubuntu.com/ubuntu/ focal-security main restricted\n" + \ + "deb http://azure.archive.ubuntu.com/ubuntu/ focal universe\n" + \ + "deb http://azure.archive.ubuntu.com/ubuntu/ focal multiverse\n" + + @staticmethod + def __get_sources_data_one_line_style_ext(): + return "deb http://us.archive.ubuntu.com/ubuntu/ focal-backports main restricted universe multiverse\n" + \ + "deb http://ppa.launchpad.net/upubuntu-com/web/ubuntu focal main\n" + \ + "deb http://azure.archive.ubuntu.com/ubuntu/ focal-security universe\n" + \ + "deb http://in.archive.ubuntu.com/ubuntu/ focal multiverse\n" + \ + "deb http://cn.archive.ubuntu.com/ubuntu/ focal main\n" + + @staticmethod + def __get_sources_data_debstyle(): + return "## See the sources.list(5) manual page for further settings. 
\n" + \ + "Types: deb \n" + \ + "URIs: http://azure.archive.ubuntu.com/ubuntu/ \n" + \ + "Suites: noble noble-updates noble-backports \n" + \ + "Components: main universe restricted multiverse \n" + \ + "Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg \n" + \ + "\n" + \ + "## Ubuntu security updates. Aside from URIs and Suites, \n" + \ + "## this should mirror your choices in the previous section. \n" + \ + "Types: deb \n" + \ + "URIs: http://azure.archive.ubuntu.com/ubuntu/ \n" + \ + "Suites: noble-security \n" + \ + "Components: main universe restricted multiverse \n" + \ + "Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg \n" + # endregion + +if __name__ == '__main__': + unittest.main() diff --git a/src/core/tests/Test_PatchAssessor.py b/src/core/tests/Test_PatchAssessor.py index 6d0a9f11..9a50c994 100644 --- a/src/core/tests/Test_PatchAssessor.py +++ b/src/core/tests/Test_PatchAssessor.py @@ -56,7 +56,7 @@ def test_assessment_fail_with_status_update(self): self.assertRaises(Exception, self.runtime.patch_assessor.start_assessment) with open(self.runtime.execution_config.status_file_path, 'r') as file_handle: file_contents = json.loads(file_handle.read()) - self.assertTrue('Unexpected return code (100) from package manager on command: LANG=en_US.UTF8 sudo apt-get -s dist-upgrade' in str(file_contents)) + self.assertTrue('Customer environment error: Investigate and resolve unexpected return code (100) from package manager on command: ' in str(file_contents)) def test_assessment_telemetry_fail(self): backup_telemetry_writer = self.runtime.telemetry_writer diff --git a/src/core/tests/Test_PatchInstaller.py b/src/core/tests/Test_PatchInstaller.py index cfb235a2..c6282ad8 100644 --- a/src/core/tests/Test_PatchInstaller.py +++ b/src/core/tests/Test_PatchInstaller.py @@ -230,7 +230,7 @@ def test_patch_installer_for_azgps_coordinated(self): argument_composer.maximum_duration = "PT235M" argument_composer.health_store_id = "pub_offer_sku_2024.04.01" runtime = RuntimeCompositor(argument_composer.get_composed_arguments(), True, Constants.APT) - runtime.package_manager.custom_sources_list = os.path.join(argument_composer.temp_folder, "temp2.list") + runtime.package_manager.current_source_list = os.path.join(argument_composer.temp_folder, "temp2.list") # Path change runtime.set_legacy_test_type('HappyPath') self.assertTrue(runtime.patch_installer.start_installation()) diff --git a/src/core/tests/library/RuntimeCompositor.py b/src/core/tests/library/RuntimeCompositor.py index eaee7b20..0d3b2088 100644 --- a/src/core/tests/library/RuntimeCompositor.py +++ b/src/core/tests/library/RuntimeCompositor.py @@ -50,6 +50,7 @@ def __init__(self, argv=Constants.DEFAULT_UNSPECIFIED_VALUE, legacy_mode=False, self.vm_cloud_type = vm_cloud_type Constants.SystemPaths.SYSTEMD_ROOT = os.getcwd() # mocking to pass a basic systemd check in Windows self.is_github_runner = os.getenv('RUNNER_TEMP', None) is not None + self.scratch_path = os.path.join(os.path.curdir, "scratch") # speed up test execution Constants.MAX_FILE_OPERATION_RETRY_COUNT = 1 diff --git a/src/tools/references/apt/sources.list b/src/tools/references/apt/sources.list new file mode 100644 index 00000000..f33c6e81 --- /dev/null +++ b/src/tools/references/apt/sources.list @@ -0,0 +1,57 @@ +## Note, this file is written by cloud-init on first boot of an instance +## modifications made here will not survive a re-bundle. +## if you wish to make changes you can: +## a.) 
add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg +## or do the same in user-data +## b.) add sources in /etc/apt/sources.list.d +## c.) make changes to template file /etc/cloud/templates/sources.list.tmpl + +# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to +# newer versions of the distribution. +deb http://azure.archive.ubuntu.com/ubuntu/ focal main restricted +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal main restricted + +## Major bug fix updates produced after the final release of the +## distribution. +deb http://azure.archive.ubuntu.com/ubuntu/ focal-updates main restricted +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-updates main restricted + +## N.B. software from this repository is ENTIRELY UNSUPPORTED by the Ubuntu +## team. Also, please note that software in universe WILL NOT receive any +## review or updates from the Ubuntu security team. +deb http://azure.archive.ubuntu.com/ubuntu/ focal universe +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal universe +deb http://azure.archive.ubuntu.com/ubuntu/ focal-updates universe +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-updates universe + +## N.B. software from this repository is ENTIRELY UNSUPPORTED by the Ubuntu +## team, and may not be under a free licence. Please satisfy yourself as to +## your rights to use the software. Also, please note that software in +## multiverse WILL NOT receive any review or updates from the Ubuntu +## security team. +deb http://azure.archive.ubuntu.com/ubuntu/ focal multiverse +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal multiverse +deb http://azure.archive.ubuntu.com/ubuntu/ focal-updates multiverse +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-updates multiverse + +## N.B. software from this repository may not have been tested as +## extensively as that contained in the main release, although it includes +## newer versions of some applications which may provide useful features. +## Also, please note that software in backports WILL NOT receive any review +## or updates from the Ubuntu security team. +deb http://azure.archive.ubuntu.com/ubuntu/ focal-backports main restricted universe multiverse +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-backports main restricted universe multiverse + +## Uncomment the following two lines to add software from Canonical's +## 'partner' repository. +## This software is not part of Ubuntu, but is offered by Canonical and the +## respective vendors as a service to Ubuntu users. 
+# deb http://archive.canonical.com/ubuntu focal partner +# deb-src http://archive.canonical.com/ubuntu focal partner + +deb http://azure.archive.ubuntu.com/ubuntu/ focal-security main restricted +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-security main restricted +deb http://azure.archive.ubuntu.com/ubuntu/ focal-security universe +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-security universe +deb http://azure.archive.ubuntu.com/ubuntu/ focal-security multiverse +# deb-src http://azure.archive.ubuntu.com/ubuntu/ focal-security multiverse diff --git a/src/tools/references/apt/sources.list.d/ubuntu-esm-apps.sources b/src/tools/references/apt/sources.list.d/ubuntu-esm-apps.sources new file mode 100644 index 00000000..f819da22 --- /dev/null +++ b/src/tools/references/apt/sources.list.d/ubuntu-esm-apps.sources @@ -0,0 +1,6 @@ +# Written by ubuntu-pro-client +Types: deb +URIs: https://esm.ubuntu.com/apps/ubuntu +Suites: noble-apps-security noble-apps-updates +Components: main +Signed-By: /usr/share/keyrings/ubuntu-pro-esm-apps.gpg diff --git a/src/tools/references/apt/sources.list.d/ubuntu-esm-infra.sources b/src/tools/references/apt/sources.list.d/ubuntu-esm-infra.sources new file mode 100644 index 00000000..8a339f2d --- /dev/null +++ b/src/tools/references/apt/sources.list.d/ubuntu-esm-infra.sources @@ -0,0 +1,6 @@ +# Written by ubuntu-pro-client +Types: deb +URIs: https://esm.ubuntu.com/infra/ubuntu +Suites: noble-infra-security noble-infra-updates +Components: main +Signed-By: /usr/share/keyrings/ubuntu-pro-esm-infra.gpg diff --git a/src/tools/references/apt/sources.list.d/ubuntu.sources b/src/tools/references/apt/sources.list.d/ubuntu.sources new file mode 100644 index 00000000..f083b15a --- /dev/null +++ b/src/tools/references/apt/sources.list.d/ubuntu.sources @@ -0,0 +1,55 @@ +## Note, this file is written by cloud-init on first boot of an instance +## modifications made here will not survive a re-bundle. +## +## If you wish to make changes you can: +## a.) add 'apt_preserve_sources_list: true' to /etc/cloud/cloud.cfg +## or do the same in user-data +## b.) add supplemental sources in /etc/apt/sources.list.d +## c.) make changes to template file +## /etc/cloud/templates/sources.list.ubuntu.deb822.tmpl +## + +# See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to +# newer versions of the distribution. + +## Ubuntu distribution repository +## +## The following settings can be adjusted to configure which packages to use from Ubuntu. +## Mirror your choices (except for URIs and Suites) in the security section below to +## ensure timely security updates. +## +## Types: Append deb-src to enable the fetching of source package. +## URIs: A URL to the repository (you may add multiple URLs) +## Suites: The following additional suites can be configured +## -updates - Major bug fix updates produced after the final release of the +## distribution. +## -backports - software from this repository may not have been tested as +## extensively as that contained in the main release, although it includes +## newer versions of some applications which may provide useful features. +## Also, please note that software in backports WILL NOT receive any review +## or updates from the Ubuntu security team. +## Components: Aside from main, the following components can be added to the list +## restricted - Software that may not be under a free license, or protected by patents. +## universe - Community maintained packages. 
Software in this repository receives maintenance +## from volunteers in the Ubuntu community, or a 10 year security maintenance +## commitment from Canonical when an Ubuntu Pro subscription is attached. +## multiverse - Community maintained of restricted. Software from this repository is +## ENTIRELY UNSUPPORTED by the Ubuntu team, and may not be under a free +## licence. Please satisfy yourself as to your rights to use the software. +## Also, please note that software in multiverse WILL NOT receive any +## review or updates from the Ubuntu security team. +## +## See the sources.list(5) manual page for further settings. +Types: deb +URIs: http://azure.archive.ubuntu.com/ubuntu/ +Suites: noble noble-updates noble-backports +Components: main universe restricted multiverse +Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg + +## Ubuntu security updates. Aside from URIs and Suites, +## this should mirror your choices in the previous section. +Types: deb +URIs: http://azure.archive.ubuntu.com/ubuntu/ +Suites: noble-security +Components: main universe restricted multiverse +Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg