diff --git a/.gitignore b/.gitignore index b6e47617..46dbdae2 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,7 @@ dmypy.json # Pyre type checker .pyre/ + +# JetBrains Rider +.idea/ +*.sln.iml diff --git a/src/extension/__init__.py b/src/extension/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/extension/src/ActionHandler.py b/src/extension/src/ActionHandler.py new file mode 100644 index 00000000..2e452408 --- /dev/null +++ b/src/extension/src/ActionHandler.py @@ -0,0 +1,76 @@ +import datetime +from src.Constants import Constants +from src.EnableCommandHandler import EnableCommandHandler +from src.InstallCommandHandler import InstallCommandHandler + + +class ActionHandler(object): + """Responsible for identifying the action to perform based on the user input""" + def __init__(self, logger, utility, runtime_context_handler, json_file_handler, ext_env_handler, ext_config_settings_handler, core_state_handler, ext_state_handler, ext_output_status_handler, process_handler, cmd_exec_start_time, seq_no): + self.logger = logger + self.utility = utility + self.runtime_context_handler = runtime_context_handler + self.json_file_handler = json_file_handler + self.ext_env_handler = ext_env_handler + self.ext_config_settings_handler = ext_config_settings_handler + self.core_state_handler = core_state_handler + self.ext_state_handler = ext_state_handler + self.ext_output_status_handler = ext_output_status_handler + self.process_handler = process_handler + self.cmd_exec_start_time = cmd_exec_start_time + self.seq_no = seq_no + + def determine_operation(self, command): + switcher = { + "-install": self.install, + "-uninstall": self.uninstall, + "-disable": self.disable, + "-enable": self.enable, + "-update": self.update, + "-reset": self.reset + } + try: + return switcher[command]() + except KeyError as e: + raise e + + def install(self): + self.logger.log("Extension installation started") + install_command_handler = 
InstallCommandHandler(self.logger, self.ext_env_handler) + return install_command_handler.execute_handler_action() + + def update(self): + """ as per the extension user guide, upon update request, Azure agent calls + 1. disable on the prev version + 2. update on the new version + 3. uninstall on the prev version + 4. install (if updateMode is UpdateWithInstall) + 5. enable on the new version + on uninstall the agent deletes configuration files""" + # todo: in the test run verify if CoreState.json, ExtState.json and the .status files are deleted, if yes, move them to a separate location + self.logger.log("Extension updated") + return Constants.ExitCode.Okay + + def uninstall(self): + # ToDo: verify if the agent deletes config files. And find out from the extension/agent team if we need to delete older logs + self.logger.log("Extension uninstalled") + return Constants.ExitCode.Okay + + def enable(self): + self.logger.log("Enable triggered on extension") + enable_command_handler = EnableCommandHandler(self.logger, self.utility, self.runtime_context_handler, self.ext_env_handler, self.ext_config_settings_handler, self.core_state_handler, self.ext_state_handler, self.ext_output_status_handler, self.process_handler, self.cmd_exec_start_time, self.seq_no) + return enable_command_handler.execute_handler_action() + + def disable(self): + self.logger.log("Disable triggered on extension") + prev_patch_max_end_time = self.cmd_exec_start_time + datetime.timedelta(hours=0, minutes=Constants.DISABLE_MAX_RUNTIME) + self.runtime_context_handler.process_previous_patch_operation(self.core_state_handler, self.process_handler, prev_patch_max_end_time, core_state_content=None) + self.logger.log("Extension disabled successfully") + return Constants.ExitCode.Okay + + def reset(self): + #ToDo: do we have to delete log and status files? and raise error if delete fails? 
+ self.logger.log("Reset triggered on extension, deleting CoreState and ExtState files") + self.utility.delete_file(self.core_state_handler.dir_path, self.core_state_handler.file, raise_if_not_found=False) + self.utility.delete_file(self.ext_state_handler.dir_path, self.ext_state_handler.file, raise_if_not_found=False) + return Constants.ExitCode.Okay diff --git a/src/extension/src/Constants.py b/src/extension/src/Constants.py new file mode 100644 index 00000000..665b9722 --- /dev/null +++ b/src/extension/src/Constants.py @@ -0,0 +1,132 @@ +import os + + +class Constants(object): + """Static class contains all constant variables""" + + class EnumBackport(object): + class __metaclass__(type): + def __iter__(self): + for item in self.__dict__: + if item == self.__dict__[item]: + yield item + + # Runtime environments + TEST = 'Test' + DEV = 'Dev' + PROD = 'Prod' # Azure Native Patch Management + UNKNOWN_ENV = 'Unknown' # Non-functional code placeholder prior to compile + + # File Constants + HANDLER_ENVIRONMENT_FILE = 'HandlerEnvironment.json' + HANDLER_MANIFEST_FILE = 'HandlerManifest.json' + CORE_STATE_FILE = 'CoreState.json' + EXT_STATE_FILE = 'ExtState.json' + HANDLER_ENVIRONMENT_FILE_PATH = os.getcwd() + CONFIG_SETTINGS_FILE_EXTENSION = '.settings' + STATUS_FILE_EXTENSION = '.status' + CORE_CODE_FILE_NAME = 'MsftLinuxPatchCore.py' + LOG_FILE_EXTENSION = '.log' + LOG_FILES_TO_RETAIN = 10 + MAX_LOG_FILES_ALLOWED = 40 + + # Environment variables + SEQ_NO_ENVIRONMENT_VAR = "ConfigSequenceNumber" + + # Max runtime for specific commands in minutes + ENABLE_MAX_RUNTIME = 3 + DISABLE_MAX_RUNTIME = 13 + + # Todo: will be implemented later + # Telemetry Categories + TelemetryExtState = "State" + TelemetryConfig = "Config" + TelemetryError = "Error" + TelemetryWarning = "Warning" + TelemetryInfo = "Info" + TelemetryDebug = "Debug" + + # Re-try limit for file operations + MAX_IO_RETRIES = 5 + + # Operations + NOOPERATION = "NoOperation" + PATCH_NOOPERATION_SUMMARY = 
"PatchNoOperationSummary" + + # HandlerEnvironment constants + class EnvSettingsFields(EnumBackport): + version = "version" + settings_parent_key = "handlerEnvironment" + log_folder = "logFolder" + config_folder = "configFolder" + status_folder = "statusFolder" + + # Config Settings json keys + RUNTIME_SETTINGS = "runtimeSettings" + HANDLER_SETTINGS = "handlerSettings" + PUBLIC_SETTINGS = "publicSettings" + + # Public Settings within Config Settings + class ConfigPublicSettingsFields(EnumBackport): + operation = "operation" + activity_id = "activityId" + start_time = "startTime" + maximum_duration = "maximumDuration" + reboot_setting = "rebootSetting" + include_classifications = "classificationsToInclude" + include_patches = "patchesToInclude" + exclude_patches = "patchesToExclude" + internal_settings = "internalSettings" + + # ExtState.json keys + class ExtStateFields(EnumBackport): + ext_seq = "extensionSequence" + ext_seq_number = "number" + ext_seq_achieve_enable_by = "achieveEnableBy" + ext_seq_operation = "operation" + + # .status keys + class StatusFileFields(EnumBackport): + version = "version" + timestamp_utc = "timestampUTC" + status = "status" + status_name = "name" + status_operation = "operation" + status_status = "status" + status_code = "code" + status_formatted_message = "formattedMessage" + status_formatted_message_lang = "lang" + status_formatted_message_message = "message" + status_substatus = "substatus" + + # CoreState.json keys + class CoreStateFields(EnumBackport): + parent_key = "coreSequence" + number = "number" + action = "action" + completed = "completed" + last_heartbeat = "lastHeartbeat" + process_ids = "processIds" + + # Status values + class Status(EnumBackport): + Transitioning = "Transitioning" + Error = "Error" + Success = "Success" + Warning = "Warning" + + class ExitCode(EnumBackport): + Okay = 0 + HandlerFailed = -1 + MissingConfig = -2 + BadConfig = -3 + UnsupportedOperatingSystem = 51 + MissingDependency = 52 + 
ConfigurationError = 53 + BadHandlerEnvironmentFile = 3560 + UnableToReadStatusFile = 3561 + CreateFileLoggerFailure = 3562 + ReadingAndDeserializingConfigFileFailure = 3563 + InvalidConfigSettingPropertyValue = 3564 + CreateLoggerFailure = 3565 + CreateStatusWriterFailure = 3566 diff --git a/src/extension/src/EnableCommandHandler.py b/src/extension/src/EnableCommandHandler.py new file mode 100644 index 00000000..d1fb4685 --- /dev/null +++ b/src/extension/src/EnableCommandHandler.py @@ -0,0 +1,104 @@ +import datetime +from src.Constants import Constants + + +class EnableCommandHandler(object): + """ Responsible for executing the action for enable command """ + def __init__(self, logger, utility, runtime_context_handler, ext_env_handler, ext_config_settings_handler, core_state_handler, ext_state_handler, ext_output_status_handler, process_handler, cmd_exec_start_time, seq_no): + self.logger = logger + self.utility = utility + self.runtime_context_handler = runtime_context_handler + self.ext_env_handler = ext_env_handler + self.ext_config_settings_handler = ext_config_settings_handler + self.core_state_handler = core_state_handler + self.ext_state_handler = ext_state_handler + self.ext_output_status_handler = ext_output_status_handler + self.process_handler = process_handler + self.cmd_exec_start_time = cmd_exec_start_time + self.seq_no = seq_no + self.config_public_settings = Constants.ConfigPublicSettingsFields + self.core_state_fields = Constants.CoreStateFields + self.status = Constants.Status + + def execute_handler_action(self): + """ Responsible for taking appropriate action for enable command as per the request sent in Handler Configuration file by user """ + try: + config_settings = self.ext_config_settings_handler.read_file(self.seq_no) + prev_patch_max_end_time = self.cmd_exec_start_time + datetime.timedelta(hours=0, minutes=Constants.ENABLE_MAX_RUNTIME) + self.ext_state_handler.create_file(self.seq_no, 
config_settings.__getattribute__(self.config_public_settings.operation), prev_patch_max_end_time) + core_state_content = self.core_state_handler.read_file() + + # if NoOperation is requested, terminate all running processes from previous operation and update status file + if config_settings.__getattribute__(self.config_public_settings.operation) == Constants.NOOPERATION: + self.logger.log("NoOperation requested. Terminating older patch operation, if still in progress.") + self.process_nooperation(config_settings, core_state_content) + else: + # if any of the other operations are requested, verify if request is a new request or a re-enable, by comparing sequence number from the prev request and current one + if core_state_content is None or core_state_content.__getattribute__(self.core_state_fields.number) is None: + # first patch request for the VM + self.logger.log("No state information was found for any previous patch operation. Launching a new patch operation.") + self.launch_new_process(config_settings, create_status_output_file=True) + else: + if int(core_state_content.__getattribute__(self.core_state_fields.number)) != int(self.seq_no): + # new request + self.process_enable_request(config_settings, prev_patch_max_end_time, core_state_content) + else: + # re-enable request + self.process_reenable_request(config_settings, core_state_content) + except Exception as error: + self.logger.log_error("Failed to execute enable. [Exception={0}]".format(repr(error))) + raise + + def process_enable_request(self, config_settings, prev_patch_max_end_time, core_state_content): + """ Called when the current request is different from the one before. 
Identifies and waits for the previous request action to complete, if required before addressing the current request """ + self.logger.log("Terminating older patch operation, if still in progress, as per it's completion duration and triggering the new requested patch opertaion.") + self.runtime_context_handler.process_previous_patch_operation(self.core_state_handler, self.process_handler, prev_patch_max_end_time, core_state_content) + self.utility.delete_file(self.core_state_handler.dir_path, self.core_state_handler.file) + self.launch_new_process(config_settings, create_status_output_file=True) + + def process_reenable_request(self, config_settings, core_state_content): + """ Called when the current request has the same config as the one before it. Restarts the operation if the previous request has errors, no action otherwise """ + self.logger.log("This is the same request as the previous patch operation. Checking previous request's status") + if core_state_content.__getattribute__(self.core_state_fields.completed).lower() == 'false': + running_process_ids = self.process_handler.identify_running_processes(core_state_content.__getattribute__(self.core_state_fields.process_ids)) + if len(running_process_ids) == 0: + self.logger.log("Re-triggering the patch operation as the previous patch operation was not running and hadn't marked completion either.") + self.utility.delete_file(self.core_state_handler.dir_path, self.core_state_handler.file) + self.launch_new_process(config_settings, create_status_output_file=False) + else: + self.logger.log("Patch operation is in progress from the previous request. [Operation={0}]".format(config_settings.__getattribute__(self.config_public_settings.operation))) + exit(Constants.ExitCode.Okay) + + else: + self.logger.log("Patch operation already completed in the previous request. 
[Operation={0}]".format(config_settings.__getattribute__(self.config_public_settings.operation))) + exit(Constants.ExitCode.Okay) + + def launch_new_process(self, config_settings, create_status_output_file): + """ Creates .status to report the current request's status and launches core code to handle the requested operation """ + # create Status file + if create_status_output_file: + self.ext_output_status_handler.write_status_file(self.seq_no, self.ext_env_handler.status_folder, config_settings.__getattribute__(self.config_public_settings.operation), substatus_json=[], status=self.status.Transitioning.lower()) + else: + self.ext_output_status_handler.update_file(self.seq_no, self.ext_env_handler.status_folder) + # launch core code in a process and exit extension handler + process = self.process_handler.start_daemon(self.seq_no, config_settings, self.ext_env_handler) + self.logger.log("exiting extension handler") + exit(Constants.ExitCode.Okay) + + def process_nooperation(self, config_settings, core_state_content): + activity_id = config_settings.__getattribute__(self.config_public_settings.activity_id) + operation = config_settings.__getattribute__(self.config_public_settings.operation) + start_time = config_settings.__getattribute__(self.config_public_settings.start_time) + try: + self.ext_output_status_handler.set_nooperation_substatus_json(self.seq_no, self.ext_env_handler.status_folder, operation, activity_id, start_time, status=Constants.Status.Transitioning) + self.runtime_context_handler.terminate_processes_from_previous_operation(self.process_handler, core_state_content) + self.utility.delete_file(self.core_state_handler.dir_path, self.core_state_handler.file, raise_if_not_found=False) + # ToDo: log prev activity id later + self.ext_output_status_handler.set_nooperation_substatus_json(self.seq_no, self.ext_env_handler.status_folder, operation, activity_id, start_time, status=Constants.Status.Success) + self.logger.log("exiting extension handler") + 
exit(Constants.ExitCode.Okay) + except Exception as error: + self.logger.log("Error executing NoOperation.") + self.ext_output_status_handler.set_nooperation_substatus_json(self.seq_no, self.ext_env_handler.status_folder, operation, activity_id, start_time, status=Constants.Status.Error) + + diff --git a/src/extension/src/HandlerManifest.json b/src/extension/src/HandlerManifest.json new file mode 100644 index 00000000..3f08d65e --- /dev/null +++ b/src/extension/src/HandlerManifest.json @@ -0,0 +1,16 @@ +[ + { + "version": 1.0, + "handlerManifest": { + "disableCommand": "MsftLinuxPatchExtShim.sh -d", + "enableCommand": "MsftLinuxPatchExtShim.sh -e", + "installCommand": "MsftLinuxPatchExtShim.sh -i", + "uninstallCommand": "MsftLinuxPatchExtShim.sh -u", + "updateCommand": "MsftLinuxPatchExtShim.sh -p", + "resetStateCommand": "MsftLinuxPatchExtShim.sh -r", + "rebootAfterInstall": false, + "reportHeartbeat": false, + "updateMode": "UpdateWithoutInstall" + } + } +] \ No newline at end of file diff --git a/src/extension/src/InstallCommandHandler.py b/src/extension/src/InstallCommandHandler.py new file mode 100644 index 00000000..a511e6fd --- /dev/null +++ b/src/extension/src/InstallCommandHandler.py @@ -0,0 +1,64 @@ +import sys +from src.Constants import Constants + + +class InstallCommandHandler(object): + + def __init__(self, logger, ext_env_handler): + self.logger = logger + self.ext_env_handler = ext_env_handler + + def execute_handler_action(self): + self.validate_os_type() + self.validate_environment() + self.logger.log("Install Command Completed") + return Constants.ExitCode.Okay + + def validate_os_type(self): + os_type = sys.platform + self.logger.log("Validating OS. 
[Platform={0}]".format(os_type)) + if not os_type.__contains__('linux'): + error_msg = "Incompatible system: This update is for Linux OS" + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + return True + + def validate_environment(self): + file = Constants.HANDLER_ENVIRONMENT_FILE + env_settings_fields = Constants.EnvSettingsFields + config_type = env_settings_fields.settings_parent_key + self.logger.log("Validating file. [File={0}]".format(file)) + + if self.ext_env_handler.handler_environment_json is not None and self.ext_env_handler.handler_environment_json is not Exception: + if len(self.ext_env_handler.handler_environment_json) != 1: + error_msg = "Incorrect file format. [File={0}]".format(file) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + + self.validate_key(config_type, self.ext_env_handler.handler_environment_json[0], 'dict', True, file) + self.validate_key(env_settings_fields.log_folder, self.ext_env_handler.handler_environment_json[0][config_type], ['str', 'unicode'], True, file) + self.validate_key(env_settings_fields.config_folder, self.ext_env_handler.handler_environment_json[0][config_type], ['str', 'unicode'], True, file) + self.validate_key(env_settings_fields.status_folder, self.ext_env_handler.handler_environment_json[0][config_type], ['str', 'unicode'], True, file) + self.logger.log("Handler Environment validated") + else: + error_msg = "No content in file. [File={0}]".format(file) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + + """ Validates json files for required key/value pairs """ + def validate_key(self, key, config_type, data_type, is_required, file): + if is_required: + # Required key doesn't exist in config file + if key not in config_type: + error_msg = "Config not found in file. 
[Config={0}] [File={1}]".format(key, file) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + # Required key doesn't have value + elif data_type is not bool and not config_type[key]: + error_msg = "Empty value error. [Config={0}]".format(key) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + # Required key does not have value of expected datatype + elif type(config_type[key]).__name__ not in data_type: + error_msg = "Unexpected data type. [config={0}] in [file={1}]".format(key, file) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) + else: + # Expected data type for an optional key + if key in config_type and config_type[key] and type(config_type[key]).__name__ not in data_type: + error_msg = "Unexpected data type. [config={0}] in [file={1}]".format(key, file) + self.logger.log_error_and_raise_new_exception(error_msg, Exception) diff --git a/src/extension/src/MsftLinuxPatchExtShim.sh b/src/extension/src/MsftLinuxPatchExtShim.sh new file mode 100644 index 00000000..f1de10a5 --- /dev/null +++ b/src/extension/src/MsftLinuxPatchExtShim.sh @@ -0,0 +1,103 @@ +#!/usr/bin/env bash + +# Keeping the default command +COMMAND="MsftLinuxPatchExt.py" +PYTHON="" + +USAGE="$(basename "$0") [-h] [-i|--install] [-u|--uninstall] [-d|--disable] [-e|--enable] [-p|--update] [-r|--reset] +Program to find the installed python on the box and invoke a Python extension script. +where: + -h|--help show this help text + -i|--install install the extension + -u|--uninstall uninstall the extension + -d|--disable disable the extension + -e|--enable enable the extension + -p|--update update the extension + -r|--reset reset the extension +" + +function find_python(){ + local python_exec_command=$1 + + # Check if there is python defined. + if command -v python >/dev/null 2>&1 ; then + eval ${python_exec_command}="python" + else + # Python was not found. Searching for Python3 now. 
+ if command -v python3 >/dev/null 2>&1 ; then + eval ${python_exec_command}="python3" + fi + fi +} + +# Transform long options to short ones for getopts support (getopts doesn't support long args) +for arg in "$@"; do + shift + case "$arg" in + "--help") set -- "$@" "-h" ;; + "--install") set -- "$@" "-i" ;; + "--update") set -- "$@" "-p" ;; + "--enable") set -- "$@" "-e" ;; + "--disable") set -- "$@" "-d" ;; + "--uninstall") set -- "$@" "-u" ;; + "--reset") set -- "$@" "-r" ;; + *) set -- "$@" "$arg" + esac +done + +if [ -z "$arg" ] +then + echo "$USAGE" >&2 + exit 1 +fi + +# Get the arguments +while getopts "iudephrt:?" o; do + case "${o}" in + h|\?) + echo "$USAGE" + exit 0 + ;; + i) + operation="-install" + ;; + u) + operation="-uninstall" + ;; + d) + operation="-disable" + ;; + e) + operation="-enable" + ;; + p) + operation="-update" + ;; + r) + operation="-reset" + ;; + t) + COMMAND="$OPTARG" + ;; + *) + echo "$USAGE" >&2 + exit 1 + ;; + esac +done + +shift $((OPTIND-1)) + +# If find_python is not able to find a python installed, $PYTHON will be null. 
+find_python PYTHON + + +if [ -z "$PYTHON" ]; then + echo "No Python interpreter found on the box" >&2 + exit 51 # Not Supported +else + echo "${PYTHON} --version" +fi + +${PYTHON} "${COMMAND}" ${operation} +# DONE \ No newline at end of file diff --git a/src/extension/src/ProcessHandler.py b/src/extension/src/ProcessHandler.py new file mode 100644 index 00000000..0e02a121 --- /dev/null +++ b/src/extension/src/ProcessHandler.py @@ -0,0 +1,90 @@ +import base64 +import json +import os +import signal +import subprocess +import errno +from src.Constants import Constants + + +class ProcessHandler(object): + def __init__(self, logger): + self.logger = logger + + def get_public_config_settings(self, config_settings): + """ Fetches only public settings from given config_settings and returns them in json format """ + public_config_settings = {} + public_settings_keys = Constants.ConfigPublicSettingsFields + if config_settings is not None: + public_config_settings.update({public_settings_keys.operation: config_settings.__getattribute__(public_settings_keys.operation), + public_settings_keys.activity_id: config_settings.__getattribute__(public_settings_keys.activity_id), + public_settings_keys.start_time: config_settings.__getattribute__(public_settings_keys.start_time), + public_settings_keys.maximum_duration: config_settings.__getattribute__(public_settings_keys.maximum_duration), + public_settings_keys.reboot_setting: config_settings.__getattribute__(public_settings_keys.reboot_setting), + public_settings_keys.include_classifications: config_settings.__getattribute__(public_settings_keys.include_classifications), + public_settings_keys.include_patches: config_settings.__getattribute__(public_settings_keys.include_patches), + public_settings_keys.exclude_patches: config_settings.__getattribute__(public_settings_keys.exclude_patches), + public_settings_keys.internal_settings: config_settings.__getattribute__(public_settings_keys.internal_settings)}) + return 
public_config_settings + + def get_env_settings(self, ext_env_handler): + """ Fetches configs required by the core code from HandlerEnvironment file and returns them in json format """ + env_settings = {} + env_settings_keys = Constants.EnvSettingsFields + if env_settings is not None: + env_settings.update({env_settings_keys.log_folder: ext_env_handler.log_folder}) + env_settings.update({env_settings_keys.config_folder: ext_env_handler.config_folder}) + env_settings.update({env_settings_keys.status_folder: ext_env_handler.status_folder}) + return env_settings + + def start_daemon(self, seq_no, config_settings, ext_env_handler): + """ Launches the core code in a separate independent process with required arguments and exits the current process immediately """ + exec_path = os.path.join(os.getcwd(), Constants.CORE_CODE_FILE_NAME) + public_config_settings = base64.b64encode(json.dumps(self.get_public_config_settings(config_settings)).encode("utf-8")).decode("utf-8") + env_settings = base64.b64encode(json.dumps(self.get_env_settings(ext_env_handler)).encode("utf-8")).decode("utf-8") + + args = " -sequenceNumber {0} -environmentSettings \'{1}\' -configSettings \'{2}\'".format(str(seq_no), env_settings, public_config_settings) + command = ["python " + exec_path + " " + args] + self.logger.log("Launching process. [command={0}]".format(str(command))) + process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if process.pid is not None: + self.logger.log("New shell process launched successfully. [Process ID (PID)={0}]".format(str(process.pid))) + return process + self.logger.log_error("Error launching process for given sequence. 
[sequence={0}]".format(seq_no)) + + def identify_running_processes(self, process_ids): + """ Returns a list of all currently active processes from the given list of process ids """ + running_process_ids = [] + for process_id in process_ids: + if process_id != "": + process_id = int(process_id) + if self.is_process_running(process_id): + running_process_ids.append(process_id) + self.logger.log("Processes still running from the previous request: [PIDs={0}]".format(str(running_process_ids))) + return running_process_ids + + def is_process_running(self, pid): + # check to see if the process is still alive + try: + # Sending signal 0 to a pid will raise an OSError exception if the pid is not running, and do nothing otherwise. + os.kill(pid, 0) + return True + except OSError as error: + if error.errno == errno.ESRCH: + # ESRCH == No such process + return False + elif error.errno == errno.EPERM: + # EPERM = No permission, which means there's a process to which access is denied + return True + else: + # According to "man 2 kill" possible error values are (EINVAL, EPERM, ESRCH) Thus considering this as an error + return False + + def kill_process(self, pid): + try: + if self.is_process_running(pid): + self.logger.log("Terminating process: [PID={0}]".format(str(pid))) + os.kill(pid, signal.SIGTERM) + except OSError as error: + self.logger.log_error("Error terminating process. 
[Process ID={0}] [Error={1}]".format(pid, repr(error))) + raise diff --git a/src/extension/src/RuntimeContextHandler.py b/src/extension/src/RuntimeContextHandler.py new file mode 100644 index 00000000..d547d575 --- /dev/null +++ b/src/extension/src/RuntimeContextHandler.py @@ -0,0 +1,59 @@ +import datetime +import time +from src.Constants import Constants + + +class RuntimeContextHandler(object): + def __init__(self, logger): + self.logger = logger + self.core_state_fields = Constants.CoreStateFields + + def terminate_processes_from_previous_operation(self, process_handler, core_state_content): + """ Terminates all running processes from the previous request """ + self.logger.log("Verifying if previous patch operation is still in progress") + if core_state_content is None or core_state_content.__getattribute__(self.core_state_fields.completed).lower() == 'true': + self.logger.log("Previous request is complete") + return + # verify if processes from prev request are running + running_process_ids = process_handler.identify_running_processes(core_state_content.__getattribute__(self.core_state_fields.process_ids)) + if len(running_process_ids) != 0: + for pid in running_process_ids: + process_handler.kill_process(pid) + + def process_previous_patch_operation(self, core_state_handler, process_handler, prev_patch_max_end_time, core_state_content): + """ Waits for the previous request action to complete for a specific time, terminates previous process if it goes over that time """ + self.logger.log("Verifying if previous patch operation is still in progress") + core_state_content = core_state_handler.read_file() if core_state_content is None else core_state_content + if core_state_content is None or core_state_content.__getattribute__(self.core_state_fields.completed).lower() == 'true': + self.logger.log("Previous request is complete") + return + # verify if processes from prev request are running + running_process_ids = 
process_handler.identify_running_processes(core_state_content.__getattribute__(self.core_state_fields.process_ids)) + if len(running_process_ids) != 0: + is_patch_complete = self.check_if_patch_completes_in_time(prev_patch_max_end_time, core_state_content.__getattribute__(self.core_state_fields.last_heartbeat), core_state_handler) + if is_patch_complete: + self.logger.log("Previous request is complete") + return + for pid in running_process_ids: + self.logger.log("Previous request did not complete in time. Terminating all of it's running processes.") + process_handler.kill_process(pid) + + def check_if_patch_completes_in_time(self, time_for_prev_patch_to_complete, core_state_last_heartbeat, core_state_handler): + """ Waits for the previous request to complete in given time, with intermittent status checks """ + if type(time_for_prev_patch_to_complete) is not datetime.datetime: + raise Exception("System Error: Unable to identify the time to wait for previous request to complete") + max_wait_interval_in_seconds = 60 + current_time = datetime.datetime.utcnow() + remaining_wait_time = (time_for_prev_patch_to_complete - current_time).total_seconds() + core_state_content = None + while remaining_wait_time > 0: + next_wait_time_in_seconds = max_wait_interval_in_seconds if remaining_wait_time > max_wait_interval_in_seconds else remaining_wait_time + core_state_last_heartbeat = core_state_last_heartbeat if core_state_content is None else core_state_content.__getattribute__(self.core_state_fields.last_heartbeat) + self.logger.log("Previous patch operation is still in progress with last status update at {0}. Waiting for a maximum of {1} seconds for it to complete with intermittent status change checks. 
Next check will be performed after {2} seconds.".format(str(core_state_last_heartbeat), str(remaining_wait_time), str(next_wait_time_in_seconds))) + time.sleep(next_wait_time_in_seconds) + remaining_wait_time = (time_for_prev_patch_to_complete - datetime.datetime.utcnow()).total_seconds() + # read CoreState.json file again, to verify if the previous process has completed + core_state_content = core_state_handler.read_file() + if core_state_content.__getattribute__(self.core_state_fields.completed).lower() == 'true': + return True + return False diff --git a/src/extension/src/TelemetryWriter.py b/src/extension/src/TelemetryWriter.py new file mode 100644 index 00000000..74598f74 --- /dev/null +++ b/src/extension/src/TelemetryWriter.py @@ -0,0 +1,69 @@ +import platform +from src.Constants import Constants + + +class TelemetryWriter(object): + """Class for writing telemetry data to data transports""" + + def __init__(self): + self.data_transports = [] + self.activity_id = None + + # Init state report + self.send_ext_state_info('Started Linux patch extension execution.') + self.send_machine_config_info() + + # region Primary payloads + def send_ext_state_info(self, state_info): + # Expected to send up only pivotal extension state changes + return self.try_send_message(state_info, Constants.TelemetryExtState) + + def send_config_info(self, config_info, config_type='unknown'): + # Configuration info + payload_json = { + 'config_type': config_type, + 'config_value': config_info + } + return self.try_send_message(payload_json, Constants.TelemetryConfig) + + def send_error_info(self, error_info): + # Expected to log significant errors or exceptions + return self.try_send_message(error_info, Constants.TelemetryError) + + def send_debug_info(self, error_info): + # Usually expected to instrument possibly problematic code + return self.try_send_message(error_info, Constants.TelemetryDebug) + + def send_info(self, info): + # Usually expected to be significant runbook output + 
return self.try_send_message(info, Constants.TelemetryInfo) + # endregion + + # Composed payload + def send_machine_config_info(self): + # Machine info + machine_info = { + 'platform_name': str(platform.linux_distribution()[0]), + 'platform_version': str(platform.linux_distribution()[1]), + 'machine_arch': str(platform.machine()) + } + return self.send_config_info(machine_info, 'machine_config') + + def send_execution_error(self, cmd, code, output): + # Expected to log any errors from a cmd execution, including package manager execution errors + error_payload = { + 'cmd': str(cmd), + 'code': str(code), + 'output': str(output)[0:3072] + } + return self.send_error_info(error_payload) + # endregion + + # region Transport layer + def try_send_message(self, message, category=Constants.TelemetryInfo): + raise NotImplementedError + + def close_transports(self): + """Close data transports""" + raise NotImplementedError + # endregion diff --git a/src/extension/src/Utility.py b/src/extension/src/Utility.py new file mode 100644 index 00000000..fe832f05 --- /dev/null +++ b/src/extension/src/Utility.py @@ -0,0 +1,49 @@ +import datetime +import os +import time +from src.Constants import Constants +from src.local_loggers.FileLogger import FileLogger + + +class Utility(object): + def __init__(self, logger): + self.logger = logger + self.retry_count = Constants.MAX_IO_RETRIES + + def delete_file(self, dir_path, file, raise_if_not_found=True): + """ Retries delete operation for a set number of times before failing """ + self.logger.log("Deleting file. [File={0}]".format(file)) + file_path = os.path.join(dir_path, file) + error_msg = "" + if os.path.exists(file_path) and os.path.isfile(file_path): + for retry in range(0, self.retry_count): + try: + time.sleep(retry) + os.remove(file_path) + return True + except Exception as e: + error_msg = "Trial {0}: Could not delete file. 
class Utility(object):
    """Miscellaneous file and timestamp helpers shared by the extension handlers."""

    def __init__(self, logger):
        self.logger = logger
        # number of attempts for I/O operations before giving up
        self.retry_count = Constants.MAX_IO_RETRIES

    def delete_file(self, dir_path, file, raise_if_not_found=True):
        """ Deletes dir_path/file, retrying up to self.retry_count times before failing.

        Returns:
            True on successful deletion, False otherwise (previously fell through
            returning None on repeated failure; False is explicit and still falsy).

        Raises:
            Exception: when the file does not exist and raise_if_not_found is True.
        """
        self.logger.log("Deleting file. [File={0}]".format(file))
        file_path = os.path.join(dir_path, file)
        if not (os.path.exists(file_path) and os.path.isfile(file_path)):
            error_msg = "File Not Found: [File={0}] in [path={1}]".format(file, dir_path)
            self.logger.log_error(error_msg)
            if raise_if_not_found:
                raise Exception(error_msg)
            return False

        error_msg = ""
        for retry in range(0, self.retry_count):
            try:
                time.sleep(retry)  # linear back-off; the first attempt does not wait
                os.remove(file_path)
                return True
            except Exception as e:
                error_msg = "Trial {0}: Could not delete file. [File={1}] [Exception={2}]".format(retry + 1, file, repr(e))
                self.logger.log_error(error_msg)
        error_msg = "Failed to delete file after {0} tries. [File={1}] [Exception={2}]".format(self.retry_count, file, error_msg)
        self.logger.log_error(error_msg)
        return False

    def create_log_file(self, log_folder, seq_no):
        """ Creates <seq_no>.ext.log under the logFolder path from HandlerEnvironment.
        Returns a FileLogger, or None when seq_no is missing or the folder does not exist. """
        file_path = str(seq_no) + ".ext" + Constants.LOG_FILE_EXTENSION
        if seq_no is not None and os.path.exists(log_folder):
            self.logger.log("Creating log file. [File={0}]".format(file_path))
            return FileLogger(log_folder, file_path)
        self.logger.log_error("File creation error: [File={0}]".format(file_path))
        return None

    def get_datetime_from_str(self, date_str):
        """ Parses an ISO-8601 'Z'-suffixed timestamp string into a naive datetime (assumed UTC). """
        return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ")

    def get_str_from_datetime(self, date):
        """ Formats a datetime as an ISO-8601 'Z'-suffixed timestamp string. """
        return date.strftime("%Y-%m-%dT%H:%M:%SZ")
def main(argv):
    """Entry point for the extension handler.

    argv[1] carries the operation (-install, -enable, ...) as passed by the shim
    script. Wires up all file/state handlers, then dispatches via ActionHandler.
    Logging is mirrored to a per-sequence .ext.log file once the log folder is known.
    """
    stdout_file_mirror = None
    file_logger = None
    logger = Logger()
    try:
        # args will have values install, uninstall, etc, as given in MsftLinuxPatchExtShim.sh in the operation var
        cmd_exec_start_time = datetime.datetime.utcnow()
        utility = Utility(logger)
        runtime_context_handler = RuntimeContextHandler(logger)
        json_file_handler = JsonFileHandler(logger)
        ext_env_handler = ExtEnvHandler(json_file_handler)
        if ext_env_handler.handler_environment_json is not None and ext_env_handler.config_folder is not None:
            config_folder = ext_env_handler.config_folder
            # config_folder is known non-None here; only existence needs checking
            if not os.path.exists(config_folder):
                logger.log_error("Config folder not found at [{0}].".format(repr(config_folder)))
                # sys.exit: the exit() builtin comes from site.py and is not guaranteed in all runtimes
                sys.exit(Constants.ExitCode.MissingConfig)

            ext_config_settings_handler = ExtConfigSettingsHandler(logger, json_file_handler, config_folder)
            seq_no = ext_config_settings_handler.get_seq_no()
            if seq_no is None:
                logger.log_error("Sequence number for current operation not found")
                sys.exit(Constants.ExitCode.MissingConfig)

            file_logger = utility.create_log_file(ext_env_handler.log_folder, seq_no)
            if file_logger is not None:
                stdout_file_mirror = StdOutFileMirror(file_logger)

            core_state_handler = CoreStateHandler(config_folder, json_file_handler)
            ext_state_handler = ExtStateHandler(config_folder, utility, json_file_handler)
            ext_output_status_handler = ExtOutputStatusHandler(logger, json_file_handler)
            process_handler = ProcessHandler(logger)
            action_handler = ActionHandler(logger, utility, runtime_context_handler, json_file_handler, ext_env_handler, ext_config_settings_handler, core_state_handler, ext_state_handler, ext_output_status_handler, process_handler, cmd_exec_start_time, seq_no)
            action_handler.determine_operation(argv[1])
        else:
            error_cause = "No configuration provided in HandlerEnvironment" if ext_env_handler.handler_environment_json is None else "Path to config folder not specified in HandlerEnvironment"
            error_msg = "Error processing file. [File={0}] [Error={1}]".format(Constants.HANDLER_ENVIRONMENT_FILE, error_cause)
            raise Exception(error_msg)
    except Exception as error:
        logger.log_error(repr(error))
        raise
        # todo: add an exitcode instead of raising an exception
    finally:
        # always tear down log mirroring, even when dispatch raised
        if stdout_file_mirror is not None:
            stdout_file_mirror.stop()
        if file_logger is not None:
            file_logger.close()


if __name__ == '__main__':
    main(sys.argv)
class CoreStateHandler(object):
    """ Reads the core sequence state persisted in CoreState.json. """

    def __init__(self, dir_path, json_file_handler):
        self.dir_path = dir_path
        self.file = Constants.CORE_STATE_FILE
        self.json_file_handler = json_file_handler
        self.core_state_fields = Constants.CoreStateFields

    def read_file(self):
        """ Fetches config from CoreState.json. Returns None if no content/file found """
        parent_key = self.core_state_fields.parent_key
        field_names = [self.core_state_fields.number,
                       self.core_state_fields.action,
                       self.core_state_fields.completed,
                       self.core_state_fields.last_heartbeat,
                       self.core_state_fields.process_ids]
        record = collections.namedtuple(parent_key, field_names)
        core_state_json = self.json_file_handler.get_json_file_content(self.file, self.dir_path, raise_if_not_found=False)
        if core_state_json is None:
            return None
        # pull each field through the safe accessor, preserving field order
        values = [self.json_file_handler.get_json_config_value_safely(core_state_json, field, parent_key) for field in field_names]
        return record(*values)
class ExtConfigSettingsHandler(object):
    """ Responsible for managing any operations with the <seq_no>.settings file """

    def __init__(self, logger, json_file_handler, config_folder):
        self.config_folder = config_folder
        self.logger = logger
        self.json_file_handler = json_file_handler
        self.file_ext = Constants.CONFIG_SETTINGS_FILE_EXTENSION
        self.runtime_settings_key = Constants.RUNTIME_SETTINGS
        self.handler_settings_key = Constants.HANDLER_SETTINGS
        self.public_settings_key = Constants.PUBLIC_SETTINGS
        self.public_settings_all_keys = Constants.ConfigPublicSettingsFields

    def get_seq_no(self):
        """ Fetches the sequence number: from the env variable when set, otherwise from the most
        recently modified <n>.settings file in the config folder, since GA updates the settings
        file before calling a command.

        Returns None when no sequence number can be determined.
        NOTE(review): the env-var path returns a string while the file path returns an int —
        preserved as-is for caller compatibility; confirm whether callers normalize. """
        try:
            seq_no = os.getenv(Constants.SEQ_NO_ENVIRONMENT_VAR)
            if seq_no is not None:
                return seq_no

            seq_no = None
            freshest_time = None
            # re.escape the extension: previously the '.' in '.settings' matched any character
            settings_file_pattern = re.compile(r'^\d+' + re.escape(self.file_ext) + r'$')
            for subdir, dirs, files in os.walk(self.config_folder):
                for file in files:
                    try:
                        if settings_file_pattern.match(file):
                            cur_seq_no = int(os.path.basename(file).split('.')[0])
                            # NOTE(review): paths are joined against config_folder, not subdir —
                            # matches in nested directories would resolve wrongly; confirm layout is flat
                            current_file_m_time = os.path.getmtime(os.path.join(self.config_folder, file))
                            if freshest_time is None or current_file_m_time > freshest_time:
                                freshest_time = current_file_m_time
                                seq_no = cur_seq_no
                    except ValueError:
                        continue
            return seq_no
        except Exception:
            self.logger.log_error("Error occurred while fetching sequence number")
            raise

    def read_file(self, seq_no):
        """ Fetches config from the <seq_no>.settings file.
        Raises an exception if no content/file found or the file fails validation. """
        try:
            file = str(seq_no) + self.file_ext
            config_settings_json = self.json_file_handler.get_json_file_content(file, self.config_folder, raise_if_not_found=True)
            # are_config_settings_valid logs which specific condition failed
            if config_settings_json is None or not self.are_config_settings_valid(config_settings_json):
                raise Exception("Config Settings json file invalid")
            keys = self.public_settings_all_keys
            operation = self.get_ext_config_value_safely(config_settings_json, keys.operation)
            activity_id = self.get_ext_config_value_safely(config_settings_json, keys.activity_id)
            start_time = self.get_ext_config_value_safely(config_settings_json, keys.start_time)
            max_duration = self.get_ext_config_value_safely(config_settings_json, keys.maximum_duration, raise_if_not_found=False)
            reboot_setting = self.get_ext_config_value_safely(config_settings_json, keys.reboot_setting, raise_if_not_found=False)
            include_classifications = self.get_ext_config_value_safely(config_settings_json, keys.include_classifications, raise_if_not_found=False)
            include_patches = self.get_ext_config_value_safely(config_settings_json, keys.include_patches, raise_if_not_found=False)
            exclude_patches = self.get_ext_config_value_safely(config_settings_json, keys.exclude_patches, raise_if_not_found=False)
            internal_settings = self.get_ext_config_value_safely(config_settings_json, keys.internal_settings, raise_if_not_found=False)

            config_settings_values = collections.namedtuple("config_settings", [keys.operation, keys.activity_id, keys.start_time,
                                                                                keys.maximum_duration, keys.reboot_setting, keys.include_classifications,
                                                                                keys.include_patches, keys.exclude_patches, keys.internal_settings])
            return config_settings_values(operation, activity_id, start_time, max_duration, reboot_setting, include_classifications, include_patches, exclude_patches, internal_settings)
        except Exception as error:
            error_msg = "Error processing config settings file. [Sequence Number={0}] [Exception= {1}]".format(seq_no, repr(error))
            self.logger.log_error(error_msg)
            raise

    def are_config_settings_valid(self, config_settings_json):
        """ Validates the structure of the .settings content. Returns True/False; never raises. """
        try:
            if config_settings_json is None or type(config_settings_json) is not dict or not bool(config_settings_json):
                self.logger.log_error("Configuration settings not of expected format")
                return False
            # file contains "runtimeSettings" ('== 0' rather than 'is 0': int identity is a CPython detail)
            if self.runtime_settings_key not in config_settings_json or type(config_settings_json[self.runtime_settings_key]) is not list \
                    or config_settings_json[self.runtime_settings_key] is None or len(config_settings_json[self.runtime_settings_key]) == 0:
                self.logger.log_error("runtimeSettings not of expected format")
                return False
            # file contains "handlerSettings"
            handler_settings = config_settings_json[self.runtime_settings_key][0].get(self.handler_settings_key)
            if type(handler_settings) is not dict or not bool(handler_settings):
                self.logger.log_error("handlerSettings not of expected format")
                return False
            # file contains "publicSettings"
            public_settings = handler_settings.get(self.public_settings_key)
            if type(public_settings) is not dict or not bool(public_settings):
                self.logger.log_error("publicSettings not of expected format")
                return False

            # verifying Configuration settings contain all the mandatory keys
            for public_setting in [self.public_settings_all_keys.operation, self.public_settings_all_keys.activity_id, self.public_settings_all_keys.start_time]:
                if not public_settings.get(public_setting):
                    self.logger.log_error("Mandatory key missing in publicSettings section of the configuration settings: " + str(public_setting))
                    return False
            return True
        except Exception as error:
            self.logger.log_error(error)
            return False

    def get_ext_config_value_safely(self, config_settings_json, key, raise_if_not_found=True):
        """ Queries a public-settings value safely. Raises when the key is absent unless
        raise_if_not_found is False (considered by-design). """
        if config_settings_json is not None and len(config_settings_json) != 0:
            public_settings = config_settings_json[self.runtime_settings_key][0][self.handler_settings_key][self.public_settings_key]
            if key in public_settings:
                return public_settings[key]
            if raise_if_not_found:
                raise Exception("Value not found for given config. [Config={0}]".format(key))
            return None
        return None
[Config={0}]".format(key)) + else: + return None + return None diff --git a/src/extension/src/file_handlers/ExtEnvHandler.py b/src/extension/src/file_handlers/ExtEnvHandler.py new file mode 100644 index 00000000..0e7693d7 --- /dev/null +++ b/src/extension/src/file_handlers/ExtEnvHandler.py @@ -0,0 +1,47 @@ +import traceback + +from src.Constants import Constants + +''' +Structure of the file this class deals with: HandlerEnvironment.json +[{ + "version": 1.0, + "handlerEnvironment": { + "logFolder": "", + "configFolder": "", + "statusFolder": "", + "heartbeatFile": "", + "deploymentid": "", + "rolename": "", + "instance": "" + } +}] +''' + + +class ExtEnvHandler(object): + """ Responsible for all operations with HandlerEnvironment.json file """ + def __init__(self, json_file_handler, handler_env_file=Constants.HANDLER_ENVIRONMENT_FILE, handler_env_file_path=Constants.HANDLER_ENVIRONMENT_FILE_PATH): + json_file_handler = json_file_handler + self.env_settings_all_keys = Constants.EnvSettingsFields + + self.handler_environment_json = json_file_handler.get_json_file_content(handler_env_file, handler_env_file_path, raise_if_not_found=True) + if self.handler_environment_json is not None: + self.log_folder = self.get_ext_env_config_value_safely(self.env_settings_all_keys.log_folder) + self.config_folder = self.get_ext_env_config_value_safely(self.env_settings_all_keys.config_folder) + self.status_folder = self.get_ext_env_config_value_safely(self.env_settings_all_keys.status_folder) + + def get_ext_env_config_value_safely(self, key, raise_if_not_found=True): + """ Allows a update deployment configuration value to be queried safely with a fall-back default (optional). + An exception will be raised if default_value is not explicitly set when called (considered by-design). 
""" + config_type = self.env_settings_all_keys.settings_parent_key + if self.handler_environment_json is not None and len(self.handler_environment_json) is not 0: + if key in self.handler_environment_json[0][config_type]: + value = self.handler_environment_json[0][config_type][key] + return value + else: # If it is not present + if raise_if_not_found: + raise Exception("Value not found for given config. [Config={0}]".format(key)) + else: + return None + return None diff --git a/src/extension/src/file_handlers/ExtOutputStatusHandler.py b/src/extension/src/file_handlers/ExtOutputStatusHandler.py new file mode 100644 index 00000000..0041fca5 --- /dev/null +++ b/src/extension/src/file_handlers/ExtOutputStatusHandler.py @@ -0,0 +1,123 @@ +import datetime +import json + +from src.Constants import Constants + +''' +.status +For the extension wrapper, the status structure is simply the following (no substatuses): +[{ + "version": 1.0, + "timestampUTC": "2019-07-20T12:12:14Z", + "status": { + "name": "Azure Patch Management", + "operation": "Assessment / Deployment / NoOperation", + "status": "transitioning / error / success / warning", + "code": 0, + "formattedMessage": { + "lang": "en-US", + "message": "" + } + } +}] +''' + + +class ExtOutputStatusHandler(object): + """ Responsible for managing .status file in the status folder path given in HandlerEnvironment.json """ + def __init__(self, logger, json_file_handler): + self.logger = logger + self.json_file_handler = json_file_handler + self.file_ext = Constants.STATUS_FILE_EXTENSION + self.file_keys = Constants.StatusFileFields + self.status = Constants.Status + + def write_status_file(self, seq_no, dir_path, operation, substatus_json, status=Constants.Status.Transitioning.lower()): + self.logger.log("Writing status file to provide patch management data for [Sequence={0}]".format(str(seq_no))) + file_name = str(seq_no) + self.file_ext + content = [{ + self.file_keys.version: "1.0", + self.file_keys.timestamp_utc: 
str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")), + self.file_keys.status: { + self.file_keys.status_name: "Azure Patch Management", + self.file_keys.status_operation: str(operation), + self.file_keys.status_status: status.lower(), + self.file_keys.status_code: 0, + self.file_keys.status_formatted_message: { + self.file_keys.status_formatted_message_lang: "en-US", + self.file_keys.status_formatted_message_message: "" + }, + self.file_keys.status_substatus: substatus_json + } + }] + self.json_file_handler.write_to_json_file(dir_path, file_name, content) + + def read_file(self, seq_no, dir_path): + file_name = str(seq_no) + self.file_ext + status_json = self.json_file_handler.get_json_file_content(file_name, dir_path) + if status_json is None: + return None + return status_json + + def update_key_value_safely(self, status_json, key, value_to_update, parent_key=None): + if status_json is not None and len(status_json) is not 0: + if parent_key is None: + status_json[0].update({key: value_to_update}) + else: + if parent_key in status_json[0]: + status_json[0].get(parent_key).update({key: value_to_update}) + else: + self.logger.log_error("Error updating config value in status file. [Config={0}]".format(key)) + + def update_file(self, seq_no, dir_path): + """ Reseting status=Transitioning and code=0 with latest timestamp, while retaining all other values""" + try: + file_name = str(seq_no) + self.file_ext + self.logger.log("Updating file. [File={0}]".format(file_name)) + status_json = self.read_file(str(seq_no), dir_path) + + if status_json is None: + self.logger.log_error("Error processing file. 
[File={0}]".format(file_name)) + return + self.update_key_value_safely(status_json, self.file_keys.status_status, self.status.Transitioning.lower(), self.file_keys.status_status) + self.update_key_value_safely(status_json, self.file_keys.status_code, 0, self.file_keys.status_status) + self.update_key_value_safely(status_json, self.file_keys.timestamp_utc, str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))) + self.json_file_handler.write_to_json_file(dir_path, file_name, status_json) + except Exception as error: + error_message = "Error in status file creation: " + repr(error) + self.logger.log_error(error_message) + raise + + def set_nooperation_substatus_json(self, seq_no, dir_path, operation, activity_id, start_time, status=Constants.Status.Transitioning, code=0): + """ Prepare the nooperation substatus json including the message containing nooperation summary """ + # Wrap patches into nooperation summary + nooperation_summary_json = self.new_nooperation_summary_json(activity_id, start_time) + + # Wrap nooperation summary into nooperation substatus + nooperation_substatus_json = self.new_substatus_json_for_operation(Constants.PATCH_NOOPERATION_SUMMARY, status, code, json.dumps(nooperation_summary_json)) + + # Update status on disk + self.write_status_file(seq_no, dir_path, operation, nooperation_substatus_json, status) + + def new_nooperation_summary_json(self, activity_id, start_time): + """ This is the message inside the nooperation substatus """ + # Compose substatus message + return { + "activityId": str(activity_id), + "startTime": str(start_time), + "lastModifiedTime": str(datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")), + "errors": "" # TODO: Implement this to spec + } + + def new_substatus_json_for_operation(self, operation_name, status="Transitioning", code=0, message=json.dumps("{}")): + """ Generic substatus for nooperation """ + # NOTE: Todo Function is same for assessment and install, can be generalized later + return { + 
"name": str(operation_name), + "status": str(status).lower(), + "code": code, + "formattedMessage": { + "lang": "en-US", + "message": str(message) + } + } diff --git a/src/extension/src/file_handlers/ExtStateHandler.py b/src/extension/src/file_handlers/ExtStateHandler.py new file mode 100644 index 00000000..054e2994 --- /dev/null +++ b/src/extension/src/file_handlers/ExtStateHandler.py @@ -0,0 +1,35 @@ +import collections +from src.Constants import Constants + + +class ExtStateHandler(object): + """ Responsible for managing ExtState.json file """ + def __init__(self, dir_path, utility, json_file_handler): + self.dir_path = dir_path + self.file = Constants.EXT_STATE_FILE + self.utility = utility + self.json_file_handler = json_file_handler + self.ext_fields = Constants.ExtStateFields + + def create_file(self, sequence_number, operation, prev_patch_max_end_time): + """ Creates ExtState.json file using the config provided in Handler Configuration """ + parent_key = self.ext_fields.ext_seq + ext_state = {parent_key: {}} + ext_state[parent_key][self.ext_fields.ext_seq_number] = sequence_number + ext_state[parent_key][self.ext_fields.ext_seq_achieve_enable_by] = self.utility.get_str_from_datetime(prev_patch_max_end_time) + ext_state[parent_key][self.ext_fields.ext_seq_operation] = operation + self.json_file_handler.write_to_json_file(self.dir_path, self.file, ext_state) + + def read_file(self): + """ Returns the config values in the file """ + parent_key = self.ext_fields.ext_seq + ext_state_values = collections.namedtuple(parent_key, [self.ext_fields.ext_seq_number, self.ext_fields.ext_seq_achieve_enable_by, self.ext_fields.ext_seq_operation]) + seq_no = None + achieve_enable_by = None + operation_type = None + ext_state_json = self.json_file_handler.get_json_file_content(self.file, self.dir_path, raise_if_not_found=False) + if ext_state_json is not None: + seq_no = self.json_file_handler.get_json_config_value_safely(ext_state_json, self.ext_fields.ext_seq_number, 
class JsonFileHandler(object):
    """ Shared helper for reading/writing JSON files with bounded retries. """

    def __init__(self, logger):
        self.logger = logger
        # number of attempts for I/O operations before giving up
        self.retry_count = Constants.MAX_IO_RETRIES

    def get_json_file_content(self, file, dir_path, raise_if_not_found=False):
        """ Returns parsed content of the given json file under dir_path, retrying up to
        self.retry_count times. Returns None on failure unless raise_if_not_found is True.
        Malformed JSON is also retried — the file may be rewritten between attempts. """
        file_path = os.path.join(dir_path, file)
        error_msg = ""
        self.logger.log("Reading file. [File={0}]".format(file))
        for retry in range(0, self.retry_count):
            try:
                time.sleep(retry)  # linear back-off; the first attempt does not wait
                with open(file_path, 'r') as file_handle:
                    file_contents = file_handle.read()
                return json.loads(file_contents)
            except ValueError as e:
                error_msg = "Incorrect file format. [File={0}] [Location={1}] [Exception={2}]".format(file, str(file_path), repr(e))
                self.logger.log_error(error_msg)
            except Exception as e:
                error_msg = "Trial {0}: Could not read file. [File={1}] [Location={2}] [Exception={3}]".format(retry + 1, file, str(file_path), repr(e))
                self.logger.log_error(error_msg)

        error_msg = "Failed to read file after {0} tries. [File={1}] [Location={2}] [Exception={3}]".format(self.retry_count, file, str(file_path), error_msg)
        self.logger.log_error(error_msg)
        if raise_if_not_found:
            raise Exception(error_msg)

    def get_json_config_value_safely(self, handler_json, key, parent_key, raise_if_not_found=True):
        """ Returns handler_json[parent_key][key]. Raises when the key is absent unless
        raise_if_not_found is False (considered by-design).
        NOTE(review): a missing parent_key raises KeyError as before — confirm callers expect that. """
        # truthiness check rather than 'len(...) is not 0': int identity is a CPython detail
        if handler_json:
            if key in handler_json[parent_key]:
                return handler_json[parent_key][key]
            if raise_if_not_found:
                raise Exception("Value not found for given config. [Config={0}]".format(key))
        return None

    def write_to_json_file(self, dir_path, file, content):
        """ Serializes content to dir_path/file, retrying up to self.retry_count times before
        raising. Raises immediately when dir_path does not exist. """
        if os.path.exists(dir_path):
            file_path = os.path.join(dir_path, file)
            error_message = ""
            self.logger.log("Writing file. [File={0}]".format(file))
            for retry in range(0, self.retry_count):
                try:
                    time.sleep(retry)  # linear back-off; the first attempt does not wait
                    with open(file_path, 'w') as json_file:
                        json.dump(content, json_file, default=self.json_default_converter)
                    return
                except Exception as error:
                    error_message = "Trial {0}: Could not write to file. [File={1}] [Location={2}] [Exception={3}]".format(retry + 1, file, str(file_path), error)
                    self.logger.log_error(error_message)
            error_msg = "Failed to write to file after {0} tries. [File={1}] [Location={2}] [Exception={3}]".format(self.retry_count, file, str(file_path), error_message)
            self.logger.log_error_and_raise_new_exception(error_msg, Exception)
        else:
            error_msg = "Directory Not Found: [Directory={0}]".format(dir_path)
            self.logger.log_error_and_raise_new_exception(error_msg, Exception)

    def json_default_converter(self, value):
        """ Fallback serializer for json.dump: stringifies non-JSON-native values. """
        return str(value)
class FileLogger(object):
    """Facilitates writing selected logs to a file"""

    def __init__(self, log_folder, log_file):
        # opening/creating the log file
        self.log_file_path = os.path.join(log_folder, log_file)
        # initialize the handle before open() so write/close/__del__ are safe even if open fails
        # (previously the attribute was never set on failure, causing AttributeError later)
        self.log_file_handle = None
        try:
            self.log_file_handle = open(self.log_file_path, "a")
        except Exception as error:
            sys.stdout.write("FileLogger - Error opening file. [File={0}] [Exception={1}]".format(self.log_file_path, repr(error)))

        # Retaining 10 most recent log files, deleting others
        self.delete_older_log_files(log_folder)
        # verifying if the log file retention was applied.
        log_files = self.get_all_log_files(log_folder)
        if len(log_files) > Constants.MAX_LOG_FILES_ALLOWED:
            print("Retention failed for log files")
            raise Exception("Retention failed for log files")

    def __del__(self):
        self.close()

    def get_all_log_files(self, log_folder):
        """ Returns all files with .log extension within the given folder """
        return [os.path.join(log_folder, file) for file in os.listdir(log_folder) if file.lower().endswith('.log')]

    def delete_older_log_files(self, log_folder):
        """ Deletes older log files, retaining only the most recent LOG_FILES_TO_RETAIN.
        Best-effort: failures are reported to stdout but never raised. """
        print("Retaining " + str(Constants.LOG_FILES_TO_RETAIN) + " most recent operation logs, deleting others.")
        try:
            log_files = self.get_all_log_files(log_folder)
            log_files.sort(key=os.path.getmtime, reverse=True)
        except Exception as e:
            print("Error identifying log files to delete. [Exception={0}]".format(repr(e)))
            return

        if len(log_files) >= Constants.LOG_FILES_TO_RETAIN:
            for file in log_files[Constants.LOG_FILES_TO_RETAIN:]:
                try:
                    if os.path.exists(file):
                        os.remove(file)
                        print("Deleted [File={0}]".format(repr(file)))
                except Exception as e:
                    print("Error deleting log file. [File={0} [Exception={1}]]".format(repr(file), repr(e)))

    def write(self, message, fail_silently=True):
        """ Appends message to the log file; IOErrors are swallowed unless fail_silently is False. """
        try:
            if self.log_file_handle is not None:
                self.log_file_handle.write(message)
            else:
                raise Exception("Log file not found")
        except IOError:
            # DO NOT write any errors here to stdout
            if not fail_silently:
                raise
        except ValueError as error:
            sys.stdout.write("FileLogger - [Error={0}]".format(repr(error)))
        except Exception as error:
            sys.stdout.write("FileLogger - Error opening file. [File={0}] [Exception={1}]".format(self.log_file_path, repr(error)))

    def flush(self):
        if self.log_file_handle is not None:
            self.log_file_handle.flush()

    def close(self):
        if self.log_file_handle is not None:
            self.log_file_handle.close()
[File={0}] [Exception={1}]".format(self.log_file_path, repr(error))) + + def flush(self): + if self.log_file_handle is not None: + self.log_file_handle.flush() + + def close(self): + if self.log_file_handle is not None: + self.log_file_handle.close() diff --git a/src/extension/src/local_loggers/Logger.py b/src/extension/src/local_loggers/Logger.py new file mode 100644 index 00000000..5240e550 --- /dev/null +++ b/src/extension/src/local_loggers/Logger.py @@ -0,0 +1,53 @@ +from __future__ import print_function +import os + +from src.Constants import Constants + +class Logger(object): + def __init__(self, file_logger=None, current_env=None): + self.file_logger = file_logger + self.ERROR = "ERROR:" + self.WARNING = "WARNING:" + self.DEBUG = "DEBUG:" + self.VERBOSE = "VERBOSE:" + self.current_env = current_env + self.NEWLINE_REPLACE_CHAR = " " + + def log(self, message): + """log output""" + for line in message.splitlines(): # allows the extended file logger to strip unnecessary white space + print(line) + if self.file_logger is not None: + self.file_logger.write(line) + + def log_error(self, message): + """log errors""" + message = (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() + print(self.ERROR + " " + message) + if self.file_logger is not None: + self.file_logger.write(self.ERROR + " " + message) + + def log_error_and_raise_new_exception(self, message, exception): + """log errors and raise exception passed in as an arg""" + self.log_error(repr(message)) + raise exception(message) + + def log_warning(self, message): + """log warning""" + message = (self.NEWLINE_REPLACE_CHAR.join(message.split(os.linesep))).strip() + print(self.WARNING + " " + message) + if self.file_logger is not None: + self.file_logger.write(self.WARNING + " " + message) + + def log_debug(self, message): + """log debug""" + message = message.strip() + if self.current_env in (Constants.DEV, Constants.TEST): + print(self.current_env + ": " + message) # send to standard output if 
dev or test env + if self.file_logger is not None: + self.file_logger.write(self.DEBUG + " " + "\n\t".join(message.splitlines()).strip()) + + def log_verbose(self, message): + """log verbose""" + if self.file_logger is not None: + self.file_logger.write(self.VERBOSE + " " + "\n\t".join(message.strip().splitlines()).strip()) diff --git a/src/extension/src/local_loggers/StdOutFileMirror.py b/src/extension/src/local_loggers/StdOutFileMirror.py new file mode 100644 index 00000000..429f900e --- /dev/null +++ b/src/extension/src/local_loggers/StdOutFileMirror.py @@ -0,0 +1,36 @@ +"""Mirrors all terminal output to a local file +If the log file language is set to 'Python' in Notepad++, with code as implemented below, useful collapsibility is obtained.""" +import sys +import datetime + + +class StdOutFileMirror(object): + """Mirrors all terminal output to a local file""" + + def __init__(self, file_logger): + self.terminal = sys.stdout # preserve for recovery + self.file_logger = file_logger + + if self.file_logger.log_file_handle is not None: + sys.stdout = self + sys.stdout.write(str('-'*128) + "\n") # provoking an immediate failure if anything is wrong + else: + sys.stdout = self.terminal + sys.stdout.write("WARNING: StdOutFileMirror - Skipping as FileLogger is not initialized") + + def write(self, message): + self.terminal.write(message) # enable standard job output + + if len(message.strip()) > 0: + try: + timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + self.file_logger.write("\n" + timestamp + "> " + repr(message), fail_silently=False) # also write to the file logger file + except Exception as error: + sys.stdout = self.terminal # suppresses further job output mirror failures + sys.stdout.write("WARNING: StdOutFileMirror - Error writing to log file: " + repr(error)) + + def flush(self): + pass + + def stop(self): + sys.stdout = self.terminal diff --git a/src/extension/src/local_loggers/__init__.py b/src/extension/src/local_loggers/__init__.py 
new file mode 100644 index 00000000..e69de29b diff --git a/src/extension/src/manifest.xml b/src/extension/src/manifest.xml new file mode 100644 index 00000000..7d20490c --- /dev/null +++ b/src/extension/src/manifest.xml @@ -0,0 +1,15 @@ + + + Microsoft.CPlat.Core + LinuxPatchExtension + 1.4.23 + + VmRole + + Microsoft Azure VM InGuest Patch Extension for Linux Virtual Machines + true + true + Linux + Microsoft + + \ No newline at end of file diff --git a/src/extension/tests/TestCoreStateHandler.py b/src/extension/tests/TestCoreStateHandler.py new file mode 100644 index 00000000..24a2a61e --- /dev/null +++ b/src/extension/tests/TestCoreStateHandler.py @@ -0,0 +1,106 @@ +import os +import shutil +import tempfile +import unittest +from unittest import mock +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.CoreStateHandler import CoreStateHandler +from src.local_loggers.Logger import Logger +from src.Utility import Utility +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestCoreStateHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.utility = Utility(self.logger) + self.json_file_handler = JsonFileHandler(self.logger) + self.core_state_fields = Constants.CoreStateFields + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_file_exists(self): + core_state_handler = CoreStateHandler(os.path.join(os.path.pardir, "tests", "helpers"), self.json_file_handler) + self.assertIsNotNone(core_state_handler.read_file()) + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_file_does_not_exists(self, time_sleep): + core_state_handler = CoreStateHandler(os.path.join(os.path.pardir, "tests", "helper"), self.json_file_handler) + core_state_json = 
core_state_handler.read_file() + self.assertIsNone(core_state_json) + self.assertFalse(hasattr(core_state_json, self.core_state_fields.number)) + self.assertFalse(hasattr(core_state_json, self.core_state_fields.last_heartbeat)) + self.assertEqual(time_sleep.call_count, 5) + + def test_file_empty(self): + core_state_handler = CoreStateHandler(os.path.join(os.path.pardir, "tests", "helpers"), self.json_file_handler) + with mock.patch('tests.TestCoreStateHandler.JsonFileHandler.get_json_file_content', return_value=None, autospec=True): + core_state_json = core_state_handler.read_file() + self.assertIsNone(core_state_json) + self.assertFalse(hasattr(core_state_json, self.core_state_fields.number)) + + with mock.patch('tests.TestCoreStateHandler.JsonFileHandler.get_json_file_content', return_value={}, autospec=True): + core_state_json = core_state_handler.read_file() + self.assertTrue(hasattr(core_state_json, self.core_state_fields.number)) + self.assertTrue(hasattr(core_state_json, self.core_state_fields.action)) + self.assertIsNone(core_state_json.number) + self.assertIsNone(core_state_json.action) + + @mock.patch('tests.TestCoreStateHandler.JsonFileHandler.get_json_file_content', autospec=True) + def test_key_not_in_file(self, mock_core_state_json): + mock_core_state_json.return_value = None + parent_key = self.core_state_fields.parent_key + core_state_json = { + parent_key: { + "test_no": 1 + } + } + core_state_handler = CoreStateHandler("test_path", self.json_file_handler) + core_state_handler.read_file() + seq_no = self.core_state_fields.number + self.assertIsNone(core_state_handler.json_file_handler.get_json_config_value_safely(core_state_json, seq_no, parent_key, False)) + + def test_success_file_read(self): + core_state_handler = CoreStateHandler(os.path.join(os.path.pardir, "tests", "helpers"), self.json_file_handler) + core_state_json = core_state_handler.read_file() + self.assertIsNotNone(core_state_json.number) + 
self.assertIsNotNone(core_state_json.completed) + self.assertEqual(core_state_json.action, "Assessment") + self.assertEqual(core_state_json.number, 1234) + + def test_delete_file_failure(self): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, Constants.EXT_STATE_FILE) + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + # delete file + core_state_handler = CoreStateHandler("test", self.json_file_handler) + self.assertRaises(Exception, self.utility.delete_file, core_state_handler.dir_path, core_state_handler.file) + self.assertTrue(os.path.exists(file_path)) + # Remove the directory after the test + shutil.rmtree(test_dir) + + def test_delete_file_success(self): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, Constants.CORE_STATE_FILE) + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + # delete file + core_state_handler = CoreStateHandler(test_dir, self.json_file_handler) + self.utility.delete_file(core_state_handler.dir_path, core_state_handler.file) + self.assertFalse(os.path.exists(file_path)) + # Remove the directory after the test + shutil.rmtree(test_dir) + + + + + diff --git a/src/extension/tests/TestEnableCommandHandler.py b/src/extension/tests/TestEnableCommandHandler.py new file mode 100644 index 00000000..41367335 --- /dev/null +++ b/src/extension/tests/TestEnableCommandHandler.py @@ -0,0 +1,186 @@ +import json +import os +import shutil +import tempfile +import time +import unittest +from datetime import datetime +from unittest import mock +from src.Constants import Constants +from src.RuntimeContextHandler import RuntimeContextHandler +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.CoreStateHandler import CoreStateHandler +from src.EnableCommandHandler import EnableCommandHandler +from src.file_handlers.ExtConfigSettingsHandler import 
ExtConfigSettingsHandler +from src.file_handlers.ExtEnvHandler import ExtEnvHandler +from src.file_handlers.ExtOutputStatusHandler import ExtOutputStatusHandler +from src.file_handlers.ExtStateHandler import ExtStateHandler +from src.local_loggers.Logger import Logger +from src.ProcessHandler import ProcessHandler +from src.Utility import Utility +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestEnableCommandHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.utility = Utility(self.logger) + self.runtime_context_handler = RuntimeContextHandler(self.logger) + self.json_file_handler = JsonFileHandler(self.logger) + self.ext_env_handler = ExtEnvHandler(self.json_file_handler, handler_env_file_path=os.path.join(os.path.pardir, "tests", "helpers")) + self.config_folder = self.ext_env_handler.config_folder + self.ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, self.config_folder) + self.core_state_handler = CoreStateHandler(self.config_folder, self.json_file_handler) + self.ext_state_handler = ExtStateHandler(self.config_folder, self.utility, self.json_file_handler) + self.ext_output_status_handler = ExtOutputStatusHandler(self.logger, self.json_file_handler) + self.process_handler = ProcessHandler(self.logger) + self.enable_command_handler = EnableCommandHandler(self.logger, self.utility, self.runtime_context_handler, self.ext_env_handler, self.ext_config_settings_handler, self.core_state_handler, self.ext_state_handler, self.ext_output_status_handler, self.process_handler, datetime.utcnow(), 1234) + self.constants = Constants + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + @mock.patch('tests.TestEnableCommandHandler.ProcessHandler.start_daemon', autospec=True, return_value=None) + 
@mock.patch('builtins.exit', autospec=True, return_value=None) + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_enable_command_first_request(self, start_daemon_result, exit_return, time_sleep): + # create tempdir which will have all the required files + dir_path = tempfile.mkdtemp() + config_file_path, config_folder_path = self.setup_for_enable_handler(dir_path) + self.enable_command_handler.execute_handler_action() + self.assertTrue(os.path.exists(config_file_path)) + self.assertTrue(os.path.exists(os.path.join(config_folder_path, self.constants.EXT_STATE_FILE))) + ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + self.assertIsNotNone(ext_state_json) + self.assertEqual(ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number], 1234) + # delete tempdir + shutil.rmtree(dir_path) + + @mock.patch('tests.TestEnableCommandHandler.ProcessHandler.start_daemon', autospec=True, return_value=None) + @mock.patch('builtins.exit', autospec=True, return_value=None) + @mock.patch('tests.TestEnableCommandHandler.RuntimeContextHandler.check_if_patch_completes_in_time', autospec=True, return_value=False) + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_process_reenable_when_previous_req_complete(self, start_daemon_result, exit_return, wait_prev_ops_return, time_sleep): + # create tempdir which will have all the required files + dir_path = tempfile.mkdtemp() + config_file_path, config_folder_path = self.setup_for_enable_handler(dir_path) + shutil.copy(os.path.join("helpers", self.constants.EXT_STATE_FILE), config_folder_path) + shutil.copy(os.path.join("helpers", self.constants.CORE_STATE_FILE), config_folder_path) + prev_ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + self.enable_command_handler.execute_handler_action() + + 
ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + self.assertEqual(prev_ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number], ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number]) + self.assertEqual(prev_ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_operation], + ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_operation]) + + # delete tempdir + shutil.rmtree(dir_path) + + @mock.patch('tests.TestEnableCommandHandler.ProcessHandler.start_daemon', autospec=True, return_value=None) + @mock.patch('builtins.exit', autospec=True, return_value=None) + @mock.patch('tests.TestEnableCommandHandler.RuntimeContextHandler.check_if_patch_completes_in_time', autospec=True, return_value=False) + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_process_enable_request(self, start_daemon_result, exit_return, wait_prev_ops_return, time_sleep): + # create tempdir which will have all the required files + dir_path = tempfile.mkdtemp() + + # setup to mock environment when enable is triggered with a different sequence number than the prev operation + config_file_path, config_folder_path = self.setup_for_enable_handler(dir_path) + new_settings_file = self.create_helpers_for_enable_request(config_folder_path) + + prev_ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + enable_command_handler = EnableCommandHandler(self.logger, self.utility, self.runtime_context_handler, self.ext_env_handler, self.ext_config_settings_handler, self.core_state_handler, self.ext_state_handler, self.ext_output_status_handler, self.process_handler, datetime.utcnow(), 12) + enable_command_handler.execute_handler_action() + ext_state_json = 
self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + self.assertNotEqual(prev_ext_state_json, ext_state_json) + self.assertNotEqual(prev_ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number], + ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number]) + # delete tempdir + shutil.rmtree(dir_path) + + @mock.patch('builtins.exit', autospec=True, return_value=None) + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_process_nooperation_enable_request(self, exit_return, time_sleep): + # create tempdir which will have all the required files + dir_path = tempfile.mkdtemp() + + # setup to mock environment when enable is triggered with a nooperation request + config_file_path, config_folder_path = self.setup_for_enable_handler(dir_path) + new_settings_file = self.create_helpers_for_enable_request(config_folder_path) + + # update operation to 'NoOperation' since it is set to Assessment in the original helper file + with open(new_settings_file, 'r+') as f: + config_settings = json.load(f) + config_settings[self.constants.RUNTIME_SETTINGS][0][self.constants.HANDLER_SETTINGS][self.constants.PUBLIC_SETTINGS][self.constants.ConfigPublicSettingsFields.operation] = self.constants.NOOPERATION + f.seek(0) # rewind + json.dump(config_settings, f) + f.truncate() + f.close() + + prev_ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, config_folder_path) + enable_command_handler = EnableCommandHandler(self.logger, self.utility, self.runtime_context_handler, self.ext_env_handler, self.ext_config_settings_handler, self.core_state_handler, self.ext_state_handler, self.ext_output_status_handler, self.process_handler, datetime.utcnow(), 12) + enable_command_handler.execute_handler_action() + ext_state_json = self.json_file_handler.get_json_file_content(self.constants.EXT_STATE_FILE, 
config_folder_path) + core_state_json = self.json_file_handler.get_json_file_content(self.constants.CORE_STATE_FILE, config_folder_path, raise_if_not_found=False) + self.assertIsNone(core_state_json) + self.assertNotEqual(prev_ext_state_json, ext_state_json) + self.assertNotEqual(prev_ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number], ext_state_json[self.constants.ExtStateFields.ext_seq][self.constants.ExtStateFields.ext_seq_number]) + # delete tempdir + shutil.rmtree(dir_path) + + def setup_for_enable_handler(self, dir_path): + config_folder_name = self.config_folder + status_folder_name = self.ext_env_handler.status_folder + log_folder_name = self.ext_env_handler.log_folder + + # creating the required folder (e.g: config folder, log folder, status folder) under the temp directory + config_folder_path = os.path.join(dir_path, config_folder_name) + status_folder_path = os.path.join(dir_path, status_folder_name) + log_folder_path = os.path.join(dir_path, log_folder_name) + os.mkdir(config_folder_path) + os.mkdir(status_folder_path) + os.mkdir(log_folder_path) + + # copying a sample version of the .settings file from the helpers folder to the temp directory + shutil.copy(os.path.join("helpers", "1234.settings"), config_folder_path) + config_file_path = os.path.join(config_folder_path, '1234.settings') + + # updating the timestamp because the backup logic fetches seq no from the handler configuration files/.settings in config folder, if nothing is set in the env variable + with open(config_file_path, 'a') as f: + timestamp = time.mktime(datetime.strptime('2019-07-20T12:10:14Z', '%Y-%m-%dT%H:%M:%S%z').timetuple()) + os.utime(config_file_path, (timestamp, timestamp)) + f.close() + + self.ext_env_handler.config_folder = config_folder_path + self.ext_env_handler.status_folder = status_folder_path + self.ext_env_handler.log_folder = log_folder_path + + self.ext_config_settings_handler.config_folder = config_folder_path + 
self.core_state_handler.dir_path = config_folder_path + self.ext_state_handler.dir_path = config_folder_path + + return config_file_path, config_folder_path + + def create_helpers_for_enable_request(self, config_folder_path): + """ Create config settings, CoreState and ExtState files needed for enable request """ + # create config settings, CoreState and ExtState files in tempdir using references from the respective files under helpers in tests dir + shutil.copy(os.path.join("helpers", self.constants.EXT_STATE_FILE), config_folder_path) + shutil.copy(os.path.join("helpers", self.constants.CORE_STATE_FILE), config_folder_path) + new_settings_file = os.path.join(config_folder_path, "12.settings") + shutil.copy(os.path.join("helpers", "1234.settings"), new_settings_file) + + # set the modified time of the config settings file in tempdir + with open(new_settings_file, 'a') as f: + timestamp = time.mktime(datetime.strptime('2019-07-21T12:10:14Z', '%Y-%m-%dT%H:%M:%S%z').timetuple()) + os.utime(new_settings_file, (timestamp, timestamp)) + f.close() + + return new_settings_file + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestEnableCommandHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) \ No newline at end of file diff --git a/src/extension/tests/TestExtConfigSettingsHandler.py b/src/extension/tests/TestExtConfigSettingsHandler.py new file mode 100644 index 00000000..2f5f6bc7 --- /dev/null +++ b/src/extension/tests/TestExtConfigSettingsHandler.py @@ -0,0 +1,246 @@ +"""Unit test for extension ExtConfigSettingsHandler.py""" +import os +import shutil +import tempfile +import time +import unittest +from datetime import datetime +from unittest import mock #TODO: Remove dependency on mock. 
+from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.ExtConfigSettingsHandler import ExtConfigSettingsHandler +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestExtConfigSettingsHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.json_file_handler = JsonFileHandler(self.logger) + self.config_public_settings_fields = Constants.ConfigPublicSettingsFields + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + @mock.patch('src.file_handlers.ExtConfigSettingsHandler.os.getenv', autospec=True, return_value=1234) + def test_get_seq_no_from_env_variable(self, seq_no_from_env_var): + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, "mockConfig") + seq_no = ext_config_settings_handler.get_seq_no() + self.assertIsNotNone(seq_no) + self.assertEqual(seq_no, seq_no_from_env_var.return_value) + + def test_seq_no_from_config_folder(self): + files = [ + {"name": '1.json', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '2.json', "lastModified": '2018-07-20T12:12:14Z'}, + {"name": '11.json', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '12.settings', "lastModified": '2019-07-02T12:12:14Z'}, + {"name": '121.settings', "lastModified": '2017-07-20T12:12:14Z'}, + {"name": '122.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '123.json', "lastModified": '2019-07-20T11:12:14Z'}, + {"name": '10.settings', "lastModified": '2019-07-20T10:12:14Z'}, + {"name": '111.settings', "lastModified": '2019-07-20T12:10:14Z'}, + {"name": 'dir1', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '111111', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '2.settings', "lastModified": '2019-07-20T12:12:12Z'}, 
+ {"name": '3a.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": 'aa.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": 'a3.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '22.settings.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '0.settings', "lastModified": '2019-07-19T12:12:14Z'}, + {"name": 'abc.123.settings', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '.settings', "lastModified": '2019-07-20T12:12:14Z'} + ] + + test_dir = tempfile.mkdtemp() + for file in files: + file_path = os.path.join(test_dir, file["name"]) + with open(file_path, 'w') as f: + timestamp = time.mktime(datetime.strptime(file["lastModified"], '%Y-%m-%dT%H:%M:%S%z').timetuple()) + os.utime(file_path, (timestamp, timestamp)) + f.close() + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, test_dir) + seq_no = ext_config_settings_handler.get_seq_no() + self.assertEqual(122, seq_no) + shutil.rmtree(test_dir) + + def test_seq_no_from_empty_config_folder(self): + test_dir = tempfile.mkdtemp() + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, test_dir) + seq_no = ext_config_settings_handler.get_seq_no() + self.assertEqual(None, seq_no) + shutil.rmtree(test_dir) + + def test_are_config_settings_valid(self): + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, "mockConfig") + + runtime_settings_key = Constants.RUNTIME_SETTINGS + handler_settings_key = Constants.HANDLER_SETTINGS + public_settings_key = Constants.PUBLIC_SETTINGS + + config_settings_json = None + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + config_settings_json = [] + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + config_settings_json = {} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + # 
runtimeSettings not in file + config_settings_json = {'key': 'test'} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # runtimeSettings not of type list + config_settings_json = {runtime_settings_key: "test"} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: {}} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # runtimeSettings is None or empty + config_settings_json = {runtime_settings_key: None} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # runtimeSettings is on len 0 + config_settings_json = {runtime_settings_key: []} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + # handlerSettings not in runtimeSettings + config_settings_json = {runtime_settings_key: ["test"]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # handlerSettings not of type dict + config_settings_json = {runtime_settings_key: [{handler_settings_key: []}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: "test"}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: ["test"]}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # handlerSettings is None or empty + config_settings_json = {runtime_settings_key: [{handler_settings_key: None}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: {}}]} + 
self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + # publicSettings not in handlerSettings + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal"}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # handlerSettings not of type dict + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal", public_settings_key: []}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal", public_settings_key: "test"}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal", public_settings_key: ["test"]}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + # publicSettings is None or empty + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal", public_settings_key: None}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + config_settings_json = {runtime_settings_key: [{handler_settings_key: {"testKey": "testVal", public_settings_key: {}}}]} + self.assertFalse(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + # accepted config settings + config_settings_json = { + runtime_settings_key: [{ + handler_settings_key: { + "testKey": "testVal", + public_settings_key: { + self.config_public_settings_fields.operation: "test", + self.config_public_settings_fields.activity_id: "12345-2312-1234-23245-32112", + self.config_public_settings_fields.start_time: "2019-07-20T12:12:14Z", + self.config_public_settings_fields.maximum_duration: "20m", + self.config_public_settings_fields.reboot_setting: 
"IfRequired", + self.config_public_settings_fields.include_classifications: ["Critical","Security"], + self.config_public_settings_fields.include_patches: ["*", "test*", "*ern*=1.2*", "kern*=1.23.45"], + self.config_public_settings_fields.exclude_patches: ["*", "test", "*test"], + self.config_public_settings_fields.internal_settings: "" + } + } + }] + } + self.assertTrue(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + config_settings_json = { + runtime_settings_key: [{ + handler_settings_key: { + "testKey": "testVal", + public_settings_key: { + self.config_public_settings_fields.operation: "test", + self.config_public_settings_fields.activity_id: "12345-2312-1234-23245-32112", + self.config_public_settings_fields.start_time: "2019-07-20T12:12:14Z" + } + } + }] + } + self.assertTrue(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + config_settings_json = { + runtime_settings_key: [{ + handler_settings_key: { + "testKey": "testVal", + public_settings_key: { + self.config_public_settings_fields.operation: "test", + self.config_public_settings_fields.activity_id: "12345-2312-1234-23245-32112", + self.config_public_settings_fields.start_time: "2019-07-20T12:12:14Z", + self.config_public_settings_fields.maximum_duration: "", + self.config_public_settings_fields.reboot_setting: "IfRequired", + self.config_public_settings_fields.include_classifications: ["Critical", "Security"], + self.config_public_settings_fields.include_patches: ["*", "test*", "*ern*=1.2*", "kern*=1.23.45"], + self.config_public_settings_fields.exclude_patches: ["*", "test", "*test"], + self.config_public_settings_fields.internal_settings: "" + } + } + }] + } + self.assertTrue(ext_config_settings_handler.are_config_settings_valid(config_settings_json)) + + def test_read_file_success(self): + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, os.path.join(os.path.pardir, "tests", "helpers")) + seq_no 
= "1234" + config_values = ext_config_settings_handler.read_file(seq_no) + self.assertEqual(config_values.__getattribute__(self.config_public_settings_fields.operation), "Deployment") + self.assertEqual(config_values.__getattribute__(self.config_public_settings_fields.reboot_setting), "IfRequired") + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_read_file_failures(self, time_sleep): + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, os.path.join(os.path.pardir, "tests", "helpers")) + # Seq_no invalid, none, -1, empty + seq_no = None + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + seq_no = -1 + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + seq_no = "" + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + + # FileNotFound + seq_no = "12345" + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + + # empty file + test_dir = tempfile.mkdtemp() + file_name = "123.settings" + with open(os.path.join(test_dir, file_name), 'w') as f: + f.close() + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, test_dir) + seq_no = "123" + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + shutil.rmtree(test_dir) + + # empty valid file + test_dir = tempfile.mkdtemp() + file_name = "1237.settings" + with open(os.path.join(test_dir, file_name), 'w') as f: + f.write("{}") + f.close() + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, test_dir) + seq_no = "1237" + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + shutil.rmtree(test_dir) + + # file not valid + seq_no = "1234" + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, os.path.join(os.path.pardir, "tests", "helpers")) + with 
mock.patch('tests.TestExtConfigSettingsHandler.ExtConfigSettingsHandler.are_config_settings_valid', autospec=True, return_value=False): + self.assertRaises(Exception, ext_config_settings_handler.read_file, seq_no) + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestExtConfigSettingsHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestExtEnvHandler.py b/src/extension/tests/TestExtEnvHandler.py new file mode 100644 index 00000000..1b7c5564 --- /dev/null +++ b/src/extension/tests/TestExtEnvHandler.py @@ -0,0 +1,40 @@ +import os.path +import unittest +from unittest import mock +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.ExtEnvHandler import ExtEnvHandler +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestExtEnvHandler(unittest.TestCase): + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.json_file_handler = JsonFileHandler(self.logger) + self.env_settings_fields = Constants.EnvSettingsFields + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_file_read_success(self): + ext_env_handler = ExtEnvHandler(self.json_file_handler, handler_env_file_path=os.path.join(os.path.pardir, "tests", "helpers")) + self.assertIsNotNone(ext_env_handler.log_folder) + self.assertEqual(ext_env_handler.log_folder, "mockLog") + self.assertIsNotNone(ext_env_handler.status_folder) + + @mock.patch('tests.TestExtEnvHandler.JsonFileHandler.get_json_file_content', autospec=True) + def test_file_read_failure(self, mock_response): + mock_response.return_value = None + handler_env_file_path = os.path.join(os.path.pardir, "tests", "helpers") + ext_env_handler = ExtEnvHandler(self.json_file_handler, 
handler_env_file_path=handler_env_file_path) + self.assertIsNone(ext_env_handler.handler_environment_json) + self.assertFalse(hasattr(ext_env_handler, 'config_folder')) + self.assertFalse(hasattr(ext_env_handler, 'log_folder')) + + mock_response.return_value = [{"key1": "value"}, {"key2": "value2"}] + self.assertRaises(Exception, ExtEnvHandler, self.json_file_handler, handler_env_file_path=handler_env_file_path) + + mock_response.return_value = [{}] + self.assertRaises(Exception, ExtEnvHandler, self.json_file_handler, handler_env_file_path=handler_env_file_path) \ No newline at end of file diff --git a/src/extension/tests/TestExtOutputStatusHandler.py b/src/extension/tests/TestExtOutputStatusHandler.py new file mode 100644 index 00000000..7a5c4c0f --- /dev/null +++ b/src/extension/tests/TestExtOutputStatusHandler.py @@ -0,0 +1,84 @@ +import json +import os +import shutil +import tempfile +import unittest +from unittest import mock +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.ExtOutputStatusHandler import ExtOutputStatusHandler +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestExtOutputStatusHandler(unittest.TestCase): + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.json_file_handler = JsonFileHandler(self.logger) + self.status_file_fields = Constants.StatusFileFields + self.status = Constants.Status + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_create_status_file(self): + file_name = "test" + dir_path = tempfile.mkdtemp() + operation = "Assessment" + ext_status_handler = ExtOutputStatusHandler(self.logger, self.json_file_handler) + ext_status_handler.write_status_file(file_name, dir_path, operation, [], self.status.Transitioning.lower()) 
+
+        # BUG FIX: the original opened dir_path + "\\" + file_name, a Windows-only
+        # path separator — this extension targets Linux, where that open() fails.
+        # os.path.join is portable and matches how the rest of these tests build paths.
+        with open(os.path.join(dir_path, file_name + ext_status_handler.file_ext)) as status_file:
+            content = json.load(status_file)
+        parent_key = self.status_file_fields.status
+        self.assertIsNotNone(content)
+        self.assertEqual(content[0][parent_key][self.status_file_fields.status_name], "Azure Patch Management")
+        self.assertEqual(content[0][parent_key][self.status_file_fields.status_operation], operation)
+        self.assertEqual(content[0][parent_key][self.status_file_fields.status_status], self.status.Transitioning.lower())
+        shutil.rmtree(dir_path)
+
+    def test_read_file(self):
+        # Writes a status file, reads it back through the handler, and verifies
+        # the name/operation/status fields round-trip unchanged.
+        file_name = "test"
+        dir_path = tempfile.mkdtemp()
+        operation = "Assessment"
+
+        ext_status_handler = ExtOutputStatusHandler(self.logger, self.json_file_handler)
+        ext_status_handler.write_status_file(file_name, dir_path, operation, [], self.status.Transitioning.lower())
+        status_json = ext_status_handler.read_file(file_name, dir_path)
+        parent_key = self.status_file_fields.status
+        self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_name], "Azure Patch Management")
+        self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_operation], operation)
+        self.assertEqual(status_json[0][parent_key][self.status_file_fields.status_status], self.status.Transitioning.lower())
+        shutil.rmtree(dir_path)
+
+    @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True)
+    def test_update_file(self, time_sleep):
+        # Captures the file's mtime before the update calls so the assertions
+        # (in the remainder of this test) can detect whether update_file touched it.
+        file_name = "test"
+        dir_path = tempfile.mkdtemp()
+        operation = "Assessment"
+
+        ext_status_handler = ExtOutputStatusHandler(self.logger, self.json_file_handler)
+        ext_status_handler.write_status_file(file_name, dir_path, operation, [], self.status.Success.lower())
+        status_json = ext_status_handler.read_file(file_name, dir_path)
+        prev_timestamp = status_json[0][self.status_file_fields.timestamp_utc]
+        stat_file_name = os.stat(os.path.join(dir_path, file_name + ".status"))
+        prev_modified_time = stat_file_name.st_mtime
+
ext_status_handler.update_file("test1", dir_path) + stat_file_name = os.stat(os.path.join(dir_path, file_name + ".status")) + modified_time = stat_file_name.st_mtime + self.assertEqual(prev_modified_time, modified_time) + + ext_status_handler.update_file(file_name, dir_path) + stat_file_name = os.stat(os.path.join(dir_path, file_name + ".status")) + modified_time = stat_file_name.st_mtime + self.assertNotEqual(prev_modified_time, modified_time) + updated_status_json = ext_status_handler.read_file(file_name, dir_path) + self.assertEqual(updated_status_json[0][self.status_file_fields.status][self.status_file_fields.status_status], self.status.Transitioning.lower()) + shutil.rmtree(dir_path) + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestExtOutputStatusHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestExtStateHandler.py b/src/extension/tests/TestExtStateHandler.py new file mode 100644 index 00000000..5b8076f8 --- /dev/null +++ b/src/extension/tests/TestExtStateHandler.py @@ -0,0 +1,79 @@ +import datetime +import os +import shutil +import tempfile +import unittest +from unittest import mock +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.ExtStateHandler import ExtStateHandler +from src.local_loggers.Logger import Logger +from src.Utility import Utility +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestExtStateHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.utility = Utility(self.logger) + self.json_file_handler = JsonFileHandler(self.logger) + self.ext_state_fields = Constants.ExtStateFields + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_create_file(self): + test_dir = 
tempfile.mkdtemp() + ext_state_handler = ExtStateHandler(test_dir, self.utility, self.json_file_handler) + ext_state_handler.create_file(1, "Assessment", datetime.datetime.utcnow()) + self.assertTrue(os.path.exists(os.path.join(test_dir, Constants.EXT_STATE_FILE))) + self.utility.delete_file(ext_state_handler.dir_path, ext_state_handler.file) + shutil.rmtree(test_dir) + + def test_read_file(self): + ext_state_handler = ExtStateHandler(os.path.join(os.path.pardir, "tests", "helpers"), self.utility, self.json_file_handler) + ext_state_values = ext_state_handler.read_file() + self.assertIsNotNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_number)) + self.assertEqual(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_number), 1234) + self.assertIsNotNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_achieve_enable_by)) + self.assertIsNotNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_operation)) + self.assertEqual(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_operation), "Deployment") + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_read_file_no_content(self, time_sleep): + ext_state_handler = ExtStateHandler(os.path.join(os.path.pardir, "tests", "helper"), self.utility, self.json_file_handler) + ext_state_values = ext_state_handler.read_file() + self.assertIsNotNone(ext_state_values) + self.assertIsNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_number)) + self.assertIsNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_achieve_enable_by)) + self.assertIsNone(ext_state_values.__getattribute__(self.ext_state_fields.ext_seq_operation)) + + def test_delete_file_failure(self): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, Constants.EXT_STATE_FILE) + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + # delete file 
+ ext_state_handler = ExtStateHandler('test', self.utility, self.json_file_handler) + self.assertRaises(Exception, self.utility.delete_file, ext_state_handler.dir_path, ext_state_handler.file) + self.assertTrue(os.path.exists(file_path)) + # Remove the directory after the test + shutil.rmtree(test_dir) + + def test_delete_file_success(self): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, Constants.EXT_STATE_FILE) + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + # delete file + ext_state_handler = ExtStateHandler(test_dir, self.utility, self.json_file_handler) + self.utility.delete_file(ext_state_handler.dir_path, ext_state_handler.file) + self.assertFalse(os.path.exists(file_path)) + # Remove the directory after the test + shutil.rmtree(test_dir) \ No newline at end of file diff --git a/src/extension/tests/TestFileLogger.py b/src/extension/tests/TestFileLogger.py new file mode 100644 index 00000000..9b86cb5e --- /dev/null +++ b/src/extension/tests/TestFileLogger.py @@ -0,0 +1,104 @@ +""" Unit test for FileLogger """ +import shutil +import tempfile +import time +import unittest +from datetime import datetime +import os +from os import path +from src.local_loggers.FileLogger import FileLogger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestFileLogger(unittest.TestCase): + + # setup + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------\n") + self.test_dir = tempfile.mkdtemp() + self.file_path = path.join(self.test_dir, 'test.log') + self.file_logger = FileLogger(self.test_dir, 'test.log') + + # teardown + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + shutil.rmtree(self.test_dir) + + def test_file_open(self): + self.assertIsNotNone(self.file_logger.log_file_handle) + self.file_logger.close() + + def 
test_write_file_not_found_exception(self): + self.file_logger.close() + self.file_logger.log_file_handle = None + self.file_logger.write("Test log") + self.assertRaises(Exception) + + def test_write(self): + self.file_logger.write("Test log") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn("Test log", file_read.readlines()[-1]) + file_read.close() + + def test_flush(self): + self.file_logger.write("flush this") + self.file_logger.flush() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn("flush this", file_read.readlines()[-1]) + file_read.close() + self.file_logger.close() + + def test_close(self): + self.file_logger.close() + self.assertTrue(self.file_logger.log_file_handle.closed) + # with self.assertRaises(ValueError): + # self.file_logger.write("write in closed file") + + def test_delete_older_log_files_success(self): + files = [ + {"name": '1.ext.log', "lastModified": '2019-07-20T12:12:14Z'}, # reverse sort order seqno: 1 + {"name": '121.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 7 + {"name": '122.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 8 + {"name": '123.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 9 + {"name": '124.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 10 + {"name": '125.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 11 + {"name": '126.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 12 + {"name": '127.log', "lastModified": '2017-07-21T12:12:14Z'}, # reverse sort order seqno: 13 + {"name": 'test.log', "lastModified": '2017-07-21T12:12:14Z'}, # testing with the current log file, reverse sort order seqno: 14 + {"name": '123.json', "lastModified": '2019-07-20T11:12:14Z'}, + {"name": '10.settings', "lastModified": '2019-07-20T10:12:14Z'}, + {"name": '111.txt', 
"lastModified": '2019-07-20T12:10:14Z'}, + {"name": '12.core.log', "lastModified": '2019-07-02T12:12:14Z'}, # reverse sort order seqno: 6 + {"name": 'dir1', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '111111', "lastModified": '2019-07-20T12:12:14Z'}, + {"name": '2.core.log', "lastModified": '2019-07-20T12:12:12Z'}, # reverse sort order seqno: 5 + {"name": '22.log.log', "lastModified": '2019-07-20T12:12:14Z'}, # reverse sort order seqno: 2 + {"name": 'abc.123.log', "lastModified": '2019-07-20T12:12:14Z'}, # reverse sort order seqno: 3 + {"name": '.log', "lastModified": '2019-07-20T12:12:14Z'} # reverse sort order seqno: 4 + ] + + for file in files: + file_path = os.path.join(self.test_dir, file["name"]) + with open(file_path, 'w') as f: + timestamp = time.mktime(datetime.strptime(file["lastModified"], '%Y-%m-%dT%H:%M:%S%z').timetuple()) + os.utime(file_path, (timestamp, timestamp)) + f.close() + + # modifying timestamp format of 127.log, to test with a diff time format + file_path = os.path.join(self.test_dir, "127.log") + with open(file_path, 'w') as f: + timestamp = time.mktime(datetime.strptime("21-07-2017T12:12:14Z", '%d-%m-%YT%H:%M:%S%z').timetuple()) + os.utime(file_path, (timestamp, timestamp)) + f.close() + + self.file_logger.delete_older_log_files(self.test_dir) + self.assertEqual(11, len(self.file_logger.get_all_log_files(self.test_dir))) + self.file_logger.close() + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestFileLogger) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestHandlerManifest.py b/src/extension/tests/TestHandlerManifest.py new file mode 100644 index 00000000..7c8ee8a8 --- /dev/null +++ b/src/extension/tests/TestHandlerManifest.py @@ -0,0 +1,37 @@ +"""Unit test for extension HandlerManifest""" +import os +import json +import unittest +from src.Constants import Constants +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class 
TestHandlerManifest(unittest.TestCase): + """Test case to guard against handler manifest changes - not really a unit test""" + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.handler_manifest_file = os.path.join(os.path.pardir, 'src', Constants.HANDLER_MANIFEST_FILE) + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_handler_manifest_json(self): + self.handler_manifest_file_handle = open(self.handler_manifest_file, "r") + file_contents = self.handler_manifest_file_handle.read() + handler_json = json.loads(file_contents) + self.assertEqual(len(handler_json), 1) + self.assertEqual(handler_json[0]['version'], 1.0) + self.assertEqual(handler_json[0]['handlerManifest']['disableCommand'], "MsftLinuxPatchExtShim.sh -d") + self.assertEqual(handler_json[0]['handlerManifest']['enableCommand'], "MsftLinuxPatchExtShim.sh -e") + self.assertEqual(handler_json[0]['handlerManifest']['uninstallCommand'], "MsftLinuxPatchExtShim.sh -u") + self.assertEqual(handler_json[0]['handlerManifest']['installCommand'], "MsftLinuxPatchExtShim.sh -i") + self.assertEqual(handler_json[0]['handlerManifest']['updateCommand'], "MsftLinuxPatchExtShim.sh -p") + self.assertEqual(handler_json[0]['handlerManifest']['rebootAfterInstall'], False) + self.assertEqual(handler_json[0]['handlerManifest']['reportHeartbeat'], False) + self.handler_manifest_file_handle.close() + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestHandlerManifest) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestInstallCommandHandler.py b/src/extension/tests/TestInstallCommandHandler.py new file mode 100644 index 00000000..24246745 --- /dev/null +++ b/src/extension/tests/TestInstallCommandHandler.py @@ -0,0 +1,88 @@ +import os +import unittest +from unittest import mock +from unittest.mock import patch +from 
src.InstallCommandHandler import InstallCommandHandler +from src.file_handlers.ExtEnvHandler import ExtEnvHandler +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestInstallCommandHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.json_file_handler = JsonFileHandler(self.logger) + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + @patch('tests.TestInstallCommandHandler.JsonFileHandler.get_json_file_content') + def test_validate_os_type_is_linux(self, mock_ext_env_handler): + mock_ext_env_handler.return_value = None + ext_env_handler = ExtEnvHandler(self.json_file_handler) + install_command_handler = InstallCommandHandler(self.logger, ext_env_handler) + with mock.patch('src.InstallCommandHandler.sys.platform', 'linux'): + self.assertTrue(install_command_handler.validate_os_type()) + + @patch('tests.TestInstallCommandHandler.JsonFileHandler.get_json_file_content') + def test_validate_os_type_not_linux(self, mock_ext_env_handler): + mock_ext_env_handler.return_value = None + ext_env_handler = ExtEnvHandler(self.json_file_handler) + install_command_handler = InstallCommandHandler(self.logger, ext_env_handler) + with mock.patch('src.InstallCommandHandler.sys.platform', 'win32'): + self.assertRaises(Exception, install_command_handler.validate_os_type) + + def test_validate_environment(self): + config_type = 'handlerEnvironment' + + # file has no content + handler_environment = None + with mock.patch('tests.TestInstallCommandHandler.JsonFileHandler.get_json_file_content', return_value=handler_environment): + ext_env_handler = ExtEnvHandler(self.json_file_handler) + install_command_handler = InstallCommandHandler(self.logger, ext_env_handler) + 
self.assertRaises(Exception, install_command_handler.validate_environment) + + # Validating datatype for fields in HandlerEnvironment + handler_environment = [] + handler_environment_dict = {} + handler_environment.append(handler_environment_dict) + install_command_handler = InstallCommandHandler(self.logger, handler_environment) + self.verify_key(handler_environment[0], 'version', 1.0, 'abc', True, Exception, install_command_handler.validate_environment) + self.verify_key(handler_environment[0], 'version', 1.0, '', True, Exception, install_command_handler.validate_environment) + self.verify_key(handler_environment[0], 'handlerEnvironment', {}, 'abc', True, Exception, install_command_handler.validate_environment) + self.verify_key(handler_environment[0][config_type], 'logFolder', 'test', 1.0, True, Exception, install_command_handler.validate_environment) + self.verify_key(handler_environment[0][config_type], 'configFolder', 'test', 1.0, True, Exception, install_command_handler.validate_environment) + self.verify_key(handler_environment[0][config_type], 'statusFolder', 'test', 1.0, True, Exception, install_command_handler.validate_environment) + + # Validating HandlerEnvironment.json file + ext_env_handler = ExtEnvHandler(self.json_file_handler, handler_env_file_path=os.path.join(os.path.pardir, "tests", "helpers")) + install_command_handler = InstallCommandHandler(self.logger, ext_env_handler) + install_command_handler.validate_environment() + + def verify_key(self, config_type, key, expected_value, incorrect_value, is_required, exception_type, function_name): + # removing key value pair from handler if it exists + config_type.pop(key, None) + # required key not in config + if (is_required): + self.assertRaises(exception_type, function_name) + # key not of expected type + config_type[key] = incorrect_value + self.assertRaises(exception_type, function_name) + config_type[key] = expected_value + + 
@patch('src.InstallCommandHandler.InstallCommandHandler.validate_os_type')
+    @patch('src.InstallCommandHandler.InstallCommandHandler.validate_environment')
+    @patch('tests.TestInstallCommandHandler.JsonFileHandler.get_json_file_content')
+    def test_all_validate_methods_called_from_install_handler(self, mock_ext_env_handler, mock_validate_environment, mock_os_type):
+        # BUG FIX: stacked @patch decorators inject mocks bottom-up, so the first
+        # argument is the innermost (lowest) patch: get_json_file_content. The
+        # original parameter order (mock_os_type first) silently asserted on the
+        # wrong mocks and never verified validate_os_type at all.
+        ext_env_handler = ExtEnvHandler(self.json_file_handler)
+        install_command_handler = InstallCommandHandler(self.logger, ext_env_handler)
+        install_command_handler.execute_handler_action()
+        self.assertTrue(mock_os_type.called)
+        self.assertTrue(mock_validate_environment.called)
+
+if __name__ == '__main__':
+    SUITE = unittest.TestLoader().loadTestsFromTestCase(TestInstallCommandHandler)
+    unittest.TextTestRunner(verbosity=2).run(SUITE)
\ No newline at end of file
diff --git a/src/extension/tests/TestJsonFileHandler.py b/src/extension/tests/TestJsonFileHandler.py
new file mode 100644
index 00000000..8f82e691
--- /dev/null
+++ b/src/extension/tests/TestJsonFileHandler.py
@@ -0,0 +1,175 @@
+import os
+import shutil
+import tempfile
+import unittest
+from unittest import mock
+from src.Constants import Constants
+from src.file_handlers.JsonFileHandler import JsonFileHandler
+from src.local_loggers.Logger import Logger
+from tests.helpers.VirtualTerminal import VirtualTerminal
+
+
+class TestJsonFileHandler(unittest.TestCase):
+
+    def setUp(self):
+        VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------")
+        self.logger = Logger()
+        self.json_file_handler = JsonFileHandler(self.logger)
+
+    def tearDown(self):
+        VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------")
+
+    @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True)
+    def test_get_json_file_content_success(self, time_sleep):
+        file = Constants.EXT_STATE_FILE
+        dir_path = os.path.join(os.path.pardir, "tests", "helpers")
+        json_content = 
self.json_file_handler.get_json_file_content(file, dir_path, raise_if_not_found=True) + self.assertIsNotNone(json_content) + time_sleep.assert_called_once() + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_get_json_file_content_failure(self, time_sleep): + file = Constants.EXT_STATE_FILE + dir_path = os.path.join(os.path.pardir, "tests", "helper") + self.assertRaises(Exception, self.json_file_handler.get_json_file_content, file, dir_path, raise_if_not_found=True) + self.assertEqual(time_sleep.call_count, 5) + + dir_path = os.path.join(os.path.pardir, "tests", "helpers") + with mock.patch('src.file_handlers.JsonFileHandler.json.loads', autospec=True) as mock_get_content: + # 1st read trial failed + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, None] + self.json_file_handler.get_json_file_content(file, dir_path) + self.assertEqual(time_sleep.call_count, 2) + self.assertEqual(mock_get_content.call_count, 2) + + # 2nd read trial failed + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, OSError, None] + self.json_file_handler.get_json_file_content(file, dir_path) + self.assertEqual(time_sleep.call_count, 3) + self.assertEqual(mock_get_content.call_count, 3) + + # 3rd read trial failed + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, Exception, OSError, None] + self.json_file_handler.get_json_file_content(file, dir_path) + self.assertEqual(time_sleep.call_count, 4) + self.assertEqual(mock_get_content.call_count, 4) + + # 4th read trial failed + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, Exception, OSError, OSError, None] + self.json_file_handler.get_json_file_content(file, dir_path) + self.assertEqual(time_sleep.call_count, 5) + self.assertEqual(mock_get_content.call_count, 5) + + # All read trial failed, 
doesn't throw exception + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, Exception, OSError, OSError, OSError] + json_content = self.json_file_handler.get_json_file_content(file, dir_path, raise_if_not_found=False) + self.assertEqual(time_sleep.call_count, 5) + self.assertEqual(mock_get_content.call_count, 5) + self.assertIsNone(json_content) + + # All read trial failed throws exception + time_sleep.call_count = 0 + mock_get_content.call_count = 0 + mock_get_content.side_effect = [OSError, Exception, ValueError, OSError, OSError] + self.assertRaises(Exception, self.json_file_handler.get_json_file_content, file, dir_path, raise_if_not_found=True) + self.assertEqual(time_sleep.call_count, 5) + self.assertEqual(mock_get_content.call_count, 5) + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_create_file_success(self, time_sleep): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file = "test.json" + content = {'testKey1': 'testVal1', + 'testKey2': {'testsubKey1': 'testsubVal1'}, + 'testKey3': [{'testsubKey2': 'testsubVal2'}]} + # create a file + self.json_file_handler.write_to_json_file(test_dir, file, content) + self.assertTrue(os.path.exists(os.path.join(test_dir, "test.json"))) + time_sleep.assert_called_once() + json_content = self.json_file_handler.get_json_file_content(file, test_dir, raise_if_not_found=False) + self.assertTrue('testKey1' in json_content) + # Remove the directory after the test + shutil.rmtree(test_dir) + + @mock.patch('src.file_handlers.JsonFileHandler.time.sleep', autospec=True) + def test_create_file_failure(self, time_sleep): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file = "test.json" + content = {'testKey1': 'testVal1', + 'testKey2': {'testsubKey1': 'testsubVal1'}, + 'testKey3': [{'testsubKey2': 'testsubVal2'}]} + + self.assertRaises(Exception, self.json_file_handler.write_to_json_file, "test_dir", file, 
content)
+        self.assertEqual(time_sleep.call_count, 0)
+
+        # Each case below fails json.dump N times before succeeding (or exhausting
+        # all retries); comments previously said "read"/"delete" — copy-paste from
+        # the neighbouring tests — but this test exercises write_to_json_file.
+        with mock.patch('src.file_handlers.JsonFileHandler.json.dump', autospec=True) as mock_create:
+            # 1st write trial failed
+            time_sleep.call_count = 0
+            mock_create.call_count = 0
+            mock_create.side_effect = [OSError, None]
+            self.json_file_handler.write_to_json_file(test_dir, file, content)
+            self.assertEqual(time_sleep.call_count, 2)
+            self.assertEqual(mock_create.call_count, 2)
+
+            # 2nd write trial failed
+            time_sleep.call_count = 0
+            mock_create.call_count = 0
+            mock_create.side_effect = [OSError, OSError, None]
+            self.json_file_handler.write_to_json_file(test_dir, file, content)
+            self.assertEqual(time_sleep.call_count, 3)
+            self.assertEqual(mock_create.call_count, 3)
+
+            # 3rd write trial failed
+            time_sleep.call_count = 0
+            mock_create.call_count = 0
+            mock_create.side_effect = [OSError, Exception, OSError, None]
+            self.json_file_handler.write_to_json_file(test_dir, file, content)
+            self.assertEqual(time_sleep.call_count, 4)
+            self.assertEqual(mock_create.call_count, 4)
+
+            # 4th write trial failed
+            time_sleep.call_count = 0
+            mock_create.call_count = 0
+            mock_create.side_effect = [OSError, Exception, OSError, OSError, None]
+            self.json_file_handler.write_to_json_file(test_dir, file, content)
+            self.assertEqual(time_sleep.call_count, 5)
+            self.assertEqual(mock_create.call_count, 5)
+
+            # All write trials failed
+            time_sleep.call_count = 0
+            mock_create.call_count = 0
+            mock_create.side_effect = [OSError, Exception, OSError, OSError, OSError]
+            self.assertRaises(Exception, self.json_file_handler.write_to_json_file, test_dir, file, content)
+            self.assertEqual(time_sleep.call_count, 5)
+            self.assertEqual(mock_create.call_count, 5)
+
+        # Remove the directory after the test
+        shutil.rmtree(test_dir)
+
+    # Verifies get_json_config_value_safely: nested lookup returns the value,
+    # missing keys return None or raise depending on raise_if_not_found.
+    def test_get_json_config_value_safely(self):
+        content = {'testKey1': 'testVal1',
+                   'testKey2': {'testsubKey1': 'testsubVal1'},
+                   'testKey3': [{'testsubKey2': 'testsubVal2'}]}
+
+
self.assertIsNone(self.json_file_handler.get_json_config_value_safely(None, 'testsubKey1', 'testKey2', raise_if_not_found=True)) + self.assertEqual(self.json_file_handler.get_json_config_value_safely(content, 'testsubKey1', 'testKey2', raise_if_not_found=True), 'testsubVal1') + self.assertRaises(Exception, self.json_file_handler.get_json_config_value_safely, content, 'testsubKey1', 'testKey3', raise_if_not_found=True) + self.assertRaises(Exception, self.json_file_handler.get_json_config_value_safely, content, 'testsubKey2', 'testKey3', raise_if_not_found=True) + self.assertIsNone(self.json_file_handler.get_json_config_value_safely(content, 'testsubKey2', 'testKey3', raise_if_not_found=False)) + self.assertRaises(Exception, self.json_file_handler.get_json_config_value_safely, content, 'testKey1', None, raise_if_not_found=True) + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestJsonFileHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestLogger.py b/src/extension/tests/TestLogger.py new file mode 100644 index 00000000..c436d190 --- /dev/null +++ b/src/extension/tests/TestLogger.py @@ -0,0 +1,68 @@ +""" Unit test for Logger """ +import shutil +import tempfile +import unittest +from os import path +from src.local_loggers.FileLogger import FileLogger +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestLogger(unittest.TestCase): + + # setup + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------\n") + self.test_dir = tempfile.mkdtemp() + self.file_path = path.join(self.test_dir, 'test.txt') + self.file_logger = FileLogger(self.test_dir, 'test.txt') + self.logger = Logger(self.file_logger) + + # teardown + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + shutil.rmtree(self.test_dir) + + def 
test_log(self): + self.logger.log("Test message") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn("Test message", file_read.readlines()[-1]) + file_read.close() + + def test_log_verbose(self): + self.logger.log_verbose("Test verbose message") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn(self.logger.VERBOSE + " Test verbose message", file_read.readlines()[-1]) + file_read.close() + + def test_log_error(self): + self.logger.log_error("Test error message") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn(self.logger.ERROR + " Test error message", file_read.readlines()[-1]) + file_read.close() + + def test_log_warning(self): + self.logger.log_warning("Test warning message") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn(self.logger.WARNING + " Test warning message", file_read.readlines()[-1]) + file_read.close() + + def test_log_debug(self): + self.logger.log_debug("Test debug message") + self.file_logger.close() + file_read = open(self.file_path, "r") + self.assertIsNotNone(file_read) + self.assertIn(self.logger.DEBUG + " Test debug message", file_read.readlines()[-1]) + file_read.close() + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestLogger) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestProcessHandler.py b/src/extension/tests/TestProcessHandler.py new file mode 100644 index 00000000..1fbc7bd7 --- /dev/null +++ b/src/extension/tests/TestProcessHandler.py @@ -0,0 +1,54 @@ +import os +import unittest +from unittest.mock import patch +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.ExtConfigSettingsHandler import ExtConfigSettingsHandler +from 
src.file_handlers.ExtEnvHandler import ExtEnvHandler +from src.local_loggers.Logger import Logger +from src.ProcessHandler import ProcessHandler +from src.Utility import Utility +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestProcessHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.utility = Utility(self.logger) + self.json_file_handler = JsonFileHandler(self.logger) + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + def test_get_public_config_settings(self): + ext_config_settings_handler = ExtConfigSettingsHandler(self.logger, self.json_file_handler, os.path.join(os.path.pardir, "tests", "helpers")) + seq_no = "1234" + config_settings = ext_config_settings_handler.read_file(seq_no) + process_handler = ProcessHandler(self.logger) + public_config_settings = process_handler.get_public_config_settings(config_settings) + self.assertIsNotNone(public_config_settings) + self.assertEqual(public_config_settings.get(Constants.ConfigPublicSettingsFields.operation), "Deployment") + + def test_get_env_settings(self): + handler_env_file_path = os.path.join(os.path.pardir, "tests", "helpers") + ext_env_handler = ExtEnvHandler(self.json_file_handler, handler_env_file_path=handler_env_file_path) + process_handler = ProcessHandler(self.logger) + env_settings = process_handler.get_env_settings(ext_env_handler) + self.assertIsNotNone(env_settings) + self.assertEqual(env_settings.get(Constants.EnvSettingsFields.log_folder), "mockLog") + + @patch('src.ProcessHandler.os.kill', autospec=True) + @patch('tests.TestProcessHandler.ProcessHandler.is_process_running', autospec=True, return_value=True) + def test_kill_process(self, is_process_running, os_kill): + pid = 123 + os_kill.side_effect = OSError + process_handler = ProcessHandler(self.logger) + 
self.assertRaises(OSError, process_handler.kill_process, pid) + + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestProcessHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) + diff --git a/src/extension/tests/TestRuntimeContextHandler.py b/src/extension/tests/TestRuntimeContextHandler.py new file mode 100644 index 00000000..e10db45c --- /dev/null +++ b/src/extension/tests/TestRuntimeContextHandler.py @@ -0,0 +1,60 @@ +import collections +import datetime +import os +import unittest +from unittest import mock +from src.Constants import Constants +from src.RuntimeContextHandler import RuntimeContextHandler +from src.file_handlers.CoreStateHandler import CoreStateHandler +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.local_loggers.Logger import Logger +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestRuntimeContextHandler(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.json_file_handler = JsonFileHandler(self.logger) + self.runtime_context_handler = RuntimeContextHandler(self.logger) + self.core_state_fields = Constants.CoreStateFields + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + @mock.patch('src.RuntimeContextHandler.time.sleep', autospec=True) + def test_check_if_patch_completes_in_time(self, time_sleep): + core_state_handler = CoreStateHandler(os.path.join(os.path.pardir, "tests", "helpers"), self.json_file_handler) + time_for_prev_patch_to_complete = "" + self.assertRaises(Exception, self.runtime_context_handler.check_if_patch_completes_in_time, time_for_prev_patch_to_complete, core_state_handler) + + time_for_prev_patch_to_complete = datetime.datetime.utcnow() + datetime.timedelta(hours=0, minutes=3) + 
self.assertTrue(self.runtime_context_handler.check_if_patch_completes_in_time(time_for_prev_patch_to_complete, "2019-07-20T12:12:14Z", core_state_handler)) + # time_sleep is called once while waiting for prev patch and once while reading core state file after wait to get the latest status, hence the count 2 + self.assertEqual(time_sleep.call_count, 2) + + time_sleep.call_count = 0 + datetime.datetime = MockDateTime + time_for_prev_patch_to_complete = datetime.datetime.utcnow() + datetime.timedelta(hours=0, minutes=3) + core_state_values = collections.namedtuple(Constants.CoreStateFields.parent_key, [self.core_state_fields.number, self.core_state_fields.action, self.core_state_fields.completed, self.core_state_fields.last_heartbeat, self.core_state_fields.process_ids]) + core_state_json = core_state_values(1234, "Assessment", "False", "2019-07-20T12:12:14Z", []) + + with mock.patch("tests.TestRuntimeContextHandler.CoreStateHandler.read_file", autospec=True, return_value=core_state_json): + with mock.patch("src.RuntimeContextHandler.datetime.datetime.utcnow", autospec=True, + side_effect=[datetime.datetime(2019, 11, 1, 13, 24, 00), + datetime.datetime(2019, 11, 1, 13, 25, 00), + datetime.datetime(2019, 11, 1, 13, 26, 00), + datetime.datetime(2019, 11, 1, 13, 27, 00)]): + with mock.patch("src.RuntimeContextHandler.type", return_value=MockDateTime): + self.assertFalse( + self.runtime_context_handler.check_if_patch_completes_in_time(time_for_prev_patch_to_complete, "2019-07-20T12:12:14Z", core_state_handler)) + +class MockDateTime(datetime.datetime): + @classmethod + def utcnow(cls): + return cls(2019, 11, 1, 13, 24, 00) + +if __name__ == '__main__': + SUITE = unittest.TestLoader().loadTestsFromTestCase(TestRuntimeContextHandler) + unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/src/extension/tests/TestUtility.py b/src/extension/tests/TestUtility.py new file mode 100644 index 00000000..1e81c41a --- /dev/null +++ b/src/extension/tests/TestUtility.py @@ -0,0 
+1,101 @@ +import collections +import datetime +import os +import shutil +import tempfile +import unittest +from unittest import mock +from src.Constants import Constants +from src.file_handlers.JsonFileHandler import JsonFileHandler +from src.file_handlers.CoreStateHandler import CoreStateHandler +from src.local_loggers.Logger import Logger +from src.Utility import Utility +from tests.helpers.VirtualTerminal import VirtualTerminal + + +class TestUtility(unittest.TestCase): + + def setUp(self): + VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------") + self.logger = Logger() + self.utility = Utility(self.logger) + self.json_file_handler = JsonFileHandler(self.logger) + + def tearDown(self): + VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------") + + @mock.patch('src.Utility.time.sleep', autospec=True) + def test_delete_file_success(self, time_sleep): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, "test.json") + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + # delete file + self.utility.delete_file(test_dir, "test.json") + # once the file is deleted, os.path.exists on the ful file path will return False + self.assertFalse(os.path.exists(file_path)) + time_sleep.assert_called_once() + # Remove the directory after the test + shutil.rmtree(test_dir) + + @mock.patch('src.Utility.time.sleep', autospec=True) + def test_delete_file_failure(self, time_sleep): + # Create a temporary directory + test_dir = tempfile.mkdtemp() + file_path = os.path.join(test_dir, "test.json") + # create a file + test_file_handler = open(file_path, 'w') + test_file_handler.close() + + # FileNotFound + self.assertRaises(Exception, self.utility.delete_file, test_dir, "test1.json") + + # test with a directory + file_path = os.path.join(test_dir, "test") + # create a directory + os.makedirs(file_path) + 
self.assertRaises(Exception, self.utility.delete_file, test_dir, "test") + + with mock.patch('src.Utility.os.remove', autospec=True) as mock_remove: + # 1st delete trial failed + mock_remove.side_effect = [OSError, None] + self.utility.delete_file(test_dir, "test.json") + self.assertEqual(time_sleep.call_count, 2) + self.assertEqual(mock_remove.call_count, 2) + + # 2nd delete trial failed + time_sleep.call_count = 0 + mock_remove.call_count = 0 + mock_remove.side_effect = [OSError, OSError, None] + self.utility.delete_file(test_dir, "test.json") + self.assertEqual(time_sleep.call_count, 3) + self.assertEqual(mock_remove.call_count, 3) + + # 3rd delete trial failed + time_sleep.call_count = 0 + mock_remove.call_count = 0 + mock_remove.side_effect = [OSError, Exception, OSError, None] + self.utility.delete_file(test_dir, "test.json") + self.assertEqual(time_sleep.call_count, 4) + self.assertEqual(mock_remove.call_count, 4) + + # 4th delete trial failed + time_sleep.call_count = 0 + mock_remove.call_count = 0 + mock_remove.side_effect = [OSError, Exception, OSError, OSError, None] + self.utility.delete_file(test_dir, "test.json") + self.assertEqual(time_sleep.call_count, 5) + self.assertEqual(mock_remove.call_count, 5) + + # All delete trial failed + time_sleep.call_count = 0 + mock_remove.call_count = 0 + mock_remove.side_effect = [OSError, Exception, OSError, OSError, OSError] + self.assertRaises(Exception, self.utility.delete_file, test_dir, "test.json") + self.assertEqual(time_sleep.call_count, 5) + self.assertEqual(mock_remove.call_count, 5) + + # Remove the directory after the test + shutil.rmtree(test_dir) diff --git a/src/extension/tests/__init__.py b/src/extension/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/extension/tests/helpers/1234.settings b/src/extension/tests/helpers/1234.settings new file mode 100644 index 00000000..cd56c57d --- /dev/null +++ b/src/extension/tests/helpers/1234.settings @@ -0,0 +1,18 @@ +{ + 
"runtimeSettings": [{ + "handlerSettings": { + "protectedSettingsCertThumbprint": "", + "protectedSettings": "", + "publicSettings": { + "operation": "Deployment", + "activityId": "12345-2312-1234-23245-32112", + "startTime": "2019-07-20T12:12:14Z", + "maximumDuration": "20", + "rebootSetting": "IfRequired", + "classificationsToInclude":["Critical","Security"], + "patchesToInclude": ["*", "test*","*ern*=1.2*", "kern*=1.23.45"], + "patchesToExclude": ["*", "test", "*test"] + } + } + }] +} \ No newline at end of file diff --git a/src/extension/tests/helpers/CoreState.json b/src/extension/tests/helpers/CoreState.json new file mode 100644 index 00000000..f3a2a6c7 --- /dev/null +++ b/src/extension/tests/helpers/CoreState.json @@ -0,0 +1,9 @@ +{ + "coreSequence": { + "number": 1234, + "action": "Assessment", + "completed": "True", + "lastHeartbeat": "2019-07-20T12:12:14Z", + "processIds": ["11111","22232"] + } +} \ No newline at end of file diff --git a/src/extension/tests/helpers/ExtState.json b/src/extension/tests/helpers/ExtState.json new file mode 100644 index 00000000..0bbfcf3f --- /dev/null +++ b/src/extension/tests/helpers/ExtState.json @@ -0,0 +1,7 @@ +{ + "extensionSequence": { + "number": 1234, + "achieveEnableBy": "2019-07-20T12:12:14Z", + "operation": "Deployment" + } +} diff --git a/src/extension/tests/helpers/HandlerEnvironment.json b/src/extension/tests/helpers/HandlerEnvironment.json new file mode 100644 index 00000000..52e05fab --- /dev/null +++ b/src/extension/tests/helpers/HandlerEnvironment.json @@ -0,0 +1,10 @@ +[ + { + "version": 1.0, + "handlerEnvironment": { + "logFolder": "mockLog", + "configFolder": "mockConfig", + "statusFolder": "mockStatus" + } + } +] \ No newline at end of file diff --git a/src/extension/tests/helpers/VirtualTerminal.py b/src/extension/tests/helpers/VirtualTerminal.py new file mode 100644 index 00000000..087f6eec --- /dev/null +++ b/src/extension/tests/helpers/VirtualTerminal.py @@ -0,0 +1,48 @@ +class 
VirtualTerminal(object): + class TerminalColors(object): + SUCCESS = '\033[92m' + WARNING = '\033[93m' + ERROR = '\033[91m' + HIGHLIGHT = '\033[95m' + LOWLIGHT = '\033[96m' + DARK = '\033[0;94m' + RESET = '\033[0m' + + def __init__(self, enable_virtual_terminal=True): + self.enabled = True if enable_virtual_terminal else False # forcing boolean + + def print_success(self, message): + if self.enabled: + print(self.TerminalColors.SUCCESS + message + self.TerminalColors.RESET) + else: + print(message) + + def print_warning(self, message): + if self.enabled: + print(self.TerminalColors.WARNING + message + self.TerminalColors.RESET) + else: + print(message) + + def print_error(self, message): + if self.enabled: + print(self.TerminalColors.ERROR + message + self.TerminalColors.RESET) + else: + print(message) + + def print_highlight(self, message): + if self.enabled: + print(self.TerminalColors.HIGHLIGHT + message + self.TerminalColors.RESET) + else: + print(message) + + def print_lowlight(self, message): + if self.enabled: + print(self.TerminalColors.LOWLIGHT + message + self.TerminalColors.RESET) + else: + print(message) + + def print_dark(self, message): + if self.enabled: + print(self.TerminalColors.DARK + message + self.TerminalColors.RESET) + else: + print(message) diff --git a/src/extension/tests/helpers/__init__.py b/src/extension/tests/helpers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/tools/Package-All.py b/src/tools/Package-All.py new file mode 100644 index 00000000..fa1d9493 --- /dev/null +++ b/src/tools/Package-All.py @@ -0,0 +1,221 @@ +""" Merges individual python modules from src to the MsftLinuxPatchExt files in the out directory. +Relative source and destination paths for the extension are auto-detected if the optional src parameter is not present. 
+How to use: python Package.py """ + +from __future__ import print_function +import sys +import os +import errno +import datetime +from shutil import copyfile +from shutil import make_archive +import subprocess +import xml.etree.ElementTree as et + +# imports in VERY_FIRST_IMPORTS, order should be kept +VERY_FIRST_IMPORTS = [ + 'from __future__ import print_function\n', + 'from abc import ABCMeta, abstractmethod\n'] +GLOBAL_IMPORTS = set() + + +def read_python_module(source_code_path, module_name): + module_full_path = os.path.join(source_code_path, module_name) + imports = [] + codes = "\n\n# region ########## {0} ##########\n".format(os.path.basename(module_name)) + is_code_body = False + if os.path.exists(module_full_path): + with open(module_full_path) as py_file: + for line in py_file: + if line.startswith('import'): + imports.append(line) + elif line.strip().startswith('class') or line.strip().startswith('def main(argv):'): + is_code_body = True + + if is_code_body is True: + codes = codes + line + codes = codes + "\n# endregion ########## {0} ##########\n".format(os.path.basename(module_name)) + return imports, codes + + +def write_merged_code(code, merged_file_full_path): + with open(merged_file_full_path, 'a+') as py_file: + py_file.write(code) + + +def insert_copyright_notice(merged_file_full_path, merged_file_name): + notice = '# --------------------------------------------------------------------------------------------------------------------\n' + notice += '# \n' + notice += '# Copyright (c) Microsoft Corporation. 
All rights reserved.\n' + notice += '# \n' + notice += '# --------------------------------------------------------------------------------------------------------------------\n\n' + prepend_content_to_file(notice, merged_file_full_path) + + +# noinspection PyPep8 +def replace_text_in_file(file_path, old_text, new_text): + with open(file_path, 'rb') as file_handle: text = file_handle.read() + text = text.replace(old_text.encode(encoding='UTF-8'), new_text.encode(encoding='UTF-8')) + with open(file_path, 'wb') as file_handle: file_handle.write(text) + + +def insert_imports(imports, merged_file_name): + imports_str = ''.join(imports) + prepend_content_to_file(imports_str, merged_file_name) + + +def prepend_content_to_file(content, file_name): + temp_file = os.path.join(os.path.dirname(file_name), "temp_.py") + with open(file_name, 'r') as file1: + with open(temp_file, 'w+') as file2: + file2.write(content) + file2.write(file1.read()) + if os.name.lower() == 'nt': + os.unlink(file_name) + os.rename(temp_file, file_name) + + +def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment): + try: + print('\n\n=============================== GENERATING ' + merged_file_name + '... 
=============================================================\n') + + print('========== Delete old extension file if it exists.') + if os.path.exists(merged_file_full_path): + os.remove(merged_file_full_path) + + print('\n========== Merging modules: \n') + modules_to_be_merged = [] + for root, dirs, files in os.walk(source_code_path): + for file_name in files: + if ".py" not in file_name or ".pyc" in file_name: + continue + file_path = os.path.join(root, file_name) + if '__main__.py' in file_path: + modules_to_be_merged.append(file_path) + elif os.path.basename(file_path) in ('__init__.py',): + continue + else: + if len(modules_to_be_merged) > 0 and '__main__.py' in modules_to_be_merged[-1]: + modules_to_be_merged.insert(-1, file_path) + else: + modules_to_be_merged.append(file_path) + for python_module in modules_to_be_merged: + print(format(os.path.basename(python_module)), end=', ') + imports, codes = read_python_module(source_code_path, python_module) + GLOBAL_IMPORTS.update(imports) + write_merged_code(codes, merged_file_full_path) + print("") + + print('\n========== Prepend all import statements\n') + insert_imports(GLOBAL_IMPORTS, merged_file_full_path) + insert_imports(VERY_FIRST_IMPORTS, merged_file_full_path) + + print('========== Set Copyright, Version and Environment. 
Also enforce UNIX-style line endings.\n') + insert_copyright_notice(merged_file_full_path, merged_file_name) + timestamp = datetime.datetime.utcnow().strftime("%y%m%d-%H%M") + replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name.split('.')[0]) + replace_text_in_file(merged_file_full_path, '[%exec_sub_ver%]', timestamp) + replace_text_in_file(merged_file_full_path, 'Constants.UNKNOWN_ENV', environment) + replace_text_in_file(merged_file_full_path, '\r\n', '\n') + + print("========== Merged extension code was saved to:\n{0}\n".format(merged_file_full_path)) + + except Exception as error: + print('Exception during merge python modules: ' + repr(error)) + raise + + +def main(argv): + """The main entry of merge python modules run""" + try: + # Clear + os.system('cls' if os.name == 'nt' else 'clear') + + # Determine code path if not specified + if len(argv) < 2: + # auto-detect src path + source_code_path = os.path.dirname(os.path.realpath(__file__)).replace("tools", os.path.join("extension", "src")) + if os.path.exists(os.path.join(source_code_path, "__main__.py")) is False: + print("Invalid extension source code path. Check enlistment.\n") + return + else: + # explicit src path parameter + source_code_path = argv[1] + if os.path.exists(os.path.join(source_code_path, "ActionHandler.py")) is False: + print("Invalid extension source code path. 
Check src parameter.\n") + return + + # Prepare destination for compiled scripts + working_directory = os.path.abspath(os.path.join(source_code_path, os.pardir, os.pardir)) + merge_file_directory = os.path.join(working_directory, 'out') + try: + os.makedirs(merge_file_directory) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + # Invoke core business logic code packager + exec_core_build_path = os.path.join(working_directory, 'tools', 'Package-Core.py') + subprocess.call('python ' + exec_core_build_path, shell=True) + + # Generated compiled scripts at the destination + merged_file_details = [('MsftLinuxPatchExt.py', 'Constants.PROD')] + for merged_file_detail in merged_file_details: + merged_file_destination = os.path.join(working_directory, 'out', merged_file_detail[0]) + generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1]) + + # GENERATING EXTENSION + print('\n\n=============================== GENERATING LinuxPatchExtension.zip... 
=============================================================\n') + # Rev handler version + print('\n========== Revising extension version.') + manifest_xml_file_path = os.path.join(working_directory, 'extension', 'src', 'manifest.xml') + manifest_tree = et.parse(manifest_xml_file_path) + manifest_root = manifest_tree.getroot() + for i in range(0, len(manifest_root)): + if 'Version' in str(manifest_root[i]): + current_version = manifest_root[i].text + version_split = current_version.split('.') + version_split[len(version_split)-1] = str(int(version_split[len(version_split)-1]) + 1) + new_version = '.'.join(version_split) + print("Changing extension version from {0} to {1}.".format(current_version, new_version)) + replace_text_in_file(manifest_xml_file_path, current_version, new_version) + + # Copy extension files + print('\n========== Copying extension files + enforcing UNIX style line endings.\n') + ext_files = ['HandlerManifest.json', 'manifest.xml', 'MsftLinuxPatchExtShim.sh'] + for ext_file in ext_files: + ext_file_src = os.path.join(working_directory, 'extension', 'src', ext_file) + ext_file_destination = os.path.join(working_directory, 'out', ext_file) + copyfile(ext_file_src, ext_file_destination) + replace_text_in_file(ext_file_destination, '\r\n', '\n') + + # Generate extension zip + ext_zip_file = 'LinuxPatchExtension.zip' + ext_zip_file_path_src = os.path.join(working_directory, ext_zip_file) + ext_zip_file_path_dest = os.path.join(working_directory, 'out', ext_zip_file) + if os.path.exists(ext_zip_file_path_src): + os.remove(ext_zip_file_path_src) + if os.path.exists(ext_zip_file_path_dest): + os.remove(ext_zip_file_path_dest) + + # Generate zip + print('\n========== Generating extension zip.\n') + make_archive(os.path.splitext(ext_zip_file_path_src)[0], 'zip', os.path.join(working_directory, 'out'), '.') + copyfile(ext_zip_file_path_src, ext_zip_file_path_dest) + os.remove(ext_zip_file_path_src) + + # Remove extension file copies + print('\n========== 
Cleaning up environment.\n') + for ext_file in ext_files: + ext_file_path = os.path.join(working_directory, 'out', ext_file) + os.remove(ext_file_path) + + print("========== Extension ZIP was saved to:\n{0}\n".format(ext_zip_file_path_dest)) + + except Exception as error: + print('Exception during merge python modules: ' + repr(error)) + raise + + +if __name__ == "__main__": + main(sys.argv) diff --git a/src/tools/Package-Core.py b/src/tools/Package-Core.py new file mode 100644 index 00000000..a6bc9b9b --- /dev/null +++ b/src/tools/Package-Core.py @@ -0,0 +1,166 @@ +""" Merges individual python modules from src to the PatchMicrosoftOMSLinuxComputer.py and MsftLinuxPatchCore.py files in the out directory. +Relative source and destination paths for the patch runbook are auto-detected if the optional src parameter is not present. +How to use: python Package.py """ + +from __future__ import print_function +import sys +import os +import errno +import datetime + +# imports in VERY_FIRST_IMPORTS, order should be kept +VERY_FIRST_IMPORTS = [ + 'from __future__ import print_function\n', + 'from abc import ABCMeta, abstractmethod\n', + 'from datetime import timedelta\n'] +GLOBAL_IMPORTS = set() + + +def read_python_module(source_code_path, module_name): + module_full_path = os.path.join(source_code_path, module_name) + imports = [] + codes = "\n\n# region ########## {0} ##########\n".format(os.path.basename(module_name).replace('.py','')) + is_code_body = False + if os.path.exists(module_full_path): + with open(module_full_path) as py_file: + for line in py_file: + if line.startswith('import'): + imports.append(line) + elif line.strip().startswith('class') or line.strip().startswith('def main(argv):'): + is_code_body = True + + if is_code_body is True: + codes = codes + line + codes = codes + "\n# endregion ########## {0} ##########\n".format(os.path.basename(module_name).replace('.py','')) + return imports, codes + + +def write_merged_code(code, merged_file_full_path): + 
with open(merged_file_full_path, 'a+') as py_file: + py_file.write(code) + + +def insert_copyright_notice(merged_file_full_path, merged_file_name): + notice = '# --------------------------------------------------------------------------------------------------------------------\n' + notice += '# \n' + notice += '# Copyright (c) Microsoft Corporation. All rights reserved.\n' + notice += '# \n' + notice += '# --------------------------------------------------------------------------------------------------------------------\n\n' + prepend_content_to_file(notice, merged_file_full_path) + + +# noinspection PyPep8 +def replace_text_in_file(file_path, old_text, new_text): + with open(file_path, 'rb') as file_handle: text = file_handle.read() + text = text.replace(old_text.encode(encoding='UTF-8'), new_text.encode(encoding='UTF-8')) + with open(file_path, 'wb') as file_handle: file_handle.write(text) + +def insert_imports(imports, merged_file_name): + imports_str = ''.join(imports) + prepend_content_to_file(imports_str, merged_file_name) + + +def prepend_content_to_file(content, file_name): + temp_file = os.path.join(os.path.dirname(file_name), "temp_.py") + with open(file_name, 'r') as file1: + with open(temp_file, 'w+') as file2: + file2.write(content) + file2.write(file1.read()) + if os.name.lower() == 'nt': + os.unlink(file_name) + os.rename(temp_file, file_name) + + +def generate_compiled_script(source_code_path, merged_file_full_path, merged_file_name, environment): + try: + print('\n\n=============================== GENERATING ' + merged_file_name + '... 
=============================================================\n') + + print('========== Delete old core file if it exists.') + if os.path.exists(merged_file_full_path): + os.remove(merged_file_full_path) + + print('\n========== Merging modules: \n') + modules_to_be_merged = [] + for root, dirs, files in os.walk(source_code_path): + for file_name in files: + if ".py" not in file_name or ".pyc" in file_name: + continue + file_path = os.path.join(root, file_name) + if '__main__.py' in file_path: + modules_to_be_merged.append(file_path) + elif os.path.basename(file_path) in ('__init__.py',): + continue + elif os.path.basename(file_path) in ('base_log_processor.py', 'PackageManager.py'): + modules_to_be_merged.insert(0, file_path) + else: + if len(modules_to_be_merged) > 0 and '__main__.py' in modules_to_be_merged[-1]: + modules_to_be_merged.insert(-1, file_path) + else: + modules_to_be_merged.append(file_path) + for python_module in modules_to_be_merged: + print(format(os.path.basename(python_module)), end=', ') + imports, codes = read_python_module(source_code_path, python_module) + GLOBAL_IMPORTS.update(imports) + write_merged_code(codes, merged_file_full_path) + print("") + + print('\n========== Prepend all import statements\n') + insert_imports(GLOBAL_IMPORTS, merged_file_full_path) + insert_imports(VERY_FIRST_IMPORTS, merged_file_full_path) + + print('========== Set Copyright, Version and Environment. 
Also enforce UNIX-style line endings.\n') + insert_copyright_notice(merged_file_full_path, merged_file_name) + timestamp = datetime.datetime.utcnow().strftime("%y%m%d-%H%M") + replace_text_in_file(merged_file_full_path, '[%exec_name%]', merged_file_name.split('.')[0]) + replace_text_in_file(merged_file_full_path, '[%exec_sub_ver%]', timestamp) + replace_text_in_file(merged_file_full_path, '\r\n', '\n') + + print("========== Merged core code was saved to:\n{0}\n".format(merged_file_full_path)) + + except Exception as error: + print('Exception during merge python modules: ' + repr(error)) + raise + + +def main(argv): + """The main entry of merge python modules run""" + try: + # Clear + os.system('cls' if os.name == 'nt' else 'clear') + + # Determine code path if not specified + if len(argv) < 2: + # auto-detect src path + source_code_path = os.path.dirname(os.path.realpath(__file__)).replace("tools", os.path.join("core","src")) + if os.path.exists(os.path.join(source_code_path, "__main__.py")) is False: + print("Invalid core source code path. Check enlistment.\n") + return + else: + # explicit src path parameter + source_code_path = argv[1] + if os.path.exists(os.path.join(source_code_path, "PatchInstaller.py")) is False: + print("Invalid core source code path. 
Check src parameter.\n") + return + + # Prepare destination for compiled scripts + working_directory = os.path.abspath(os.path.join(source_code_path, os.pardir, os.pardir)) + merge_file_directory = os.path.join(working_directory, 'out') + try: + os.makedirs(merge_file_directory) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + # Generated compiled scripts at the destination + merged_file_details = [('MsftLinuxPatchCore.py', 'Constants.PROD')] + for merged_file_detail in merged_file_details: + merged_file_destination = os.path.join(working_directory, 'out', merged_file_detail[0]) + generate_compiled_script(source_code_path, merged_file_destination, merged_file_detail[0], merged_file_detail[1]) + + except Exception as error: + print('Exception during merge python modules: ' + repr(error)) + raise + + +if __name__ == "__main__": + main(sys.argv) diff --git a/src/tools/misc/EnableVirtualTerminal.reg b/src/tools/misc/EnableVirtualTerminal.reg new file mode 100644 index 00000000..21291ee1 Binary files /dev/null and b/src/tools/misc/EnableVirtualTerminal.reg differ