diff --git a/.github/workflows/test-environment-cleaner-validation.yml b/.github/workflows/test-environment-cleaner-validation.yml new file mode 100644 index 000000000..9201fe4fa --- /dev/null +++ b/.github/workflows/test-environment-cleaner-validation.yml @@ -0,0 +1,27 @@ +name: Validate test environment cleaner + +on: + pull_request: + paths: + - 'jenkins_pipelines/scripts/test_environment_cleaner/**' + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + python3.11 -m pip install --upgrade pip + pip3.11 install -r jenkins_pipelines/scripts/test_environment_cleaner/requirements.txt + + - name: Run tests + run: cd jenkins_pipelines/scripts/test_environment_cleaner; ./run_tests.sh diff --git a/jenkins_pipelines/environments/common/pipeline-build-validation-cleanup.groovy b/jenkins_pipelines/environments/common/pipeline-build-validation-cleanup.groovy new file mode 100644 index 000000000..605ba65c5 --- /dev/null +++ b/jenkins_pipelines/environments/common/pipeline-build-validation-cleanup.groovy @@ -0,0 +1,245 @@ +def run(params) { + timestamps { + // Define paths and environment variables for reusability + GString TestEnvironmentCleanerProgram = "${WORKSPACE}/susemanager-ci/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/TestEnvironmentCleaner.py" + GString resultdir = "${WORKSPACE}/results" + GString resultdirbuild = "${resultdir}/${BUILD_NUMBER}" + GString exports = "export BUILD_NUMBER=${BUILD_NUMBER}; export BUILD_VALIDATION=true; " + String container_repository = params.container_repository ?: null + String product_version = null + String controllerHostname = null + String serverHostname = null + GString targetedTfFile = "${WORKSPACE}/../${params.targeted_project}/results/sumaform/main.tf" + GString targetedTfStateFile = 
"${WORKSPACE}/../${params.targeted_project}/results/sumaform/terraform.tfstate" + GString targetedTerraformDirPath = "${WORKSPACE}/../${params.targeted_project}/results/sumaform/" + GString localSumaformDirPath = "${resultdir}/sumaform/" + GString localTfStateFile = "${localSumaformDirPath}/terraform.tfstate" + GString logFile = "${resultdirbuild}/sumaform.log" + + // Construct the --tf-resources-to-delete argument dynamically + ArrayList defaultResourcesToDelete = [] + if (params.clean_proxy) { + defaultResourcesToDelete.add('proxy') + } + if (params.clean_monitoring_server) { + defaultResourcesToDelete.add('monitoring-server') + } + if (params.clean_retail) { + defaultResourcesToDelete.add('retail') + } + + String defaultResourcesToDeleteArgs = defaultResourcesToDelete.isEmpty() ? '' : "--default-resources-to-delete ${defaultResourcesToDelete.join(' ')}" + + GString commonParams = "--outputdir ${resultdir} --tf ${targetedTfFile} --gitfolder ${resultdir}/sumaform" + + // Define shared environment variables for terraform calls + GString environmentVars = """ + set -x + source /home/jenkins/.credentials + export TF_VAR_CONTAINER_REPOSITORY=${container_repository} + export TERRAFORM=${terraform_bin} + export TERRAFORM_PLUGINS=${terraform_bin_plugins} + """ + + try { + stage('Clone terracumber, susemanager-ci and sumaform') { + + // Prevent rebuild option + if (currentBuild.getBuildCauses().toString().contains("RebuildCause")) { + error "Rebuild is blocked for this job." + } + + // Create a directory to store the build results (if it does not exist). 
Needed for first run + sh "mkdir -p ${resultdir}" + git url: params.terracumber_gitrepo, branch: params.terracumber_ref + dir("susemanager-ci") { + checkout scm + } + + // Clone sumaform + sh "set +x; source /home/jenkins/.credentials; set -x; ${WORKSPACE}/terracumber-cli ${commonParams} --gitrepo ${params.sumaform_gitrepo} --gitref ${params.sumaform_ref} --runstep gitsync" + + // Set product version + if (params.targeted_project.contains("5.0")) { + product_version = '5.0' + } else if (params.targeted_project.contains("4.3")) { + product_version = '4.3' + } else if (params.targeted_project.contains("uyuni")) { + product_version = 'uyuni' + } else if (params.targeted_project.contains("5.1")) { + product_version = '5.1' + } else if (params.targeted_project.contains("head")) { + product_version = 'head' + } + else { + // Use the `error` step instead of `throw` + error("Error: targeted_project must contain either 'head', '5.1', '5.0', '4.3' or uyuni.") + } + } + + stage('Confirm Environment Cleanup') { + // Ask the user what environment they are cleaning, ensuring the answer matches params.targeted_project + def environmentChoice = input( + message: 'What environment are you cleaning?', + parameters: [ + string(name: 'Environment_Name', description: 'Enter the name of the environment you are cleaning.') + ] + ) + + // Validate that the user entered the correct environment + if (environmentChoice != params.targeted_project) { + error("The environment name entered does not match the targeted project. 
Aborting pipeline.") + } + } + + stage("Copy terraform files from ${params.targeted_project}"){ + // Copy tfstate and terraform directory to the result directory + sh """ + cp ${targetedTfStateFile} ${localTfStateFile} + cp -r ${targetedTerraformDirPath}.terraform ${localSumaformDirPath} + """ + + } + + stage("Extract the controller and server hostname") { + try { + controllerHostname = sh( + script: """ + set -e + cd ${localSumaformDirPath} + terraform output -json configuration | jq -r '.controller.hostname' + """, + returnStdout: true + ).trim() + + serverHostname = sh( + script: """ + set -e + cd ${localSumaformDirPath} + terraform output -json configuration | jq -r '.server.hostname' + """, + returnStdout: true + ).trim() + + // Print the values for confirmation + echo "Extracted controller hostname: ${controllerHostname}" + echo "Extracted server hostname: ${serverHostname}" + + } catch (Exception e) { + error("Failed to extract hostnames: ${e.message}") + } + + } + + GString programCall = "${TestEnvironmentCleanerProgram} --url ${serverHostname} --product_version ${product_version} ${defaultResourcesToDeleteArgs} --mode" + + stage('Delete the systems') { + sh(script: "${programCall} delete_systems") + } + stage('Delete config projects') { + sh(script: "${programCall} delete_config_projects") + } + stage('Delete software channels') { + sh(script: "${programCall} delete_software_channels") + } + stage('Delete activation keys') { + sh(script: "${programCall} delete_activation_keys") + } + stage('Delete minion users') { + sh(script: "${programCall} delete_users") + } + stage('Delete channel repositories') { + sh(script: "${programCall} delete_repositories") + } + stage('Delete salt keys') { + sh(script: "${programCall} delete_salt_keys") + } + + stage('Delete ssh know hosts') { + sh(script: "${TestEnvironmentCleanerProgram} --url ${serverHostname} --product_version ${product_version} --mode delete_known_hosts") + } + + stage('Delete distributions folders') { + 
sh(script: "${TestEnvironmentCleanerProgram} --url ${serverHostname} --product_version ${product_version} --mode delete_distributions") + } + + stage('Delete client VMs') { + + // Construct the --tf-resources-to-delete argument dynamically + ArrayList tfResourcesToDelete = [] + if (params.clean_proxy) { + tfResourcesToDelete.add('proxy') + } + if (params.clean_monitoring_server) { + tfResourcesToDelete.add('monitoring-server') + } + if (params.clean_retail) { + tfResourcesToDelete.add('retail') + } + + // Join the resources into a space-separated argument if there are any to delete + String tfResourcesToDeleteArg = tfResourcesToDelete.isEmpty() ? '' : "--tf-resources-to-delete ${tfResourcesToDelete.join(' ')}" + + // Execute Terracumber CLI to deploy the environment without clients + sh """ + ${environmentVars} + set +x + ${WORKSPACE}/terracumber-cli ${commonParams} --logfile ${logFile} --init --sumaform-backend ${sumaform_backend} --use-tf-resource-cleaner --runstep provision ${tfResourcesToDeleteArg} + """ + } + + stage('Redeploy the environment with new client VMs') { + + // Run Terracumber to deploy the environment + sh """ + ${environmentVars} + set +x + ${WORKSPACE}/terracumber-cli ${commonParams} --logfile ${resultdirbuild}/sumaform.log --init --sumaform-backend ${sumaform_backend} --runstep provision + """ + } + + stage('Copy the new custom repository json file to controller') { + if (params.push_new_custom_repositories) { + // Generate custom_repositories.json file in the workspace from the value passed by parameter + if (params.custom_repositories?.trim()) { + writeFile file: 'custom_repositories.json', text: params.custom_repositories, encoding: "UTF-8" + } + + // Generate custom_repositories.json file in the workspace using a Python script - MI Identifiers passed by parameter + if (params.mi_ids?.trim()) { + node('manager-jenkins-node') { + checkout scm + res_python_script_ = sh(script: "python3 
jenkins_pipelines/scripts/json_generator/maintenance_json_generator.py --mi_ids ${params.mi_ids}", returnStatus: true) + echo "Build Validation JSON script return code:\n ${res_python_script_}" + if (res_python_script_ != 0) { + error("MI IDs (${params.mi_ids}) passed by parameter are wrong (or already released)") + } + } + } + sh(script: "${TestEnvironmentCleanerProgram} --url ${controllerHostname} --product_version ${product_version} --mode update_custom_repositories") + } + } + + stage('Sanity check') { + sh "${WORKSPACE}/terracumber-cli ${commonParams} --logfile ${resultdirbuild}/testsuite.log --runstep cucumber --cucumber-cmd 'cd /root/spacewalk/testsuite; ${exports} rake cucumber:build_validation_sanity_check'" + } + + } + finally { + stage('Copy back tfstate') { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + sh "cp ${localTfStateFile} ${targetedTfStateFile}" + } + } + + stage('Rename tfstate to avoid copying it between runs') { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + sh "mv ${localTfStateFile} ${localTfStateFile}.old" + sh "cp ${localTfStateFile}.old ${resultdirbuild}" + } + } + } + } +} + +return this diff --git a/jenkins_pipelines/environments/qe-build-validation-cleaning-pipeline b/jenkins_pipelines/environments/qe-build-validation-cleaning-pipeline new file mode 100644 index 000000000..2554fdf50 --- /dev/null +++ b/jenkins_pipelines/environments/qe-build-validation-cleaning-pipeline @@ -0,0 +1,57 @@ +#!/usr/bin/env groovy + +node { + // Default node + String nodeName = 'sumaform-cucumber' + + // Check the value of targeted_project and adjust the nodeName accordingly + if (params.targeted_project.contains('PRV')) { + nodeName = 'sumaform-cucumber-provo' // Use this for PRV projects + } else if (params.targeted_project.contains('NUE')) { + nodeName = 'sumaform-cucumber' // Use this for NUE projects + } + + // Run on the selected node + node(nodeName) { + properties([ + buildDiscarder(logRotator(numToKeepStr: '5', 
artifactNumToKeepStr: '3')), + disableConcurrentBuilds(), + parameters([ + choice(name: 'targeted_project', choices: [ + 'default', + 'manager-4.3-qe-build-validation-NUE', + 'manager-5.0-qe-build-validation-NUE', + 'manager-5.0-qe-build-validation-weekly-NUE', + 'uyuni-master-qe-build-validation-NUE', + 'manager-4.3-qe-build-validation-PRV', + 'manager-5.0-qe-build-validation-PRV', + 'uyuni-master-qe-build-validation-PRV' + ], description: 'Path to the tf file to be used'), + string(name: 'sumaform_gitrepo', defaultValue: 'https://github.com/uyuni-project/sumaform.git', description: 'Sumaform Git Repository'), + string(name: 'sumaform_ref', defaultValue: 'master', description: 'Sumaform Git reference (branch, tag...)'), + string(name: 'terracumber_gitrepo', defaultValue: 'https://github.com/uyuni-project/terracumber.git', description: 'Terracumber Git Repository'), + string(name: 'terracumber_ref', defaultValue: 'master', description: 'Terracumber Git ref (branch, tag...)'), + string(name: 'container_repository', defaultValue: 'registry.suse.de/suse/sle-15-sp6/update/products/manager50/update/containerfile', description: 'Proxy and server container registry'), + booleanParam(name: 'clean_proxy', defaultValue: false, description: 'Clean Proxy, will remove all the artefacts related to Proxy and redeploy Proxy'), + booleanParam(name: 'clean_monitoring_server', defaultValue: false, description: 'Clean Monitoring Server, will remove all the artefacts related to Monitoring Server and redeploy Monitoring Server'), + booleanParam(name: 'clean_retail', defaultValue: false, description: 'Clean Retails artefacts, and redeploy terminal and dhcp if applicable'), + booleanParam(name: 'push_new_custom_repositories', defaultValue: false, description: 'Push the new custom_repositories.json to controller'), + text(name: 'mi_ids', defaultValue: '', description: 'MI Identifiers separated by comma or whitespaces (Option A)'), + text(name: 'custom_repositories', defaultValue: '{}', 
description: 'MU Repositories in json format (Option B)') + ]) + ]) + + stage('Checkout pipeline') { + checkout scm + } + + // Define environment variables + env.sumaform_backend = 'libvirt' + env.terraform_bin = '/usr/bin/terraform' + env.terraform_bin_plugins = '/usr/bin' + + // Load and run the pipeline + def pipeline = load "jenkins_pipelines/environments/common/pipeline-build-validation-cleanup.groovy" + pipeline.run(params) + } +} diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/README.md b/jenkins_pipelines/scripts/test_environment_cleaner/README.md new file mode 100644 index 000000000..810dddb66 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/README.md @@ -0,0 +1,37 @@ +# SUSE Manager API and SSH Management Script + +This Python script provides a command-line interface to manage various resources and configurations on a SUSE Manager server through the XML-RPC API and SSH commands. +The script enables testers to perform resource cleanup, SSH-based file management, and update custom repositories remotely. + +## Prerequisites + +- **Python 3.11**: Ensure Python 3.11 is installed. +- **Dependencies**: Install the required packages using: + +```bash +pip3.11 install -r test_environment_cleaner/requirements.txt +``` + +## Usage + +```commandline +python3.11 TestEnvironmentCleaner.py --url --mode [options] +``` + +### Command-Line Arguments + + - `--url`: (Required) URL of the SUSE Manager XML-RPC API. + - `--mode`: (Required) Operation mode. Choose one of the following: + `delete_users`: Deletes user accounts. + `delete_activation_keys`: Deletes activation keys. + `delete_config_projects`: Deletes configuration projects. + `delete_software_channels`: Deletes software channels. + `delete_systems`: Deletes managed systems. + `delete_repositories`: Deletes repositories. + `delete_salt_keys`: Deletes Salt keys. + `full_cleanup`: Runs a complete cleanup of selected resources. 
+ `delete_distributions`: Deletes distributions from the server. + `delete_known_hosts`: Deletes known SSH hosts in server. + `update_custom_repositories`: Updates custom repositories in controller. + - `--default-resources-to-delete`: Optional list of resources (proxy, monitoring-server, retail) to enforce deletion during API cleanup operations. + - `--product_version`: SUSE Manager version (head, 5.1, 5.0, 4.3 or uyuni). Used for handling different paths in specific operations. diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/requirements.txt b/jenkins_pipelines/scripts/test_environment_cleaner/requirements.txt new file mode 100644 index 000000000..e81e2ef15 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/requirements.txt @@ -0,0 +1,3 @@ +argparse +logging +paramiko diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/run_tests.sh b/jenkins_pipelines/scripts/test_environment_cleaner/run_tests.sh new file mode 100755 index 000000000..f6d579a13 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/run_tests.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Set the PYTHONPATH to include the necessary directories +export PYTHONPATH=$(pwd)/test_environment_cleaner_program + +# Run the tests +python3.11 -m unittest discover -s tests diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/TestEnvironmentCleaner.py b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/TestEnvironmentCleaner.py new file mode 100755 index 000000000..4a70bdbb0 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/TestEnvironmentCleaner.py @@ -0,0 +1,75 @@ +#!/usr/bin/python3.11 +import argparse +import logging +from test_environment_cleaner_api import ResourceManager +from test_environment_cleaner_ssh import SSHClientManager + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = 
logging.getLogger(__name__) + +# Define the available modes +MODES = [ + 'delete_users', 'delete_activation_keys', 'delete_config_projects', + 'delete_software_channels', 'delete_systems', 'delete_repositories', + 'full_cleanup', 'delete_salt_keys', 'delete_known_hosts', + 'update_custom_repositories', 'delete_distributions' +] + +def main(): + parser = argparse.ArgumentParser(description="Manage SUSE Manager API actions.") + parser.add_argument("--url", required=True, help="The URL of the SUSE Manager XML-RPC API.") + parser.add_argument("--mode", required=True, choices=MODES, help="The mode of operation.") + parser.add_argument("--default-resources-to-delete", type=str, nargs='*', + choices=['proxy', 'monitoring-server', 'retail'], + default=[], help='List of default modules to force deletion') + parser.add_argument("--product_version", required=True, type=str, choices=['head', '5.1', '5.0', '4.3', 'uyuni']) + + args = parser.parse_args() + manager_url = args.url + default_resources_to_delete = [ + item.replace("monitoring-server", "monitoring") if item == "monitoring-server" else item + for item in args.default_resources_to_delete + ] + # API part + if args.mode in ["delete_users", "delete_activation_keys", "delete_config_projects", + "delete_software_channels", "delete_systems", "delete_repositories", + "full_cleanup", "delete_salt_keys"]: + resource_manager = ResourceManager(manager_url, default_resources_to_delete, args.product_version) + resource_manager.get_session_key() + mode_actions = { + "delete_users": resource_manager.delete_users, + "delete_activation_keys": resource_manager.delete_activation_keys, + "delete_config_projects": resource_manager.delete_config_projects, + "delete_software_channels": resource_manager.delete_software_channels, + "delete_systems": resource_manager.delete_systems, + "delete_repositories": resource_manager.delete_channel_repos, + "delete_salt_keys": resource_manager.delete_salt_keys, + "full_cleanup": resource_manager.run, + 
} + try: + action = mode_actions.get(args.mode) + if action: + action() + else: + logger.error(f"Mode '{args.mode}' is not recognized.") + finally: + resource_manager.logout_session() + + # Server commands part + else: + ssh_manager = SSHClientManager(url=manager_url) + ssh_actions = { + "delete_known_hosts": lambda: ssh_manager.delete_known_hosts(args.product_version), + "delete_distributions": lambda: ssh_manager.delete_distributions(args.product_version), + "update_custom_repositories": ssh_manager.update_custom_repositories, + } + action = ssh_actions.get(args.mode) + if action: + action() + else: + logger.error(f"Mode '{args.mode}' is not recognized.") + + +if __name__ == "__main__": + main() diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/__init__.py b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/__init__.py new file mode 100644 index 000000000..dd1e004b3 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/__init__.py @@ -0,0 +1,4 @@ +# __init__.py +from .test_environment_cleaner_api import ResourceManager + +__all__ = ['ResourceManager'] diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_api.py b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_api.py new file mode 100644 index 000000000..038173dff --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_api.py @@ -0,0 +1,106 @@ +import logging +import xmlrpc.client + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Global configuration +username = "admin" +password = "admin" + +class ResourceManager: + def __init__(self, manager_url, resources_to_delete, product_version): + self.manager_url = manager_url + self.resources_to_keep = 
{"proxy", "monitoring", "build"} - set(resources_to_delete) + self.product_version = product_version + self.client = None + self.session_key = None + + def get_session_key(self): + self.client = xmlrpc.client.ServerProxy(f"http://{self.manager_url}/rpc/api") + self.session_key = self.client.auth.login(username, password) + logger.info("Session key obtained.") + + def logout_session(self): + self.client.auth.logout(self.session_key) + logger.info("Logged out from session.") + + def delete_users(self): + users = self.client.user.listUsers(self.session_key) + for user in users: + if user["login"] != "admin": + logger.info(f"Delete user: {user['login']}") + self.client.user.delete(self.session_key, user["login"]) + + def delete_activation_keys(self): + activation_keys = self.client.activationkey.listActivationKeys(self.session_key) + for activation_key in activation_keys: + if not any(protected in activation_key['key'] for protected in self.resources_to_keep): + logger.info(f"Delete activation key: {activation_key['key']}") + self.client.activationkey.delete(self.session_key, activation_key['key']) + + def delete_config_projects(self): + projects = self.client.contentmanagement.listProjects(self.session_key) + for project in projects: + logger.info(f"Delete project: {project['label']}") + self.client.contentmanagement.removeProject(self.session_key, project['label']) + + def delete_software_channels(self): + channels = self.client.channel.listMyChannels(self.session_key) + + if self.product_version == "uyuni": + for channel in channels: + if "custom" in channel['label'] and not any(protected in channel['label'] for protected in self.resources_to_keep): + logger.info(f"Delete custom channel: {channel['label']}") + self.client.channel.software.delete(self.session_key, channel['label']) + logging.warning("Delete only custom channels for uyuni") + return + + for channel in channels: + details = self.client.channel.software.getDetails(self.session_key, channel['label']) + 
if "appstream" in channel['label'] and details['parent_channel_label']: + if not any(protected in channel['label'] for protected in self.resources_to_keep): + logger.info(f"Delete sub channel appstream: {channel['label']}") + self.client.channel.software.delete(self.session_key, channel['label']) + + channels = self.client.channel.listMyChannels(self.session_key) + for channel in channels: + if "appstream" in channel['label'] and not any(protected in channel['label'] for protected in self.resources_to_keep): + logger.info(f"Delete parent channel appstream: {channel['label']}") + self.client.channel.software.delete(self.session_key, channel['label']) + + channels = self.client.channel.listMyChannels(self.session_key) + for channel in channels: + if not any(protected in channel['label'] for protected in self.resources_to_keep): + logger.info(f"Delete common channel: {channel['label']}") + self.client.channel.software.delete(self.session_key, channel['label']) + + def delete_systems(self): + systems = self.client.system.listSystems(self.session_key) + for system in systems: + if not any(protected in system['name'] for protected in self.resources_to_keep): + logger.info(f"Delete system : {system['name']} | id : {system['id']}") + self.client.system.deleteSystem(self.session_key, system['id']) + + def delete_channel_repos(self): + repositories = self.client.channel.software.listUserRepos(self.session_key) + for repository in repositories: + logger.info(f"Delete repository : {repository['label']}") + self.client.channel.software.removeRepo(self.session_key, repository['label']) + + def delete_salt_keys(self): + accepted_salt_keys = self.client.saltkey.acceptedList(self.session_key) + for accepted_salt_key in accepted_salt_keys: + if not any(protected in accepted_salt_key for protected in self.resources_to_keep): + logger.info(f"Delete remaining accepted key : {accepted_salt_key}") + self.client.saltkey.delete(self.session_key, accepted_salt_key) + + def run(self): + 
self.delete_users() + self.delete_activation_keys() + self.delete_config_projects() + self.delete_software_channels() + self.delete_systems() + self.delete_channel_repos() + self.delete_salt_keys() diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_ssh.py b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_ssh.py new file mode 100644 index 000000000..777181836 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/test_environment_cleaner_program/test_environment_cleaner_ssh.py @@ -0,0 +1,150 @@ +import paramiko +import re +import logging +from urllib.parse import urlparse + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class SSHClientManager: + def __init__(self, url, username="root", password="linux", port=22): + self.url = url + self.username = username + self.password = password + self.port = port + self._client = None + + def _connect(self): + """Establishes an SSH connection to the server.""" + if not self._client: + self._client = paramiko.SSHClient() + self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + try: + logger.info(f"Connecting to server at: {self.url}:{self.port}") + self._client.connect(self.url, port=self.port, username=self.username, password=self.password) + logger.info("Connection successful.") + except paramiko.AuthenticationException: + logger.error("Authentication failed while connecting to the server.") + raise + except paramiko.SSHException as ssh_exception: + logger.error(f"SSH connection error: {ssh_exception}") + raise + except Exception as e: + logger.error(f"Unexpected exception during SSH connection: {e}") + raise + + def _close(self): + """Closes the SSH connection.""" + if self._client: + self._client.close() + self._client = None + logger.info("SSH connection closed.") + + def _run_command(self, command): + """ + 
Runs a command on the remote server. + + :param command: Command to execute on the server. + :return: Command output or error message. + """ + self._connect() + try: + logger.info(f"Executing command: {command}") + stdin, stdout, stderr = self._client.exec_command(command) + output = stdout.read().decode().strip() + error = stderr.read().decode().strip() + + if error: + logger.error(f"Command error: {error}") + return error + logger.info(f"Command output: {output}") + return output + except Exception as e: + logger.error(f"Error while executing command: {e}") + raise + finally: + stdin.close() + + def _copy_file(self, local_path, remote_path): + """ + Copies a file to the remote server. + + :param local_path: Path of the local file to copy. + :param remote_path: Destination path on the remote server. + :return: Success message or error details. + """ + try: + self._connect() + sftp = self._client.open_sftp() + sftp.put(local_path, remote_path) + sftp.close() + logger.info(f"File {local_path} copied to {remote_path} on server {self.url}") + except Exception as e: + logger.error(f"Exception during file copy: {str(e)}") + raise + finally: + self._close() + + def delete_known_hosts(self, version): + """ + Deletes the known_hosts file based on the product version. + + :param version: Product version to determine file path. + """ + try: + self._connect() + if version == "4.3": + self._run_command("rm /var/lib/salt/.ssh/known_hosts") + elif version in ["5.0", "head", "5.1", "uyuni"]: + logger.info("Checking files before cleanup...") + self._run_command("rm /var/lib/containers/storage/volumes/var-salt/_data/.ssh/known_hosts") + logger.info("Known_hosts file cleaned up.") + else: + logger.warning(f"Unsupported version for known_hosts deletion: {version}") + finally: + self._close() + + def delete_distributions(self, version): + """ + Deletes distributions directories based on the product version. + + :param version: Product version to determine file paths. 
+ """ + try: + self._connect() + if version == "4.3": + logger.warning("Distribution deletion for version 4.3 is not implemented.") + elif version in ["5.0", "head", "5.1"]: + self._run_command("rm -rf /var/lib/containers/storage/volumes/srv-www/_data/distributions/*") + self._run_command("rm -rf /var/lib/containers/storage/volumes/srv-www/_data/htdocs/pub/*iso") + logger.info("Distributions directories deleted.") + else: + logger.error(f"Unsupported product version: {version}") + finally: + self._close() + + def update_custom_repositories(self): + """ + Updates custom repositories by copying a configuration file to the server. + """ + local_path = "./custom_repositories.json" + remote_path = "/root/spacewalk/testsuite/features/upload_files/custom_repositories.json" + logger.info(f"Copying file from {local_path} to {remote_path}") + self._copy_file(local_path, remote_path) + logger.info("Custom repositories updated successfully.") + + @staticmethod + def extract_ip_from_url(url): + """Extracts and validates the IP address from a URL.""" + parsed_url = urlparse(url) + hostname = parsed_url.hostname + + if not hostname: + raise ValueError("No hostname found in the provided URL.") + + ip_pattern = re.compile(r'^(\d{1,3}\.){3}\d{1,3}$') + if ip_pattern.match(hostname): + return hostname + else: + raise ValueError(f"Invalid IP address format in URL: {hostname}") diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/tests/__init__.py b/jenkins_pipelines/scripts/test_environment_cleaner/tests/__init__.py new file mode 100644 index 000000000..143f486c0 --- /dev/null +++ b/jenkins_pipelines/scripts/test_environment_cleaner/tests/__init__.py @@ -0,0 +1 @@ +# __init__.py diff --git a/jenkins_pipelines/scripts/test_environment_cleaner/tests/test_main_program.py b/jenkins_pipelines/scripts/test_environment_cleaner/tests/test_main_program.py new file mode 100644 index 000000000..98d1181c0 --- /dev/null +++ 
import unittest
from unittest.mock import patch
import sys
from io import StringIO
import os

# Make the project root importable so the program under test can be loaded.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
from test_environment_cleaner_program.TestEnvironmentCleaner import main

# argv[0] used for every simulated command line.
PROG = "test_environment_cleaner_program.TestEnvironmentCleaner"


def _cli_args(mode):
    """Build the argv list shared by all test cases for the given --mode."""
    return [PROG, "--url", "http://test-url.com", "--mode", mode, "--product_version", "5.0"]


class TestMainProgram(unittest.TestCase):
    """Tests for the CLI entry point, with both manager classes mocked out."""

    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.ResourceManager")
    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.SSHClientManager")
    def test_delete_users_mode(self, MockSSHClientManager, MockResourceManager):
        with patch.object(sys, 'argv', _cli_args("delete_users")):
            manager = MockResourceManager.return_value
            manager.get_session_key.return_value = "session_key"

            main()

            MockResourceManager.assert_called_once_with("http://test-url.com", [], "5.0")
            manager.get_session_key.assert_called_once()
            manager.delete_users.assert_called_once()
            manager.logout_session.assert_called_once()

    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.ResourceManager")
    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.SSHClientManager")
    def test_delete_known_hosts_mode(self, MockSSHClientManager, MockResourceManager):
        with patch.object(sys, 'argv', _cli_args("delete_known_hosts")):
            ssh_manager = MockSSHClientManager.return_value

            main()

            MockSSHClientManager.assert_called_once_with(url="http://test-url.com")
            ssh_manager.delete_known_hosts.assert_called_once_with("5.0")

    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.ResourceManager")
    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.SSHClientManager")
    def test_invalid_mode(self, MockSSHClientManager, MockResourceManager):
        with patch.object(sys, 'argv', _cli_args("invalid")), \
             patch("sys.stderr", new=StringIO()) as fake_err:
            # argparse rejects the unknown --mode value and exits.
            with self.assertRaises(SystemExit):
                main()

            expected_error_message = (
                "usage: test_environment_cleaner_program.TestEnvironmentCleaner [-h] --url URL --mode "
                "{delete_users,delete_activation_keys,delete_config_projects,delete_software_channels,"
                "delete_systems,delete_repositories,full_cleanup,delete_salt_keys,delete_known_hosts,update_custom_repositories,delete_distributions}"
                " [--default-resources-to-delete [{proxy,monitoring-server,retail} ...]] --product_version {head,5.1,5.0,4.3,uyuni} "
                "test_environment_cleaner_program.TestEnvironmentCleaner: error: argument --mode: invalid choice: 'invalid' (choose from 'delete_users',"
                " 'delete_activation_keys', 'delete_config_projects', 'delete_software_channels', 'delete_systems',"
                " 'delete_repositories', 'full_cleanup', 'delete_salt_keys', 'delete_known_hosts', 'update_custom_repositories',"
                " 'delete_distributions')"
            )
            # Collapse all whitespace so argparse's line wrapping does not matter.
            cleaned_error = " ".join(fake_err.getvalue().split())
            self.assertIn(expected_error_message, cleaned_error)

    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.ResourceManager")
    @patch("test_environment_cleaner_program.TestEnvironmentCleaner.SSHClientManager")
    def test_update_custom_repositories_mode(self, MockSSHClientManager, MockResourceManager):
        with patch.object(sys, 'argv', _cli_args("update_custom_repositories")):
            ssh_manager = MockSSHClientManager.return_value

            main()

            ssh_manager.update_custom_repositories.assert_called_once()


if __name__ == "__main__":
    unittest.main()
self.mock_client.user.listUsers.return_value = [{"login": "admin"}, {"login": "test_user"}] + self.resource_manager.delete_users() + self.mock_client.user.delete.assert_called_once_with("mock_session_key", "test_user") + + def test_delete_activation_keys(self): + self.mock_client.activationkey.listActivationKeys.return_value = [ + {"key": "test-key-proxy"}, # This key should not trigger a delete call + {"key": "test-key-monitoring"}, # This key should trigger a delete call + {"key": "test-key-sles15sp5"}, # This key should also trigger a delete call + ] + self.resource_manager.delete_activation_keys() + self.mock_client.activationkey.delete.assert_any_call("mock_session_key", "test-key-monitoring") + self.mock_client.activationkey.delete.assert_any_call("mock_session_key", "test-key-sles15sp5") + delete_calls = self.mock_client.activationkey.delete.call_args_list + proxy_deleted = any(call[0][1] == "test-key-proxy" for call in delete_calls) + self.assertFalse(proxy_deleted, "delete() should not have been called with 'test-key-proxy'") + self.assertEqual(len(delete_calls), 2) + + def test_delete_software_channels(self): + self.mock_client.channel.listMyChannels.return_value = [ + {"label": "appstream-channel"}, + {"label": "common-channel"}, + ] + self.mock_client.channel.software.getDetails.return_value = { + "parent_channel_label": "parent-channel" + } + self.resource_manager.delete_software_channels() + self.mock_client.channel.software.delete.assert_any_call("mock_session_key", "common-channel") + + def test_delete_systems(self): + self.mock_client.system.listSystems.return_value = [ + {"name": "test-system-sles15sp5", "id": 1}, + {"name": "test-system-proxy", "id": 2}, + {"name": "test-system-monitoring", "id": 3} + ] + self.resource_manager.delete_systems() + self.mock_client.system.deleteSystem.assert_any_call("mock_session_key", 1) + self.mock_client.system.deleteSystem.assert_any_call("mock_session_key", 3) + delete_calls = 
self.mock_client.system.deleteSystem.call_args_list + proxy_deleted = any(call[0][1] == "test-system-proxy" for call in delete_calls) + self.assertFalse(proxy_deleted, "delete() should not have been called with 2") + self.assertEqual(len(delete_calls), 2) + + def test_delete_channel_repos(self): + self.mock_client.channel.software.listUserRepos.return_value = [ + {"label": "repo1"}, + {"label": "repo2"}, + ] + self.resource_manager.delete_channel_repos() + + self.mock_client.channel.software.removeRepo.assert_any_call("mock_session_key", "repo1") + self.mock_client.channel.software.removeRepo.assert_any_call("mock_session_key", "repo2") + + def test_delete_software_channels_warning_for_uyuni(self): + # Set the product_version to "uyuni" to simulate Uyuni environment + self.resource_manager.product_version = "uyuni" + + # Patch logging to capture warning message + with patch('logging.warning') as mock_warning: + self.resource_manager.delete_software_channels() + # Check if the warning message was logged + mock_warning.assert_called_once_with("Delete only custom channels for uyuni") + + def test_delete_salt_keys(self): + self.mock_client.saltkey.acceptedList.return_value = [ + "salt-key-monitoring", + "salt-key-sles15sp5", + "salt-key-proxy" + ] + self.resource_manager.delete_salt_keys() + self.mock_client.saltkey.delete.assert_any_call("mock_session_key", "salt-key-sles15sp5") + self.mock_client.saltkey.delete.assert_any_call("mock_session_key", "salt-key-monitoring") + delete_calls = self.mock_client.saltkey.delete.call_args_list + proxy_deleted = any(call[0][1] == "salt-key-proxy" for call in delete_calls) + self.assertFalse(proxy_deleted, "delete() should not have been called with 'salt-key-proxy'") + self.assertEqual(len(delete_calls), 2) + + def test_run(self): + with patch.object(self.resource_manager, 'delete_users') as mock_delete_users, \ + patch.object(self.resource_manager, 'delete_activation_keys') as mock_delete_activation_keys, \ + 
patch.object(self.resource_manager, 'delete_config_projects') as mock_delete_config_projects, \ + patch.object(self.resource_manager, 'delete_software_channels') as mock_delete_software_channels, \ + patch.object(self.resource_manager, 'delete_systems') as mock_delete_systems, \ + patch.object(self.resource_manager, 'delete_channel_repos') as mock_delete_channel_repos, \ + patch.object(self.resource_manager, 'delete_salt_keys') as mock_delete_salt_keys: + + self.resource_manager.run() + + mock_delete_users.assert_called_once() + mock_delete_activation_keys.assert_called_once() + mock_delete_config_projects.assert_called_once() + mock_delete_software_channels.assert_called_once() + mock_delete_systems.assert_called_once() + mock_delete_channel_repos.assert_called_once() + mock_delete_salt_keys.assert_called_once() + +if __name__ == '__main__': + unittest.main() diff --git a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-NUE.tf b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-NUE.tf index b890b491e..121be997f 100644 --- a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-NUE.tf +++ b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-NUE.tf @@ -1520,6 +1520,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server.configuration } } diff --git a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-PRV.tf b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-PRV.tf index 5d4cd7fa6..2df84f4d2 100644 --- a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-PRV.tf +++ b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-PRV.tf @@ -1861,6 +1861,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server.configuration } } diff --git 
a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-paygo-AWS.tf b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-paygo-AWS.tf index f71e49b78..ff02bf2b4 100644 --- a/terracumber_config/tf_files/SUSEManager-4.3-build-validation-paygo-AWS.tf +++ b/terracumber_config/tf_files/SUSEManager-4.3-build-validation-paygo-AWS.tf @@ -531,9 +531,10 @@ output "aws_mirrors_public_name" { output "configuration" { value = { - controller = module.controller.configuration - bastion = { - hostname = lookup(module.base.configuration, "bastion_host", null) + controller = module.controller.configuration + server = module.server.configuration + bastion = { + hostname = lookup(module.base.configuration, "bastion_host", null) } } } diff --git a/terracumber_config/tf_files/SUSEManager-5.0-build-validation-NUE.tf b/terracumber_config/tf_files/SUSEManager-5.0-build-validation-NUE.tf index d6861d3ec..0d7d57047 100644 --- a/terracumber_config/tf_files/SUSEManager-5.0-build-validation-NUE.tf +++ b/terracumber_config/tf_files/SUSEManager-5.0-build-validation-NUE.tf @@ -1272,6 +1272,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server_containerized.configuration } } diff --git a/terracumber_config/tf_files/SUSEManager-5.0-build-validation-PRV.tf b/terracumber_config/tf_files/SUSEManager-5.0-build-validation-PRV.tf index 8db1ee54d..98c6bf68b 100644 --- a/terracumber_config/tf_files/SUSEManager-5.0-build-validation-PRV.tf +++ b/terracumber_config/tf_files/SUSEManager-5.0-build-validation-PRV.tf @@ -1598,6 +1598,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server_containerized.configuration } } diff --git a/terracumber_config/tf_files/Uyuni-Master-build-validation-NUE.tf 
b/terracumber_config/tf_files/Uyuni-Master-build-validation-NUE.tf index 8de9d1a8b..de0daf822 100644 --- a/terracumber_config/tf_files/Uyuni-Master-build-validation-NUE.tf +++ b/terracumber_config/tf_files/Uyuni-Master-build-validation-NUE.tf @@ -206,7 +206,6 @@ module "server_containerized" { runtime = "podman" container_repository = var.CONTAINER_REPOSITORY container_tag = "latest" - helm_chart_url = "oci://registry.opensuse.org/systemsmanagement/uyuni/master/charts/uyuni/server" //server_additional_repos @@ -1248,6 +1247,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server_containerized.configuration } } diff --git a/terracumber_config/tf_files/Uyuni-Master-build-validation-PRV.tf b/terracumber_config/tf_files/Uyuni-Master-build-validation-PRV.tf index c22cc0310..4ef72b520 100644 --- a/terracumber_config/tf_files/Uyuni-Master-build-validation-PRV.tf +++ b/terracumber_config/tf_files/Uyuni-Master-build-validation-PRV.tf @@ -356,7 +356,6 @@ module "server_containerized" { runtime = "podman" container_repository = var.CONTAINER_REPOSITORY container_tag = "latest" - helm_chart_url = "oci://registry.opensuse.org/systemsmanagement/uyuni/master/charts/uyuni/server" //server_additional_repos @@ -1577,6 +1576,7 @@ module "controller" { output "configuration" { value = { - controller = module.controller.configuration + controller = module.controller.configuration + server = module.server_containerized.configuration } }