diff --git a/CODEOWNERS b/CODEOWNERS index 4602e91019..8510c97688 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,19 +1,20 @@ -/data/ @aas @d.skrynnik @tsd -/conf/ @aas @d.skrynnik @tsd @a.starovoitov -/go/ @aas @d.skrynnik @tsd @aer -/os/ @aas @d.skrynnik @tsd -/python/ @aas @d.skrynnik @tsd @a.starovoitov -/spec/ @aas @d.skrynnik @tsd +tests/ @aer @v.chudasov +/data/ @aas @d.skrynnik @aer +/conf/ @aas @d.skrynnik @a.starovoitov @aer +/go/ @aas @d.skrynnik @aer +/os/ @aas @d.skrynnik @aer +/python/ @aas @d.skrynnik @a.starovoitov @aer +/spec/ @aas @d.skrynnik @aer /web/ @v.remizov @d.bardin @k.fedorenko -.dockerignore @aas @v.remizov @d.skrynnik @tsd -.gitignore @aas @d.skrynnik @tsd @v.remizov @d.bardin @k.fedorenko -.gitlab-ci.yaml @aas @v.remizov @d.skrynnik @tsd -.pre-commit-config.yaml @aas @d.skrynnik @tsd @v.remizov @d.bardin @k.fedorenko -CODEOWNERS @aas @v.remizov @tsd @d.skrynnik -COPYRIGHT @aas @v.remizov @d.skrynnik @tsd -Dockerfile @aas @v.remizov @d.skrynnik @tsd -LICENSE @aas @v.remizov @d.skrynnik @tsd -license_checker.py @aas @d.skrynnik @tsd -Makefile @aas @d.skrynnik @tsd -pyproject.toml @aas @d.skrynnik @tsd -README.md @aas @d.skrynnik @tsd @v.remizov @d.bardin @k.fedorenko @a.starovoitov +.dockerignore @aas @v.remizov @d.skrynnik @aer @v.chudasov +.gitignore @aas @d.skrynnik @v.remizov @d.bardin @k.fedorenko @aer @v.chudasov +.gitlab-ci.yaml @aas @v.remizov @d.skrynnik @aer @v.chudasov +.pre-commit-config.yaml @aas @d.skrynnik @v.remizov @d.bardin @k.fedorenko @aer @v.chudasov +CODEOWNERS @aas @v.remizov @d.skrynnik @aer @v.chudasov +COPYRIGHT @aas @v.remizov @d.skrynnik @aer +Dockerfile @aas @v.remizov @d.skrynnik @aer +LICENSE @aas @v.remizov @d.skrynnik @aer +license_checker.py @aas @d.skrynnik @aer +Makefile @aas @d.skrynnik @aer +pyproject.toml @aas @d.skrynnik @aer @v.chudasov +README.md @aas @d.skrynnik @v.remizov @d.bardin @k.fedorenko @a.starovoitov @aer @v.chudasov diff --git a/Dockerfile b/Dockerfile index 0a7e7f7394..ac16270c58 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,6 +8,7 @@ RUN apk update && \ bash \ curl \ git \ + gnupg \ libc6-compat \ libffi \ libstdc++ \ diff --git a/Makefile b/Makefile index e78017a188..04dbc9447e 100644 --- a/Makefile +++ b/Makefile @@ -25,14 +25,14 @@ build_base: build: describe buildss buildjs build_base unittests_sqlite: describe - poetry install --no-root - poetry run python/manage.py test python -v 2 + poetry install --no-root --with unittests + poetry run python/manage.py test python -v 2 --parallel unittests_postgresql: describe docker run -d --rm -e POSTGRES_PASSWORD="postgres" --name postgres -p 5500:5432 postgres:14 export DB_HOST="localhost" DB_PORT="5500" DB_NAME="postgres" DB_PASS="postgres" DB_USER="postgres" - poetry install --no-root - poetry run python/manage.py test python -v 2 + poetry install --no-root --with unittests + poetry run python/manage.py test python -v 2 --parallel docker stop postgres ng_tests: @@ -40,18 +40,16 @@ ng_tests: docker run -i --rm -v $(CURDIR)/:/adcm -w /adcm/web hub.adsw.io/library/functest:3.8.6.slim.buster_node16-x64 ./ng_test.sh pretty: + poetry install --no-root --with lint black license_checker.py python autoflake -r -i --remove-all-unused-imports --exclude apps.py,python/ansible/plugins,python/init_db.py,python/task_runner.py,python/backupdb.py,python/job_runner.py,python/drf_docs.py license_checker.py python isort license_checker.py python python license_checker.py --fix --folders python go lint: - black --check license_checker.py python - autoflake --check --quiet -r 
--remove-all-unused-imports --exclude apps.py,python/ansible/plugins,python/init_db.py,python/task_runner.py,python/backupdb.py,python/job_runner.py,python/drf_docs.py license_checker.py python - isort --check license_checker.py python + poetry install --no-root --with lint + poetry run black --check license_checker.py python + poetry run autoflake --check --quiet -r --remove-all-unused-imports --exclude apps.py,python/ansible/plugins,python/init_db.py,python/task_runner.py,python/backupdb.py,python/job_runner.py,python/drf_docs.py license_checker.py python + poetry run isort --check license_checker.py python python license_checker.py --folders python go - pylint --rcfile pyproject.toml --recursive y python - -lint_docker: - docker run -i --rm -e DJANGO_SETTINGS_MODULE=adcm.settings $(APP_IMAGE):$(APP_TAG) \ - sh -c "cd /adcm && poetry install --no-root --with lint && apk add make && make lint" + poetry run pylint --rcfile pyproject.toml --recursive y python diff --git a/conf/adcm/config.yaml b/conf/adcm/config.yaml old mode 100644 new mode 100755 index 3f44996a0c..59016e14db --- a/conf/adcm/config.yaml +++ b/conf/adcm/config.yaml @@ -2,7 +2,7 @@ type: adcm name: ADCM - version: 2.6 + version: 3.0 actions: run_ldap_sync: @@ -34,6 +34,26 @@ We have to know ADCM's Url to send information from host. We try to guess that information from url you enter in browser. But if your network has more complicated structure and we guess wrong, please fix that here. type: string + - name: "verification_public_key" + display_name: "Bundle verification public key" + description: | + Bundle verification GPG public key to verify the signature of the bundle (*.sig file) when uploading the bundle to ADCM. + type: file + default: "gpg_key.pub" + - name: "statistics_collection" + display_name: "Statistics Collection" + type: "group" + activatable: true + active: true + ui_options: + advanced: true + subs: + - name: "url" + description: "URL to send collected statistic" + type: string + default: adcm-usage-ss.arenadata.io + ui_options: + invisible: true - name: "google_oauth" display_name: "Google Auth" type: "group" @@ -46,6 +66,8 @@ required: false ui_options: no_confirm: true + ui_options: + invisible: true - name: "yandex_oauth" display_name: "Yandex Auth" type: "group" @@ -60,38 +82,10 @@ no_confirm: true ui_options: invisible: true - - name: "job_log" - display_name: "Job Log" - type: "group" - subs: - - name: "log_rotation_on_fs" - display_name: "Log rotation from file system" - type: integer - required: false - default: 365 - min: 0 - description: | - You can set the time (number of days) after which the logs will be deleted from the file system. - - name: "log_rotation_in_db" - display_name: "Log rotation from database" - type: integer - required: false - default: 365 - min: 0 - description: | - You can set the time (number of days) after which the logs will be deleted from the database. - name: "ansible_settings" display_name: "Ansible Settings" type: "group" subs: - - name: "mitogen" - display_name: "Use Mitogen" - description: | - Mitogen for Ansible is a completely redesigned UNIX connection layer and module runtime for Ansible. - type: boolean - ui_options: - invisible: true - default: false - name: "forks" display_name: "Forks" description: | @@ -126,35 +120,44 @@ Compress the rotated files type: boolean default: false - - name: "config_rotation" - display_name: "Configuration rotation" - description: | - You can enable Clusters/Services/Components configurations deleting mechanism. 
+ - name: "audit_data_retention" + display_name: "Data retention policy" type: "group" subs: + - name: "log_rotation_on_fs" + display_name: "Job log retention period from file system" + type: integer + required: false + default: 365 + min: 0 + description: | + You can set the time (number of days) after which the logs will be deleted from the file system. + - name: "log_rotation_in_db" + display_name: "Job log retention period from database" + type: integer + required: false + default: 365 + min: 0 + description: | + You can set the time (number of days) after which the logs will be deleted from the database. - name: "config_rotation_in_db" - display_name: "Objects configurations rotation period" + display_name: "Objects configurations retention period" type: integer required: false default: 0 min: 0 description: | You can set the time (number of days) after which the Objects configuration will be deleted from the database. 0 is infinite storing. - - - name: "audit_data_retention" - display_name: "Audit data retention" - type: "group" - subs: - name: "retention_period" - display_name: "Retention period" + display_name: "Audit data retention period" description: | Data storage period (in days) for operations and authorizations in ADCM. type: integer default: 1825 - name: "data_archiving" - display_name: "Enable archiving" + display_name: "Enable audit data archiving" description: | - Enable/disable archiving of data on operations and authorizations after the period specified in "retention_period". + Enable/disable archiving of data on operations and authorizations after the period specified in "Audit data retention period". type: boolean default: false required: no diff --git a/conf/adcm/gpg_key.pub b/conf/adcm/gpg_key.pub new file mode 100755 index 0000000000..8b13789179 --- /dev/null +++ b/conf/adcm/gpg_key.pub @@ -0,0 +1 @@ + diff --git a/conf/adcm/python_scripts/run_ldap_sync.py b/conf/adcm/python_scripts/run_ldap_sync.py index 51a4529dea..e68c701f9f 100644 --- a/conf/adcm/python_scripts/run_ldap_sync.py +++ b/conf/adcm/python_scripts/run_ldap_sync.py @@ -69,11 +69,11 @@ def _bind(self) -> ldap.ldapobject.LDAPObject: @staticmethod def _deactivate_extra_users(ldap_usernames: set): django_usernames = set( - User.objects.filter(type=OriginType.LDAP, is_active=True).values_list("username", flat=True) + User.objects.filter(type=OriginType.LDAP).values_list("username", flat=True) ) for username in django_usernames - ldap_usernames: user = User.objects.get(username__iexact=username) - sys.stdout.write(f"Deactivate user and his session: {user}\n") + sys.stdout.write(f"Delete user: {user}\n") user.delete() def unbind(self) -> None: @@ -106,6 +106,7 @@ def sync_groups(self) -> list: ldap_groups = self.settings["GROUP_SEARCH"].execute(self.conn, {}) self._sync_ldap_groups(ldap_groups) sys.stdout.write("Groups were synchronized\n") + return ldap_groups def sync_users(self, ldap_groups: list) -> None: @@ -114,11 +115,13 @@ def sync_users(self, ldap_groups: list) -> None: sys.stdout.write("No groups found. 
Aborting sync users\n") self._deactivate_extra_users(set()) return + group_filter = "" for group_dn, _ in ldap_groups: group_filter += f"(memberOf={group_dn})" if group_filter: group_filter = f"(|{group_filter})" + self.settings["USER_SEARCH"].filterstr = ( f"(&" f"(objectClass={self.settings['USER_OBJECT_CLASS']})" @@ -126,6 +129,7 @@ def sync_users(self, ldap_groups: list) -> None: f"{group_filter})" ) ldap_users = self.settings["USER_SEARCH"].execute(self.conn, {"user": "*"}, True) + self._sync_ldap_users(ldap_users, ldap_groups) sys.stdout.write("Users were synchronized\n") @@ -164,6 +168,8 @@ def _sync_ldap_users(self, ldap_users: list, ldap_groups: list) -> None: ldap_group_names = [group[0].split(",")[0][3:] for group in ldap_groups] ldap_usernames = set() error_names = [] + deleted_names: list[str] = [] + for cname, ldap_attributes in ldap_users: defaults = {} for field, ldap_name in self.settings["USER_ATTR_MAP"].items(): @@ -186,12 +192,13 @@ def _sync_ldap_users(self, ldap_users: list, ldap_groups: list) -> None: sys.stdout.write(f"Error creating user {username}: {e}\n") continue else: + if not self._is_ldap_user_active(ldap_attrs=ldap_attributes): + deleted_names.append(user.username) + user.delete() + continue + updated = False - user.is_active = False - if ldap_attributes.get("useraccountcontrol") and not hex( - int(ldap_attributes["useraccountcontrol"][0]) - ).endswith("2"): - user.is_active = True + if created: sys.stdout.write(f"Create user: {username}\n") user.set_unusable_password() @@ -221,8 +228,12 @@ def _sync_ldap_users(self, ldap_users: list, ldap_groups: list) -> None: except (IntegrityError, DataError, Group.DoesNotExist) as e: sys.stdout.write(f"Error getting group {name}: {e}\n") self._deactivate_extra_users(ldap_usernames) + msg = "Sync of users ended successfully." - msg = f"{msg} Couldn't synchronize users: {error_names}" if error_names else f"{msg}" + if error_names: + msg = f"{msg}{os.linesep}Couldn't synchronize users: {error_names}" + if deleted_names: + msg = f"{msg}{os.linesep}Deleted users (inactive in ldap): {deleted_names}" logger.debug(msg) def _process_user_ldap_groups(self, user: User, user_dn: str) -> None: @@ -242,6 +253,14 @@ def _process_user_ldap_groups(self, user: User, user_dn: str) -> None: group.user_set.add(user) sys.stdout.write(f"Add user {user} to group {ldap_group_name}\n") + @staticmethod + def _is_ldap_user_active(ldap_attrs: dict) -> bool: + target_attr = "useraccountcontrol" + if ldap_attrs.get(target_attr) and not hex(int(ldap_attrs[target_attr][0])).endswith("2"): + return True + + return False + if __name__ == "__main__": sync_ldap = SyncLDAP() diff --git a/os/etc/crontabs/root b/os/etc/crontabs/root old mode 100644 new mode 100755 index 584762e8bc..721387bbd9 --- a/os/etc/crontabs/root +++ b/os/etc/crontabs/root @@ -1,6 +1,7 @@ # DO NOT EDIT THIS FILE - edit the master and reinstall. 
# (/tmp/crontab.nS0S9F/crontab installed on Wed Oct 5 09:29:23 2022) # (Cron version -- $Id: crontab.c,v 2.13 1994/01/17 03:20:37 vixie Exp $) -0 8 */1 * * python /adcm/python/manage.py logrotate --target all +0 8 */1 * * python /adcm/python/manage.py logrotate --target all 0 10 */1 * * python /adcm/python/manage.py clearaudit */1 * * * * python /adcm/python/manage.py run_ldap_sync +0 0 * * 1 python /adcm/python/manage.py collect_statistics diff --git a/poetry.lock b/poetry.lock index 964056efd3..e95f887e25 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "ansible" version = "2.8.8" description = "Radically simple IT automation" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [] @@ -28,7 +27,6 @@ resolved_reference = "300b13168002d3fb83fe5a19c1fd091e3d496b59" name = "apache-libcloud" version = "3.7.0" description = "A standard Python library that abstracts away differences among multiple cloud provider APIs. For more information and documentation, please see https://libcloud.apache.org" -category = "main" optional = false python-versions = ">=3.6, <4" files = [ @@ -43,7 +41,6 @@ requests = ">=2.26.0" name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -61,7 +58,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "2.15.5" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -81,7 +77,6 @@ wrapt = [ name = "attr" version = "0.3.2" description = "Simple decorator to set attributes of target function or class in a DRY way." -category = "main" optional = false python-versions = "*" files = [ @@ -93,7 +88,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -112,7 +106,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "autoflake" version = "2.2.0" description = "Removes unused imports and unused variables" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -128,7 +121,6 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +169,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "casestyle" version = "0.0.4" description = "String case style converter (snake_case, camelCase, PascalCase, kebab-case, MACRO_CASE can be converted to each other)." -category = "main" optional = false python-versions = ">=3" files = [ @@ -188,7 +179,6 @@ files = [ name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -200,7 +190,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
-category = "main" optional = false python-versions = "*" files = [ @@ -277,7 +266,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -362,7 +350,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -377,7 +364,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -389,7 +375,6 @@ files = [ name = "coreapi" version = "2.3.3" description = "Python client library for Core API." -category = "main" optional = false python-versions = "*" files = [ @@ -407,7 +392,6 @@ uritemplate = "*" name = "coreschema" version = "0.0.4" description = "Core Schema." -category = "main" optional = false python-versions = "*" files = [ @@ -422,7 +406,6 @@ jinja2 = "*" name = "cryptography" version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -464,7 +447,6 @@ test-randomorder = ["pytest-randomly"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -476,7 +458,6 @@ files = [ name = "dill" version = "0.3.6" description = "serialize all of python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -491,7 +472,6 @@ graph = ["objgraph (>=1.7.2)"] name = "distro" version = "1.8.0" description = "Distro - an OS platform information API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -503,7 +483,6 @@ files = [ name = "django" version = "3.2.19" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -524,7 +503,6 @@ bcrypt = ["bcrypt"] name = "django-auth-ldap" version = "4.3.0" description = "Django LDAP authentication backend." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -540,7 +518,6 @@ python-ldap = ">=3.1" name = "django-cors-headers" version = "4.1.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -555,7 +532,6 @@ Django = ">=3.2" name = "django-csp" version = "3.7" description = "Django Content Security Policy support." -category = "main" optional = false python-versions = "*" files = [ @@ -574,7 +550,6 @@ tests = ["jinja2 (>=2.9.6)", "mock (==1.0.1)", "pep8 (==1.4.6)", "pytest (<4.0)" name = "django-filter" version = "23.2" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -589,7 +564,6 @@ Django = ">=3.2" name = "django-guardian" version = "2.4.0" description = "Implementation of per object permissions for Django." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -604,7 +578,6 @@ Django = ">=2.2" name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -616,11 +589,20 @@ files = [ django = ">=3.0" pytz = "*" +[[package]] +name = "djangorestframework-camel-case" +version = "1.4.2" +description = "Camel case JSON support for Django REST framework." +optional = false +python-versions = ">=3.5" +files = [ + {file = "djangorestframework-camel-case-1.4.2.tar.gz", hash = "sha256:cdae75846648abb6585c7470639a1d2fb064dc45f8e8b62aaa50be7f1a7a61f4"}, +] + [[package]] name = "drf-extensions" version = "0.7.1" description = "Extensions for Django REST Framework" -category = "main" optional = false python-versions = "*" files = [ @@ -635,7 +617,6 @@ djangorestframework = ">=3.9.3" name = "drf-flex-fields" version = "1.0.2" description = "Flexible, dynamic fields and nested resources for Django REST Framework serializers." -category = "main" optional = false python-versions = "*" files = [ @@ -646,7 +627,6 @@ files = [ name = "drf-nested-routers" version = "0.93.4" description = "Nested resources for the Django Rest Framework" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -662,7 +642,6 @@ djangorestframework = ">=3.6.0" name = "flake8" version = "6.0.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -679,7 +658,6 @@ pyflakes = ">=3.0.0,<3.1.0" name = "googleapis-common-protos" version = "1.59.1" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -697,7 +675,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "grpcio" version = "1.56.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -755,7 +732,6 @@ protobuf = ["grpcio-tools (>=1.56.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -767,7 +743,6 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -785,7 +760,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itypes" version = "1.2.0" description = "Simple immutable types for python." -category = "main" optional = false python-versions = "*" files = [ @@ -797,7 +771,6 @@ files = [ name = "jinja2" version = "2.11.3" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -815,7 +788,6 @@ i18n = ["Babel (>=0.8)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -827,7 +799,6 @@ files = [ name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -847,7 +818,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -893,7 +863,6 @@ files = [ name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -986,7 +955,6 @@ source = ["Cython (>=0.29.7)"] name = "markupsafe" version = "1.1.1" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" files = [ @@ -1048,7 +1016,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1056,23 +1023,10 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] -[[package]] -name = "mitogen" -version = "0.3.3" -description = "Library for writing distributed self-replicating programs." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "mitogen-0.3.3-py2.py3-none-any.whl", hash = "sha256:3f99570109534afc077cd69ae1c96dd5eead0eb253c561047b305aa6119ae629"}, - {file = "mitogen-0.3.3.tar.gz", hash = "sha256:db9dbc1ca8acb59f8f70c25a0fbdf11698b1a8b8355f39a8c540e0874496fb34"}, -] - [[package]] name = "multipledispatch" version = "1.0.0" description = "Multiple dispatch" -category = "main" optional = false python-versions = "*" files = [ @@ -1084,7 +1038,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1096,7 +1049,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1113,7 +1065,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1125,7 +1076,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1137,7 +1087,6 @@ files = [ name = "platformdirs" version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1153,7 +1102,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "protobuf" version = "4.23.3" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1176,7 +1124,6 @@ files = [ name = "psycopg2-binary" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1248,7 +1195,6 @@ files = [ name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1260,7 +1206,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1275,7 +1220,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.10.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1287,7 +1231,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1299,7 +1242,6 @@ files = [ name = "pycryptodome" version = "3.18.0" description = "Cryptographic library for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1341,7 +1283,6 @@ files = [ name = "pydantic" version = "1.10.9" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1394,7 +1335,6 @@ email = ["email-validator (>=1.0.3)"] name = "pyflakes" version = "3.0.1" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1406,7 +1346,6 @@ files = [ name = "pyjwt" version = "2.7.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1424,7 +1363,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.4" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -1453,7 +1391,6 @@ testutils = ["gitpython (>3)"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1486,11 +1423,21 @@ files = [ {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] +[[package]] +name = "python-gnupg" +version = "0.5.0" +description = "A wrapper for the Gnu Privacy Guard (GPG or GnuPG)" +optional = false +python-versions = "*" +files = [ + {file = "python-gnupg-0.5.0.tar.gz", hash = "sha256:70758e387fc0e0c4badbcb394f61acbe68b34970a8fed7e0f7c89469fe17912a"}, + {file = "python_gnupg-0.5.0-py2.py3-none-any.whl", hash = "sha256:345723a03e67b82aba0ea8ae2328b2e4a3906fbe2c18c4082285c3b01068f270"}, +] + [[package]] name = "python-ldap" version = "3.4.3" description = "Python modules for implementing LDAP clients" -category = "main" optional = false python-versions = 
">=3.6" files = [ @@ -1505,7 +1452,6 @@ pyasn1_modules = ">=0.1.5" name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." -category = "main" optional = false python-versions = "*" files = [ @@ -1524,7 +1470,6 @@ postgresql = ["psycopg2"] name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -1534,59 +1479,57 @@ files = [ [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = 
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash 
= "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1608,7 +1551,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1627,7 +1569,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1642,7 +1583,6 @@ requests = ">=2.0.1,<3.0.0" name = "rstr" version = "3.2.1" description = "Generate random strings in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1654,7 +1594,6 @@ files = [ name = "ruyaml" version = "0.91.0" description = "ruyaml is a fork of ruamel.yaml" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1673,7 +1612,6 @@ docs = ["Sphinx"] name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1690,7 +1628,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1702,7 +1639,6 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1718,7 +1654,6 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.4.2" description = "Python social authentication made simple." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1746,7 +1681,6 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1759,11 +1693,21 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] +[[package]] +name = "tblib" +version = "2.0.0" +description = "Traceback serialization library." +optional = false +python-versions = ">=3.7" +files = [ + {file = "tblib-2.0.0-py3-none-any.whl", hash = "sha256:9100bfa016b047d5b980d66e7efed952fbd20bd85b56110aaf473cb97d18709a"}, + {file = "tblib-2.0.0.tar.gz", hash = "sha256:a6df30f272c08bf8be66e0775fad862005d950a6b8449b94f7c788731d70ecd7"}, +] + [[package]] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1775,7 +1719,6 @@ files = [ name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1787,7 +1730,6 @@ files = [ name = "typing-extensions" version = "4.7.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1799,7 +1741,6 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1811,7 +1752,6 @@ files = [ name = "urllib3" version = "2.0.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1827,20 +1767,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uwsgi" -version = "2.0.21" +version = "2.0.22" description = "The uWSGI server" -category = "main" optional = false python-versions = "*" files = [ - {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, + {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"}, ] [[package]] name = "version-utils" version = "0.3.2" description = "Library for parsing system package strings and comparing package versions" -category = "main" optional = false python-versions = "*" files = [ @@ -1852,7 +1790,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1933,22 +1870,7 @@ files = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -[[package]] -name = "yspec" -version = "0.1.0" -description = "YAML structure validator" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "yspec-0.1.0-py3-none-any.whl", hash = "sha256:81d491c4d67f12401c032ef547a3965497b842a2af2181730c4d0989dd8e866a"}, - {file = "yspec-0.1.0.tar.gz", hash = "sha256:c47258e639f50cde32de2c3f415f7c1ac4d2aab7a898699e3e107282fbd75e71"}, -] - -[package.dependencies] -pyyaml = "*" - [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1c2da145d4dd33cf39ce0923c0df748829b5f1b836dcdf6587fecac3edd49e55" +content-hash = "99ffaffe9d49bee2ec42d4a2a5f2ed5631769de3eafded05f44ee6e4ef137147" diff --git a/pyproject.toml b/pyproject.toml index cafeb2f8a6..0a80a8a63d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,20 +30,20 @@ jmespath = "*" jsonschema = "*" lxml = "*" markupsafe = "1.1.1" -mitogen = "*" multipledispatch = "*" psycopg2-binary = "*" pycryptodome = "*" pydantic = "*" +python-gnupg = "*" requests-toolbelt = "*" rstr = "*" ruyaml = "*" social-auth-app-django = "*" uwsgi = "*" version-utils = "*" -yspec = "*" six = "*" django-cors-headers = "*" +djangorestframework-camel-case = "^1.4.2" [tool.poetry.group.lint] optional = true @@ -55,6 +55,12 @@ flake8 = "*" isort = "*" pylint = "*" +[tool.poetry.group.unittests] +optional = true + +[tool.poetry.group.unittests.dependencies] +tblib = "^2.0.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" @@ -70,6 +76,8 @@ skip = ["python/init_db.py", "python/task_runner.py", "python/backupdb.py", "pyt [tool.pylint.master] ignore = ["migrations"] +enable = ["useless-suppression"] +fail-on = ["useless-suppression"] [tool.pylint."messages control"] disable = ["missing-docstring", "missing-timeout", "no-else-return", "no-member", "too-few-public-methods", diff --git a/python/adcm/mixins.py b/python/adcm/mixins.py index 31e414da30..b7fa1771ad 100644 --- a/python/adcm/mixins.py +++ b/python/adcm/mixins.py @@ -25,33 +25,43 @@ class GetParentObjectMixin: - def get_parent_object(self) -> GroupConfig | Cluster | ClusterObject | ServiceComponent | Host | None: + kwargs: dict + + def get_parent_object( + self, + ) -> GroupConfig | Cluster | ClusterObject | ServiceComponent | HostProvider | Host | None: parent_object = None with suppress(ObjectDoesNotExist): if all(lookup in self.kwargs for lookup in ("component_pk", 
"service_pk", "cluster_pk")): - parent_object = ServiceComponent.objects.get( + parent_object = ServiceComponent.objects.select_related( + "prototype", "cluster__prototype", "service__prototype" + ).get( pk=self.kwargs["component_pk"], cluster_id=self.kwargs["cluster_pk"], service_id=self.kwargs["service_pk"], ) elif "cluster_pk" in self.kwargs and "service_pk" in self.kwargs: - parent_object = ClusterObject.objects.get( + parent_object = ClusterObject.objects.select_related("prototype", "cluster__prototype").get( pk=self.kwargs["service_pk"], cluster_id=self.kwargs["cluster_pk"] ) elif "cluster_pk" in self.kwargs and "host_pk" in self.kwargs: - parent_object = Host.objects.get(pk=self.kwargs["host_pk"], cluster_id=self.kwargs["cluster_pk"]) + parent_object = Host.objects.select_related( + "prototype", "cluster__prototype", "provider__prototype" + ).get(pk=self.kwargs["host_pk"], cluster_id=self.kwargs["cluster_pk"]) elif "host_pk" in self.kwargs: - parent_object = Host.objects.get(pk=self.kwargs["host_pk"]) + parent_object = Host.objects.select_related( + "prototype", "cluster__prototype", "provider__prototype" + ).get(pk=self.kwargs["host_pk"]) elif "cluster_pk" in self.kwargs: - parent_object = Cluster.objects.get(pk=self.kwargs["cluster_pk"]) + parent_object = Cluster.objects.select_related("prototype").get(pk=self.kwargs["cluster_pk"]) - elif "provider_pk" in self.kwargs: - parent_object = HostProvider.objects.get(pk=self.kwargs["provider_pk"]) + elif "hostprovider_pk" in self.kwargs: + parent_object = HostProvider.objects.select_related("prototype").get(pk=self.kwargs["hostprovider_pk"]) if "config_group_pk" in self.kwargs: parent_object = GroupConfig.objects.get( diff --git a/python/adcm/permissions.py b/python/adcm/permissions.py index b5f14b8210..86f028af49 100644 --- a/python/adcm/permissions.py +++ b/python/adcm/permissions.py @@ -45,6 +45,11 @@ VIEW_IMPORT_PERM = "view_import_of" CHANGE_IMPORT_PERM = "change_import_of" VIEW_CLUSTER_BIND = "view_clusterbind" +VIEW_TASKLOG_PERMISSION = "cm.view_tasklog" +VIEW_JOBLOG_PERMISSION = "cm.view_joblog" +VIEW_LOGSTORAGE_PERMISSION = "cm.view_logstorage" +VIEW_USER_PERMISSION = "rbac.view_user" +VIEW_GROUP_PERMISSION = "rbac.view_group" class DjangoObjectPermissionsAudit(DjangoObjectPermissions): diff --git a/python/adcm/serializers.py b/python/adcm/serializers.py index e17b4af0b7..f61a26087b 100644 --- a/python/adcm/serializers.py +++ b/python/adcm/serializers.py @@ -9,6 +9,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from rest_framework.fields import IntegerField from rest_framework.serializers import Serializer @@ -18,3 +19,7 @@ def update(self, instance, validated_data): def create(self, validated_data): pass + + +class IdSerializer(EmptySerializer): + id = IntegerField() diff --git a/python/adcm/settings.py b/python/adcm/settings.py index 79fabbc7e2..0aacb6679b 100644 --- a/python/adcm/settings.py +++ b/python/adcm/settings.py @@ -34,10 +34,11 @@ BUNDLE_DIR = STACK_DIR / "data" / "bundle" CODE_DIR = BASE_DIR / "python" DOWNLOAD_DIR = Path(STACK_DIR, "data", "download") -RUN_DIR = BASE_DIR / "data" / "run" +DATA_DIR = BASE_DIR / "data" +RUN_DIR = DATA_DIR / "run" FILE_DIR = STACK_DIR / "data" / "file" -LOG_DIR = BASE_DIR / "data" / "log" -VAR_DIR = BASE_DIR / "data" / "var" +LOG_DIR = DATA_DIR / "log" +VAR_DIR = DATA_DIR / "var" LOG_FILE = LOG_DIR / "adcm.log" SECRETS_FILE = VAR_DIR / "secrets.json" ADCM_TOKEN_FILE = VAR_DIR / "adcm_token" @@ -102,6 +103,7 @@ "audit.middleware.LoginMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", + "djangorestframework_camel_case.middleware.CamelCaseMiddleWare", ] if not DEBUG: MIDDLEWARE = [*MIDDLEWARE, "csp.middleware.CSPMiddleware"] @@ -153,6 +155,9 @@ "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.NamespaceVersioning", "DEFAULT_VERSION": "v1", "TEST_REQUEST_DEFAULT_FORMAT": "json", + "JSON_UNDERSCOREIZE": { + "ignore_fields": ("config", "adcmMeta"), + }, } DB_PASS = os.getenv("DB_PASS") @@ -233,18 +238,24 @@ "filters": ["require_debug_false"], "formatter": "adcm", "class": "logging.FileHandler", - "filename": BASE_DIR / "data/log/adcm_debug.log", + "filename": LOG_DIR / "adcm_debug.log", + }, + "task_runner_err_file": { + "filters": ["require_debug_false"], + "formatter": "adcm", + "class": "logging.FileHandler", + "filename": LOG_DIR / "task_runner.err", }, "background_task_file_handler": { "formatter": "adcm", "class": "logging.handlers.TimedRotatingFileHandler", - "filename": BASE_DIR / "data/log/cron_task.log", + "filename": LOG_DIR / "cron_task.log", "when": "midnight", "backupCount": 10, }, "audit_file_handler": { "class": "logging.handlers.TimedRotatingFileHandler", - "filename": BASE_DIR / "data/log/audit.log", + "filename": LOG_DIR / "audit.log", "when": "midnight", "backupCount": 10, }, @@ -270,6 +281,11 @@ "level": LOG_LEVEL, "propagate": True, }, + "task_runner_err": { + "handlers": ["task_runner_err_file"], + "level": LOG_LEVEL, + "propagate": True, + }, }, } diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index e9e0e335a0..87034325a3 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -15,6 +15,7 @@ from contextlib import contextmanager from pathlib import Path from shutil import rmtree +from tempfile import mkdtemp from cm.models import ( ADCM, @@ -30,10 +31,10 @@ ServiceComponent, ) from django.conf import settings -from django.test import Client, TestCase +from django.test import Client, TestCase, override_settings from django.urls import reverse from init_db import init -from rbac.models import Role, RoleTypes, User +from rbac.models import Group, Role, RoleTypes, User from rbac.upgrade.role import init_roles from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -41,7 +42,36 @@ APPLICATION_JSON = "application/json" -class BaseTestCase(TestCase): +class ParallelReadyTestCase: + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + + 
directories = cls._prepare_temporal_directories_for_adcm() + override_settings(**directories)(cls) + + @staticmethod + def _prepare_temporal_directories_for_adcm() -> dict: + stack = Path(mkdtemp()) + data = Path(mkdtemp()) / "data" + + temporary_directories = { + "STACK_DIR": stack, + "BUNDLE_DIR": stack / "data" / "bundle", + "DOWNLOAD_DIR": Path(stack, "data", "download"), + "DATA_DIR": data, + "RUN_DIR": data / "run", + "FILE_DIR": stack / "data" / "file", + "LOG_DIR": data / "log", + "VAR_DIR": data / "var", + } + + for directory in temporary_directories.values(): + directory.mkdir(exist_ok=True, parents=True) + + return temporary_directories + + +class BaseTestCase(TestCase, ParallelReadyTestCase): # pylint: disable=too-many-instance-attributes,too-many-public-methods def setUp(self) -> None: @@ -53,6 +83,8 @@ def setUp(self) -> None: password=self.test_user_password, is_superuser=True, ) + self.test_user_group = Group.objects.create(name="simple_test_group") + self.test_user_group.user_set.add(self.test_user) self.no_rights_user_username = "no_rights_user" self.no_rights_user_password = "no_rights_user_password" @@ -60,6 +92,8 @@ def setUp(self) -> None: username="no_rights_user", password="no_rights_user_password", ) + self.no_rights_user_group = Group.objects.create(name="no_right_group") + self.no_rights_user_group.user_set.add(self.no_rights_user) self.client = Client(HTTP_USER_AGENT="Mozilla/5.0") self.login() @@ -68,6 +102,8 @@ def setUp(self) -> None: def setUpClass(cls): super().setUpClass() + cls.base_dir = Path(__file__).parent.parent.parent.parent + init_roles() init() @@ -176,7 +212,6 @@ def create_policy( self, role_name: str, obj: ADCMEntity, - user_pk: int | None = None, group_pk: int | None = None, ) -> int: role_data = self.get_role_data(role_name=role_name) @@ -186,8 +221,7 @@ def create_policy( data={ "name": f"test_policy_{obj.prototype.type}_{obj.pk}_admin", "role": {"id": role_data["id"]}, - "user": [{"id": user_pk}] if user_pk else [], - "group": [{"id": group_pk}] if group_pk else [], + "group": [{"id": group_pk}], "object": [{"name": obj.name, "type": obj.prototype.type, "id": obj.pk}], }, content_type=APPLICATION_JSON, diff --git a/python/adcm/utils.py b/python/adcm/utils.py index 7d8065b5fa..dda0764ef8 100644 --- a/python/adcm/utils.py +++ b/python/adcm/utils.py @@ -13,27 +13,36 @@ from typing import Any, Iterable from cm.adcm_config.ansible import ansible_decrypt -from cm.api import load_mm_objects +from cm.api import cancel_locking_tasks, delete_service, load_mm_objects +from cm.errors import raise_adcm_ex +from cm.flag import update_flags from cm.issue import update_hierarchy_issues, update_issue_after_deleting from cm.job import start_task from cm.models import ( ADCM, Action, ADCMEntity, + ClusterBind, ClusterObject, ConcernType, ConfigLog, Host, HostComponent, + JobStatus, MaintenanceMode, Prototype, PrototypeConfig, ServiceComponent, + TaskLog, ) from django.conf import settings from rest_framework.response import Response from rest_framework.serializers import Serializer -from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_409_CONFLICT +from rest_framework.status import ( + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_409_CONFLICT, +) def _change_mm_via_action( @@ -68,6 +77,7 @@ def _update_mm_hierarchy_issues(obj: Host | ClusterObject | ServiceComponent) -> update_hierarchy_issues(obj.cluster) update_issue_after_deleting() + update_flags() load_mm_objects() @@ -310,6 +320,66 @@ def get_maintenance_mode_response( ) +def 
delete_service_from_api(service: ClusterObject) -> Response: # pylint: disable=too-many-branches + delete_action = Action.objects.filter( + prototype=service.prototype, + name=settings.ADCM_DELETE_SERVICE_ACTION_NAME, + ).first() + host_components_exists = HostComponent.objects.filter(cluster=service.cluster, service=service).exists() + + if not delete_action: + if service.state != "created": + raise_adcm_ex("SERVICE_DELETE_ERROR") + + if host_components_exists: + raise_adcm_ex("SERVICE_CONFLICT", f"Service #{service.id} has component(s) on host(s)") + + cluster = service.cluster + + if cluster.state == "upgrading" and service.prototype.name in cluster.before_upgrade["services"]: + return raise_adcm_ex(code="SERVICE_CONFLICT", msg="It is forbidden to delete service in upgrade mode") + + if ClusterBind.objects.filter(source_service=service).exists(): + raise_adcm_ex("SERVICE_CONFLICT", f"Service #{service.id} has exports(s)") + + if service.prototype.required: + raise_adcm_ex("SERVICE_CONFLICT", f"Service #{service.id} is required") + + if TaskLog.objects.filter(action=delete_action, status=JobStatus.RUNNING).exists(): + raise_adcm_ex("SERVICE_DELETE_ERROR", "Service is deleting now") + + for component in ServiceComponent.objects.filter(cluster=service.cluster).exclude(service=service): + if component.requires_service_name(service_name=service.name): + raise_adcm_ex( + code="SERVICE_CONFLICT", + msg=f"Component {component.name} of service {component.service.display_name}" + f" requires this service or its component", + ) + + for another_service in ClusterObject.objects.filter(cluster=service.cluster): + if another_service.requires_service_name(service_name=service.name): + raise_adcm_ex( + code="SERVICE_CONFLICT", + msg=f"Service {another_service.display_name} requires this service or its component", + ) + + cancel_locking_tasks(obj=service, obj_deletion=True) + if delete_action and (host_components_exists or service.state != "created"): + start_task( + action=delete_action, + obj=service, + conf={}, + attr={}, + hostcomponent=[], + hosts=[], + verbose=False, + ) + else: + delete_service(service=service) + + return Response(status=HTTP_204_NO_CONTENT) + + def filter_actions(obj: ADCMEntity, actions: Iterable[Action]): """Filter out actions that are not allowed to run on object at that moment""" if obj.concerns.filter(type=ConcernType.LOCK).exists(): diff --git a/python/ansible/plugins/action/adcm_add_host.py b/python/ansible/plugins/action/adcm_add_host.py index cedd230d83..ffc5308eea 100644 --- a/python/ansible/plugins/action/adcm_add_host.py +++ b/python/ansible/plugins/action/adcm_add_host.py @@ -71,7 +71,10 @@ def run(self, tmp=None, task_vars=None): super().run(tmp, task_vars) provider_pk = get_object_id_from_context( - task_vars, "provider_id", "provider", err_msg="You can add host only in host provider context" + task_vars=task_vars, + id_type="provider_id", + context_types=("provider",), + err_msg="You can add host only in host provider context", ) if "fqdn" not in self._task.args: raise AnsibleError("fqdn is mandatory args of adcm_add_host") diff --git a/python/ansible/plugins/action/adcm_add_host_to_cluster.py b/python/ansible/plugins/action/adcm_add_host_to_cluster.py index 60a9c21c87..ff9b679dfb 100644 --- a/python/ansible/plugins/action/adcm_add_host_to_cluster.py +++ b/python/ansible/plugins/action/adcm_add_host_to_cluster.py @@ -67,7 +67,9 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): super().run(tmp, task_vars) msg = "You can add host only in 
cluster or service context"
-        cluster_id = get_object_id_from_context(task_vars, "cluster_id", "cluster", "service", err_msg=msg)
+        cluster_id = get_object_id_from_context(
+            task_vars=task_vars, id_type="cluster_id", context_types=("cluster", "service"), err_msg=msg
+        )
         fqdn = self._task.args.get("fqdn", None)
         host_id = self._task.args.get("host_id", None)

diff --git a/python/ansible/plugins/action/adcm_change_flag.py b/python/ansible/plugins/action/adcm_change_flag.py
new file mode 100644
index 0000000000..022406943d
--- /dev/null
+++ b/python/ansible/plugins/action/adcm_change_flag.py
@@ -0,0 +1,190 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# pylint: disable=wrong-import-order,wrong-import-position
+
+DOCUMENTATION = """
+---
+module: adcm_change_flag
+short_description: Raise or Lower flags on Host, Service, Component or Cluster
+description:
+  - The C(adcm_change_flag) module is intended to raise or lower flags on Host, Service, Component or Cluster.
+options:
+  operation:
+    description: Operation over flag.
+    required: True
+    choices:
+      - up
+      - down
+  msg:
+    description: Additional flag message, used in the pattern " has an outdated configuration: ". It may be used if you want several different flags on the same object. For the down operation, if a message is specified, only the flag with that message is lowered.
+    required: False
+    type: string
+  objects:
+    description: List of Services or Components on which you need to raise/lower the flag. If this parameter is not specified, the flag is raised or lowered on the action's context object. To raise or lower a flag on a cluster, the action must run in the cluster context.
+    required: False
+    type: list
+    elements: dict
+    sample:
+      - type: service
+        service_name: hdfs
+      - type: component
+        service_name: service
+        component_name: component
+      - type: cluster
+"""
+
+EXAMPLES = r"""
+- adcm_change_flag:
+  operation: up
+  objects:
+    - type: service
+      service_name: hdfs
+    - type: component
+      service_name: service
+      component_name: component
+    - type: cluster
+
+- adcm_change_flag:
+  operation: down
+  objects:
+    - type: provider
+    - type: host
+      name: host_name
+"""
+import sys
+from ansible.plugins.action import ActionBase
+from ansible.errors import AnsibleError
+
+sys.path.append("/adcm/python")
+import adcm.init_django  # pylint: disable=unused-import
+
+from cm.ansible_plugin import get_context_object, check_context_type
+from cm.logger import logger
+from cm.flag import update_object_flag, remove_flag
+from cm.models import ClusterObject, ServiceComponent, get_object_cluster, HostProvider, Host, ADCMEntity
+
+cluster_context_type = ("cluster", "service", "component")
+
+
+class ActionModule(ActionBase):
+    TRANSFERS_FILES = False
+    _VALID_ARGS = frozenset(("operation", "msg", "objects"))
+
+    def _check_args(self):
+        if "operation" not in self._task.args:
+            raise AnsibleError("'Operation' is a mandatory arg of adcm_change_flag")
+
+        if self._task.args["operation"] not in ("up", "down"):
+            raise AnsibleError(f"'Operation' value must be 'up' or 'down', not {self._task.args['operation']}")
+
+        if "objects" in self._task.args:
+            if not isinstance(self._task.args["objects"], list):
+                raise AnsibleError("'Objects' value should be a list of objects")
+
+            if not self._task.args["objects"]:
+                raise AnsibleError("'Objects' value should not be empty")
+
+            for item in self._task.args["objects"]:
+                item_type = item.get("type")
+                if not item_type:
+                    raise AnsibleError(message="'type' argument is mandatory for all items in 'objects'")
+
+                if item_type == "component" and ("service_name" not in item or "component_name" not in item):
+                    raise AnsibleError(message="'service_name' and 'component_name' are mandatory for type 'component'")
+                if item_type == "service" and "service_name" not in item:
+                    raise AnsibleError(message="'service_name' is mandatory for type 'service'")
+
+    def _process_objects(self, task_vars: dict, objects: list, context_obj: ADCMEntity) -> None:
+        err_msg = "Type {} should be used in {} context only"
+        cluster = get_object_cluster(obj=context_obj)
+
+        for item in self._task.args["objects"]:
+            obj = None
+            item_type = item.get("type")
+
+            if item_type == "component":
+                check_context_type(
+                    task_vars=task_vars,
+                    context_types=cluster_context_type,
+                    err_msg=err_msg.format(item_type, cluster_context_type),
+                )
+
+                obj = ServiceComponent.objects.filter(
+                    cluster=cluster,
+                    prototype__name=item["component_name"],
+                    service__prototype__name=item["service_name"],
+                ).first()
+            elif item_type == "service":
+                check_context_type(
+                    task_vars=task_vars,
+                    context_types=cluster_context_type,
+                    err_msg=err_msg.format(item_type, cluster_context_type),
+                )
+
+                obj = ClusterObject.objects.filter(cluster=cluster, prototype__name=item["service_name"]).first()
+            elif item_type == "cluster":
+                check_context_type(
+                    task_vars=task_vars,
+                    context_types=cluster_context_type,
+                    err_msg=err_msg.format(item_type, cluster_context_type),
+                )
+
+                obj = cluster
+            elif item_type == "provider":
+                check_context_type(
+                    task_vars=task_vars,
+                    context_types=("provider", "host"),
+                    err_msg=err_msg.format(item_type, ("provider", "host")),
+                )
+
+                if isinstance(context_obj, HostProvider):
+                    obj
= context_obj + elif isinstance(context_obj, Host): + obj = context_obj.provider + + elif item_type == "host": + check_context_type( + task_vars=task_vars, + context_types=("host",), + err_msg=err_msg.format(item_type, "host"), + ) + + obj = context_obj + + if not obj: + logger.error("Object %s not found", item) + continue + + objects.append(obj) + + def run(self, tmp=None, task_vars=None): + super().run(tmp, task_vars) + self._check_args() + + msg = "" + if "msg" in self._task.args: + msg = str(self._task.args["msg"]) + + objects = [] + context_obj = get_context_object(task_vars=task_vars) + if "objects" in self._task.args: + self._process_objects(objects=objects, context_obj=context_obj, task_vars=task_vars) + else: + objects.append(context_obj) + + for obj in objects: + if self._task.args["operation"] == "up": + update_object_flag(obj=obj, msg=msg) + elif self._task.args["operation"] == "down": + remove_flag(obj=obj, msg=msg) + + return {"failed": False, "changed": True} diff --git a/python/ansible/plugins/action/adcm_change_maintenance_mode.py b/python/ansible/plugins/action/adcm_change_maintenance_mode.py index 2a5ba5b1cd..f14e33f289 100644 --- a/python/ansible/plugins/action/adcm_change_maintenance_mode.py +++ b/python/ansible/plugins/action/adcm_change_maintenance_mode.py @@ -90,11 +90,11 @@ def run(self, tmp=None, task_vars=None): if obj_type == "host": context_type = "cluster" - obj_value = "ON" if self._task.args["value"] else "OFF" + obj_value = "on" if self._task.args["value"] else "off" obj_pk = get_object_id_from_context( - task_vars, - f"{obj_type}_id", - context_type, + task_vars=task_vars, + id_type=f"{obj_type}_id", + context_types=(context_type,), err_msg=f'You can change "{obj_type}" maintenance mode only in {context_type} context', ) @@ -102,8 +102,8 @@ def run(self, tmp=None, task_vars=None): if not obj: raise AnsibleActionFail(f'Object of type "{obj_type}" with PK "{obj_pk}" does not exist') - if obj.maintenance_mode != "CHANGING": - raise AnsibleActionFail('Only "CHANGING" state of object maintenance mode can be changed') + if obj.maintenance_mode != "changing": + raise AnsibleActionFail('Only "changing" state of object maintenance mode can be changed') obj.maintenance_mode = obj_value obj.save() diff --git a/python/ansible/plugins/action/adcm_delete_host.py b/python/ansible/plugins/action/adcm_delete_host.py index 89d9b5c034..caee3a8b18 100644 --- a/python/ansible/plugins/action/adcm_delete_host.py +++ b/python/ansible/plugins/action/adcm_delete_host.py @@ -57,7 +57,9 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): super().run(tmp, task_vars) msg = "You can delete host only in host context" - host_id = get_object_id_from_context(task_vars, "host_id", "host", err_msg=msg) + host_id = get_object_id_from_context( + task_vars=task_vars, id_type="host_id", context_types=("host",), err_msg=msg + ) logger.info("ansible module adcm_delete_host: host #%s", host_id) try: diff --git a/python/ansible/plugins/action/adcm_delete_service.py b/python/ansible/plugins/action/adcm_delete_service.py index 7d604fc4b1..a5e1ef78e8 100644 --- a/python/ansible/plugins/action/adcm_delete_service.py +++ b/python/ansible/plugins/action/adcm_delete_service.py @@ -59,7 +59,9 @@ def run(self, tmp=None, task_vars=None): service = self._task.args.get("service", None) if service: msg = "You can delete service by name only in cluster context" - cluster_id = get_object_id_from_context(task_vars, "cluster_id", "cluster", err_msg=msg) + cluster_id = 
get_object_id_from_context( + task_vars=task_vars, id_type="cluster_id", context_types=("cluster",), err_msg=msg + ) logger.info('ansible module adcm_delete_service: service "%s"', service) try: cm.api.delete_service_by_name(service, cluster_id) @@ -67,7 +69,9 @@ def run(self, tmp=None, task_vars=None): raise AnsibleError(e.code + ":" + e.msg) from e else: msg = "You can delete service only in service context" - service_id = get_object_id_from_context(task_vars, "service_id", "service", err_msg=msg) + service_id = get_object_id_from_context( + task_vars=task_vars, id_type="service_id", context_types=("service",), err_msg=msg + ) logger.info("ansible module adcm_delete_service: service #%s", service_id) try: cm.api.delete_service_by_pk(service_id) diff --git a/python/ansible/plugins/action/adcm_hc.py b/python/ansible/plugins/action/adcm_hc.py index 20ea10343a..8a2ca2671d 100644 --- a/python/ansible/plugins/action/adcm_hc.py +++ b/python/ansible/plugins/action/adcm_hc.py @@ -69,7 +69,9 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): super().run(tmp, task_vars) msg = "You can modify hc only in cluster, service or component context" - cluster_id = get_object_id_from_context(task_vars, "cluster_id", "cluster", "service", "component", err_msg=msg) + cluster_id = get_object_id_from_context( + task_vars=task_vars, id_type="cluster_id", context_types=("cluster", "service", "component"), err_msg=msg + ) job_id = task_vars["job"]["id"] operations = self._task.args["operations"] diff --git a/python/ansible/plugins/action/adcm_remove_host_from_cluster.py b/python/ansible/plugins/action/adcm_remove_host_from_cluster.py index 8da006d082..51691cc148 100644 --- a/python/ansible/plugins/action/adcm_remove_host_from_cluster.py +++ b/python/ansible/plugins/action/adcm_remove_host_from_cluster.py @@ -67,7 +67,9 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): super().run(tmp, task_vars) msg = "You can remove host only in cluster or service context" - cluster_id = get_object_id_from_context(task_vars, "cluster_id", "cluster", "service", err_msg=msg) + cluster_id = get_object_id_from_context( + task_vars=task_vars, id_type="cluster_id", context_types=("cluster", "service"), err_msg=msg + ) fqdn = self._task.args.get("fqdn", None) host_id = self._task.args.get("host_id", None) diff --git a/python/api/action/views.py b/python/api/action/views.py index 98e955d1ef..a33759a255 100644 --- a/python/api/action/views.py +++ b/python/api/action/views.py @@ -23,9 +23,18 @@ from api.utils import AdcmFilterBackend, create from audit.utils import audit from cm.errors import AdcmEx -from cm.models import Action, Host, HostComponent, TaskLog, get_model_by_type +from cm.models import ( + Action, + ConcernType, + Host, + HostComponent, + PrototypeConfig, + TaskLog, + get_model_by_type, +) from django.conf import settings from django.contrib.contenttypes.models import ContentType +from django.db.models import Q from guardian.mixins import PermissionListMixin from rbac.viewsets import DjangoOnlyObjectPermissions from rest_framework.exceptions import PermissionDenied @@ -34,7 +43,6 @@ from rest_framework.response import Response from adcm.permissions import VIEW_ACTION_PERM, get_object_for_user -from adcm.utils import filter_actions def get_object_type_id(**kwargs) -> tuple[str, int, int]: @@ -54,67 +62,62 @@ def get_obj(**kwargs): class ActionList(PermissionListMixin, GenericUIView): - queryset = 
Action.objects.filter(upgrade__isnull=True).exclude(name__in=settings.ADCM_SERVICE_ACTION_NAMES_SET) + queryset = ( + Action.objects.select_related("prototype") + .filter(upgrade__isnull=True) + .exclude(name__in=settings.ADCM_SERVICE_ACTION_NAMES_SET) + ) serializer_class = ActionSerializer serializer_class_ui = ActionUISerializer filterset_fields = ("name",) filter_backends = (AdcmFilterBackend,) permission_required = [VIEW_ACTION_PERM] - def _get_actions_for_host(self, host: Host) -> set[Action]: - actions = set(filter_actions(host, self.filter_queryset(self.get_queryset().filter(prototype=host.prototype)))) - hostcomponents = HostComponent.objects.filter(host_id=host.id) - if hostcomponents: - for hostcomponent in hostcomponents: - cluster, _ = get_obj(object_type="cluster", cluster_id=hostcomponent.cluster_id) - service, _ = get_obj(object_type="service", service_id=hostcomponent.service_id) - component, _ = get_obj(object_type="component", component_id=hostcomponent.component_id) - for connect_obj in [cluster, service, component]: - actions.update( - filter_actions( - connect_obj, - self.filter_queryset( - self.get_queryset().filter(prototype=connect_obj.prototype, host_action=True), - ), - ), - ) - else: - if host.cluster is not None: - actions.update( - filter_actions( - host.cluster, - self.filter_queryset( - self.get_queryset().filter(prototype=host.cluster.prototype, host_action=True), - ), - ), - ) - - return actions - def get(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument - if kwargs["object_type"] == "host": - host, _ = get_obj(object_type="host", host_id=kwargs["host_id"]) - actions = self._get_actions_for_host(host) + obj, _ = get_obj(**kwargs) - obj = host - objects = {"host": host} - else: - obj, _ = get_obj(**kwargs) - actions = filter_actions( - obj, - self.filter_queryset(self.get_queryset().filter(prototype=obj.prototype, host_action=False)), + if obj.concerns.filter(type=ConcernType.LOCK).exists(): + return Response(data=[]) + + objects = {obj.prototype.type: obj} + prototype_object = {} + + if kwargs["object_type"] == "host" and obj.cluster: + prototype_object[obj.cluster.prototype] = obj.cluster + + for hc_map in HostComponent.objects.filter(host=obj).select_related( + "service__prototype", "component__prototype" + ): + prototype_object[hc_map.service.prototype] = hc_map.service + prototype_object[hc_map.component.prototype] = hc_map.component + + actions = self.filter_queryset( + self.get_queryset().filter( + Q(prototype=obj.prototype, host_action=False) + | Q(prototype__in=prototype_object.keys(), host_action=True) ) - objects = {obj.prototype.type: obj} + ) + prototype_object[obj.prototype] = obj + + allowed_actions = [] + + for action in actions: + if action.allowed(obj=prototype_object[action.prototype]): + action.config = PrototypeConfig.objects.filter(action=action).order_by("id") + allowed_actions.append(action) # added filter actions by custom perm for run actions - perms = [f"cm.run_action_{hashlib.sha256(a.name.encode(settings.ENCODING_UTF_8)).hexdigest()}" for a in actions] + perms = [ + f"cm.run_action_{hashlib.sha256(a.name.encode(settings.ENCODING_UTF_8)).hexdigest()}" + for a in allowed_actions + ] mask = [request.user.has_perm(perm, obj) for perm in perms] - actions = list(compress(actions, mask)) + allowed_actions = list(compress(allowed_actions, mask)) serializer = self.get_serializer( - actions, + allowed_actions, many=True, - context={"request": request, "objects": objects, "obj": obj}, + 
context={"request": request, "objects": objects, "obj": obj, "prototype": obj.prototype}, ) return Response(serializer.data) diff --git a/python/api/adcm/views.py b/python/api/adcm/views.py index e28da98b48..206f086d49 100644 --- a/python/api/adcm/views.py +++ b/python/api/adcm/views.py @@ -20,7 +20,6 @@ from rest_framework.mixins import ListModelMixin, RetrieveModelMixin -# pylint:disable-next=too-many-ancestors class ADCMViewSet(ListModelMixin, RetrieveModelMixin, GenericUIViewSet): queryset = ADCM.objects.select_related("prototype").all() serializer_class = ADCMSerializer diff --git a/python/api/base_view.py b/python/api/base_view.py index 17a0d79c0f..4d85bcf1d8 100644 --- a/python/api/base_view.py +++ b/python/api/base_view.py @@ -10,8 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=not-callable, unused-import, too-many-locals - import rest_framework.pagination from api.utils import AdcmFilterBackend, AdcmOrderingFilter, getlist_from_querydict from audit.utils import audit diff --git a/python/api/cluster/serializers.py b/python/api/cluster/serializers.py index 07a5d8aab5..673ec67215 100644 --- a/python/api/cluster/serializers.py +++ b/python/api/cluster/serializers.py @@ -25,7 +25,7 @@ from cm.schemas import RequiresUISchema from cm.status_api import get_cluster_status, get_hc_status from cm.upgrade import get_upgrade -from cm.validators import StartMidEndValidator +from cm.validators import ClusterUniqueValidator, StartMidEndValidator from django.conf import settings from rest_framework.exceptions import ValidationError from rest_framework.serializers import ( @@ -39,7 +39,6 @@ Serializer, SerializerMethodField, ) -from rest_framework.validators import UniqueValidator from adcm.serializers import EmptySerializer from adcm.utils import filter_actions, get_requires @@ -52,14 +51,6 @@ def get_cluster_id(obj): return obj.obj_ref.cluster.id -class ClusterUniqueValidator(UniqueValidator): - def __call__(self, value, serializer_field): - try: - super().__call__(value, serializer_field) - except ValidationError as e: - raise AdcmEx("CLUSTER_CONFLICT", f'Cluster with name "{value}" already exists') from e - - class ClusterSerializer(Serializer): id = IntegerField(read_only=True) prototype_id = IntegerField(help_text="ID of Cluster type") diff --git a/python/api/component/serializers.py b/python/api/component/serializers.py index c0cbbbeee6..fa483a88fe 100644 --- a/python/api/component/serializers.py +++ b/python/api/component/serializers.py @@ -10,15 +10,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# pylint: disable=redefined-builtin - from api.action.serializers import ActionShort from api.concern.serializers import ConcernItemSerializer, ConcernItemUISerializer from api.group_config.serializers import GroupConfigsHyperlinkedIdentityField from api.serializers import StringListSerializer from api.utils import CommonAPIURL, ObjectURL from cm.adcm_config.config import get_main_info -from cm.models import Action, MaintenanceMode, ServiceComponent +from cm.models import MAINTENANCE_MODE_BOTH_CASES_CHOICES, Action, ServiceComponent from cm.status_api import get_component_status from rest_framework.serializers import ( BooleanField, @@ -48,6 +46,12 @@ class ComponentSerializer(EmptySerializer): maintenance_mode = CharField(read_only=True) is_maintenance_mode_available = BooleanField(read_only=True) + def to_representation(self, instance: ServiceComponent) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class ComponentUISerializer(ComponentSerializer): action = CommonAPIURL(read_only=True, view_name="v1:object-action") @@ -142,14 +146,30 @@ def get_main_info(obj: ServiceComponent) -> str | None: class ComponentChangeMaintenanceModeSerializer(ModelSerializer): - maintenance_mode = ChoiceField(choices=(MaintenanceMode.ON.value, MaintenanceMode.OFF.value)) + maintenance_mode = ChoiceField(choices=MAINTENANCE_MODE_BOTH_CASES_CHOICES) class Meta: model = ServiceComponent fields = ("maintenance_mode",) + @staticmethod + def validate_maintenance_mode(value: str) -> str: + return value.lower() + + def to_representation(self, instance: ServiceComponent) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class ComponentAuditSerializer(ModelSerializer): class Meta: model = ServiceComponent fields = ("maintenance_mode",) + + def to_representation(self, instance: ServiceComponent) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data diff --git a/python/api/config/serializers.py b/python/api/config/serializers.py index ff529f1206..82e67b594a 100644 --- a/python/api/config/serializers.py +++ b/python/api/config/serializers.py @@ -18,8 +18,6 @@ from cm.api import update_obj_config from cm.errors import raise_adcm_ex from cm.models import ConfigLog, PrototypeConfig - -# pylint: disable=redefined-builtin from rest_flex_fields.serializers import FlexFieldsSerializerMixin from rest_framework.reverse import reverse from rest_framework.serializers import ( diff --git a/python/api/group_config/serializers.py b/python/api/group_config/serializers.py index fa622f6146..14c1848596 100644 --- a/python/api/group_config/serializers.py +++ b/python/api/group_config/serializers.py @@ -81,7 +81,7 @@ def to_internal_value(self, data): class GroupConfigsHyperlinkedIdentityField(serializers.HyperlinkedIdentityField): """Return url for group_config for Cluster, Provider, Component or Service""" - def get_url(self, obj, view_name, request, _format): # pylint: disable=redefined-builtin + def get_url(self, obj, view_name, request, _format): url = reverse(viewname=view_name, request=request, format=_format) return f"{url}?object_id={obj.id}&object_type={obj.prototype.type}" @@ -172,6 +172,12 @@ class Meta: "locked", ) + def to_representation(self, instance: Host) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = 
data["maintenance_mode"].upper() + + return data + class GroupConfigHostCandidateSerializer(GroupConfigHostSerializer): """Serializer for host candidate""" diff --git a/python/api/host/serializers.py b/python/api/host/serializers.py index 4f3941ab13..c3c3f54a99 100644 --- a/python/api/host/serializers.py +++ b/python/api/host/serializers.py @@ -17,7 +17,14 @@ from cm.adcm_config.config import get_main_info from cm.api import add_host from cm.issue import update_hierarchy_issues, update_issue_after_deleting -from cm.models import Action, Host, HostProvider, MaintenanceMode, Prototype +from cm.models import ( + MAINTENANCE_MODE_BOTH_CASES_CHOICES, + Action, + Host, + HostProvider, + MaintenanceMode, + Prototype, +) from cm.status_api import get_host_status from cm.validators import HostUniqueValidator, StartMidEndValidator from django.conf import settings @@ -60,6 +67,10 @@ class HostSerializer(EmptySerializer): is_maintenance_mode_available = BooleanField(read_only=True) url = ObjectURL(read_only=True, view_name="v1:host-details") + @staticmethod + def validate_maintenance_mode(value: str) -> str: + return value.lower() + @staticmethod def validate_prototype_id(prototype_id): return check_obj(Prototype, {"id": prototype_id, "type": "host"}) @@ -76,6 +87,12 @@ def create(self, validated_data): validated_data.get("description", ""), ) + def to_representation(self, instance) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class HostDetailSerializer(HostSerializer): bundle_id = IntegerField(read_only=True) @@ -121,14 +138,30 @@ class Meta: "maintenance_mode", ) + def to_representation(self, instance) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class HostChangeMaintenanceModeSerializer(ModelSerializer): - maintenance_mode = ChoiceField(choices=(MaintenanceMode.ON.value, MaintenanceMode.OFF.value)) + maintenance_mode = ChoiceField(choices=MAINTENANCE_MODE_BOTH_CASES_CHOICES) class Meta: model = Host fields = ("maintenance_mode",) + @staticmethod + def validate_maintenance_mode(value: str) -> str: + return value.lower() + + def to_representation(self, instance: Host): + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class ClusterHostSerializer(HostSerializer): host_id = IntegerField(source="id") diff --git a/python/api/job/views.py b/python/api/job/views.py index 4ba004bb07..cbc3e229cf 100644 --- a/python/api/job/views.py +++ b/python/api/job/views.py @@ -259,7 +259,7 @@ def get_serializer_class(self): @audit @action(methods=["get"], detail=True) - def download(self, request: Request, job_pk: int, log_pk: int): # pylint: disable=unused-argument + def download(self, request: Request, job_pk: int, log_pk: int): # self is necessary for audit log_storage = get_object_for_user( user=request.user, perms=VIEW_LOGSTORAGE_PERMISSION, klass=LogStorage, id=log_pk, job__id=job_pk diff --git a/python/api/serializers.py b/python/api/serializers.py index 394f10b9f8..580d2a4e64 100644 --- a/python/api/serializers.py +++ b/python/api/serializers.py @@ -141,7 +141,7 @@ def __init__(self, view_name, *args, **kwargs): self.url_args = args super().__init__(view_name=view_name, **kwargs) - def get_url(self, obj, view_name, request, _format): # pylint: disable=redefined-builtin + def get_url(self, obj, view_name, request, _format): kwargs = {} 
for url_arg in self.url_args: if url_arg.startswith(extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX): @@ -163,7 +163,7 @@ def __init__(self, view_name, *args, **kwargs): self.url_args = args super().__init__(view_name, **kwargs) - def get_url(self, obj, view_name, request, _format): # pylint: disable=redefined-builtin + def get_url(self, obj, view_name, request, _format): kwargs = {} for url_arg in self.url_args: if url_arg.startswith(extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX): diff --git a/python/api/service/serializers.py b/python/api/service/serializers.py index 1d7d24cadd..9b6791455c 100644 --- a/python/api/service/serializers.py +++ b/python/api/service/serializers.py @@ -10,8 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=redefined-builtin - from api.action.serializers import ActionShort from api.cluster.serializers import BindSerializer from api.component.serializers import ComponentUISerializer @@ -23,10 +21,10 @@ from cm.api import add_service_to_cluster, bind, multi_bind from cm.errors import AdcmEx from cm.models import ( + MAINTENANCE_MODE_BOTH_CASES_CHOICES, Action, Cluster, ClusterObject, - MaintenanceMode, Prototype, ServiceComponent, ) @@ -60,6 +58,12 @@ class ServiceSerializer(EmptySerializer): maintenance_mode = CharField(read_only=True) is_maintenance_mode_available = BooleanField(read_only=True) + def to_representation(self, instance: ClusterObject) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + @staticmethod def validate_prototype_id(prototype_id): check_obj(Prototype, {"id": prototype_id, "type": "service"}, "PROTOTYPE_NOT_FOUND") @@ -220,14 +224,30 @@ def get_status(obj): class ServiceChangeMaintenanceModeSerializer(ModelSerializer): - maintenance_mode = ChoiceField(choices=(MaintenanceMode.ON.value, MaintenanceMode.OFF.value)) + maintenance_mode = ChoiceField(choices=MAINTENANCE_MODE_BOTH_CASES_CHOICES) class Meta: model = ClusterObject fields = ("maintenance_mode",) + @staticmethod + def validate_maintenance_mode(value: str) -> str: + return value.lower() + + def to_representation(self, instance: ClusterObject) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data + class ServiceAuditSerializer(ModelSerializer): class Meta: model = ClusterObject fields = ("maintenance_mode",) + + def to_representation(self, instance) -> dict: + data = super().to_representation(instance=instance) + data["maintenance_mode"] = data["maintenance_mode"].upper() + + return data diff --git a/python/api/service/views.py b/python/api/service/views.py index afe69a916e..fd8dbdf58e 100644 --- a/python/api/service/views.py +++ b/python/api/service/views.py @@ -27,28 +27,9 @@ from api.stack.serializers import ImportSerializer from api.utils import check_obj, create from audit.utils import audit -from cm.api import ( - cancel_locking_tasks, - delete_service, - get_import, - unbind, - update_mm_objects, -) -from cm.errors import raise_adcm_ex -from cm.job import start_task -from cm.models import ( - Action, - Cluster, - ClusterBind, - ClusterObject, - HostComponent, - JobStatus, - Prototype, - ServiceComponent, - TaskLog, -) +from cm.api import get_import, unbind, update_mm_objects +from cm.models import Cluster, ClusterBind, ClusterObject, HostComponent, Prototype from cm.status_api import 
make_ui_service_status -from django.conf import settings from guardian.mixins import PermissionListMixin from rbac.viewsets import DjangoOnlyObjectPermissions from rest_framework import permissions @@ -57,7 +38,7 @@ from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST from adcm.permissions import check_custom_perm, get_object_for_user -from adcm.utils import get_maintenance_mode_response +from adcm.utils import delete_service_from_api, get_maintenance_mode_response class ServiceListView(PermissionListMixin, PaginatedView): @@ -115,64 +96,9 @@ def get_queryset(self, *args, **kwargs): return queryset @audit - def delete(self, request, *args, **kwargs): # pylint: disable=unused-argument, too-many-branches + def delete(self, request, *args, **kwargs): # pylint: disable=unused-argument instance: ClusterObject = self.get_object() - delete_action = Action.objects.filter( - prototype=instance.prototype, - name=settings.ADCM_DELETE_SERVICE_ACTION_NAME, - ).first() - host_components_exists = HostComponent.objects.filter(cluster=instance.cluster, service=instance).exists() - - if not delete_action: - if instance.state != "created": - raise_adcm_ex("SERVICE_DELETE_ERROR") - - if host_components_exists: - raise_adcm_ex("SERVICE_CONFLICT", f"Service #{instance.id} has component(s) on host(s)") - - cluster = instance.cluster - if cluster.state == "upgrading" and instance.prototype.name in cluster.before_upgrade["services"]: - return raise_adcm_ex(code="SERVICE_CONFLICT", msg="It is forbidden to delete service in upgrade mode") - - if ClusterBind.objects.filter(source_service=instance).exists(): - raise_adcm_ex("SERVICE_CONFLICT", f"Service #{instance.id} has exports(s)") - - if instance.prototype.required: - raise_adcm_ex("SERVICE_CONFLICT", f"Service #{instance.id} is required") - - if TaskLog.objects.filter(action=delete_action, status=JobStatus.RUNNING).exists(): - raise_adcm_ex("SERVICE_DELETE_ERROR", "Service is deleting now") - - for component in ServiceComponent.objects.filter(cluster=instance.cluster).exclude(service=instance): - if component.requires_service_name(service_name=instance.name): - raise_adcm_ex( - code="SERVICE_CONFLICT", - msg=f"Component {component.name} of service {component.service.display_name}" - f" requires this service or its component", - ) - - for service in ClusterObject.objects.filter(cluster=instance.cluster): - if service.requires_service_name(service_name=instance.name): - raise_adcm_ex( - code="SERVICE_CONFLICT", - msg=f"Service {service.display_name} requires this service or its component", - ) - - cancel_locking_tasks(obj=instance, obj_deletion=True) - if delete_action and (host_components_exists or instance.state != "created"): - start_task( - action=delete_action, - obj=instance, - conf={}, - attr={}, - hostcomponent=[], - hosts=[], - verbose=False, - ) - else: - delete_service(service=instance) - - return Response(status=HTTP_204_NO_CONTENT) + return delete_service_from_api(service=instance) class ServiceMaintenanceModeView(GenericUIView): diff --git a/python/api/stack/serializers.py b/python/api/stack/serializers.py index b44340adf1..a119003dad 100644 --- a/python/api/stack/serializers.py +++ b/python/api/stack/serializers.py @@ -61,6 +61,7 @@ class Meta: "license_url", "update", "url", + "signature_status", ) read_only_fields = fields extra_kwargs = {"url": {"lookup_url_kwarg": "bundle_pk"}} diff --git a/python/api/stack/views.py b/python/api/stack/views.py index 2f208c9c9c..e74c4c8d2b 100644 --- a/python/api/stack/views.py 
+++ b/python/api/stack/views.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pathlib import Path from api.action.serializers import StackActionSerializer from api.base_view import GenericUIViewSet, ModelPermOrReadOnlyForAuth @@ -39,7 +38,7 @@ from api.utils import check_obj from audit.utils import audit from cm.api import accept_license, get_license, load_service_map -from cm.bundle import delete_bundle, load_bundle, update_bundle +from cm.bundle import delete_bundle, load_bundle, update_bundle, upload_file from cm.models import ( Action, Bundle, @@ -49,7 +48,6 @@ PrototypeImport, Upgrade, ) -from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from rest_framework.authentication import SessionAuthentication, TokenAuthentication @@ -126,11 +124,7 @@ def create(self, request: Request, *args, **kwargs) -> Response: if not serializer.is_valid(): return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) - file_data = request.data["file"] - with open(Path(settings.DOWNLOAD_DIR, file_data.name), "wb+") as f: - for chunk in file_data.chunks(): - f.write(chunk) - + upload_file(file=request.data["file"]) return Response(status=HTTP_201_CREATED) @@ -269,7 +263,6 @@ def retrieve(self, request: Request, *args, **kwargs) -> Response: return Response(serializer.data) -# pylint:disable-next=too-many-ancestors class ServicePrototypeViewSet(ListModelMixin, RetrieveModelMixin, GenericUIViewSet): queryset = Prototype.objects.filter(type="service") serializer_class = ServicePrototypeSerializer diff --git a/python/api/tests/test_action.py b/python/api/tests/test_action.py index e9594a7258..cc34fe93b3 100644 --- a/python/api/tests/test_action.py +++ b/python/api/tests/test_action.py @@ -78,7 +78,7 @@ def test_list(self): def test_jinja_conf_success(self): path = Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_action_with_jinja_conf.tar", ) with open(file=path, encoding=settings.ENCODING_UTF_8) as f: @@ -99,7 +99,7 @@ def test_jinja_conf_success(self): def test_jinja_wrong_conf_fail(self): path = Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_action_with_jinja_wrong_conf.tar", ) with open(file=path, encoding=settings.ENCODING_UTF_8) as f: @@ -119,7 +119,7 @@ def test_jinja_wrong_conf_fail(self): def test_jinja_wrong_conf_path_fail(self): path = Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_action_with_jinja_wrong_conf_path.tar", ) with open(file=path, encoding=settings.ENCODING_UTF_8) as f: @@ -140,7 +140,7 @@ def test_jinja_wrong_conf_path_fail(self): def test_jinja_conf_serialize_success(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/test_actions_data.tar", ), ) diff --git a/python/api/tests/test_api.py b/python/api/tests/test_api.py index ccc76832bb..b9fd74a1af 100755 --- a/python/api/tests/test_api.py +++ b/python/api/tests/test_api.py @@ -36,7 +36,6 @@ gen_service, gen_task_log, ) -from django.conf import settings from django.urls import reverse from django.utils import timezone from rest_framework.response import Response @@ -58,7 +57,7 @@ class TestAPI(BaseTestCase): def setUp(self) -> None: super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "cm" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" self.bundle_adh_name = 
"adh.1.5.tar" self.bundle_ssh_name = "ssh.1.0.tar" self.cluster = "adh42" @@ -173,10 +172,10 @@ def test_docs(self): self.assertEqual(response.status_code, HTTP_200_OK) - def test_cluster(self): # pylint: disable=too-many-statements + def test_cluster(self): cluster_name = "test-cluster" cluster_url = reverse(viewname="v1:cluster") - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) bundle_id, proto_id = self.get_cluster_proto_id() response: Response = self.client.post(cluster_url, {}) @@ -259,7 +258,7 @@ def test_cluster_patching(self): name = "test-cluster" cluster_url = reverse(viewname="v1:cluster") - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) bundle_id, proto_id = self.get_cluster_proto_id() response: Response = self.client.post(cluster_url, {"name": name, "prototype_id": proto_id}) @@ -324,8 +323,8 @@ def test_cluster_host(self): host = "test.host.net" cluster_url = reverse(viewname="v1:cluster") - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) - self.upload_and_load_bundle(path=self.files_dir / self.bundle_ssh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_ssh_name) adh_bundle_id, cluster_proto = self.get_cluster_proto_id() @@ -391,7 +390,7 @@ def test_cluster_host(self): self.assertEqual(response.json()["code"], "BUNDLE_CONFLICT") def test_service(self): - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) service_id = self.get_service_proto_id() service_url = reverse(viewname="v1:service-prototype-list") this_service_url = reverse(viewname="v1:service-prototype-detail", kwargs={"prototype_pk": service_id}) @@ -425,7 +424,7 @@ def test_service(self): self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) def test_cluster_service(self): - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) service_proto_id = self.get_service_proto_id() bundle_id, cluster_proto_id = self.get_cluster_proto_id() @@ -499,8 +498,8 @@ def test_cluster_service(self): self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) def test_hostcomponent(self): # pylint: disable=too-many-statements,too-many-locals - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) - self.upload_and_load_bundle(path=self.files_dir / self.bundle_ssh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_ssh_name) adh_bundle_id, cluster_proto = self.get_cluster_proto_id() ssh_bundle_id, _, host_id = self.get_host_in_cluster(self.host) @@ -681,7 +680,7 @@ def test_hostcomponent(self): # pylint: disable=too-many-statements,too-many-lo self.assertEqual(response.json()["code"], "BUNDLE_CONFLICT") def test_config(self): # pylint: disable=too-many-statements - self.upload_and_load_bundle(path=self.files_dir / self.bundle_adh_name) + self.upload_and_load_bundle(path=self.test_files_dir / self.bundle_adh_name) adh_bundle_id, proto_id = self.get_cluster_proto_id() service_proto_id = self.get_service_proto_id() response: Response = self.client.post( diff --git 
a/python/api/tests/test_bundle.py b/python/api/tests/test_bundle.py index 96bb3c5f68..b2372541a6 100644 --- a/python/api/tests/test_bundle.py +++ b/python/api/tests/test_bundle.py @@ -28,7 +28,7 @@ class TestBundle(BaseTestCase): def setUp(self) -> None: super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "api" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "api" / "tests" / "files" self.bundle_1 = Bundle.objects.create( name="test_bundle_1", @@ -45,7 +45,7 @@ def setUp(self) -> None: Prototype.objects.create(bundle=self.bundle_2, name=self.bundle_2.name) self.test_bundle_filename = "bundle_cluster.tar" self.test_bundle_path = Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", self.test_bundle_filename, ) @@ -64,7 +64,7 @@ def test_load_bundle_wrong_cluster_mm_action_no_host_action_prop_fail(self): self.upload_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", bundle_filename, ), @@ -83,7 +83,7 @@ def test_load_bundle_wrong_cluster_mm_action_false_host_action_prop_fail(self): self.upload_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", bundle_filename, ), @@ -102,7 +102,7 @@ def test_load_bundle_cluster_mm_action_host_action_true_success(self): self.upload_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", bundle_filename, ), @@ -120,7 +120,7 @@ def test_load_bundle_service_with_host_mm_action_fail(self): self.upload_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", bundle_filename, ), @@ -139,7 +139,7 @@ def test_load_bundle_cluster_with_host_mm_has_ui_options_fail(self): self.upload_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files", bundle_filename, ), @@ -249,7 +249,7 @@ def test_accept_license(self): self.assertEqual(response.status_code, HTTP_200_OK) def test_adcm_min_version_success(self): - test_bundle_path = Path(settings.BASE_DIR, "python/api/tests/files/bundle_test_min_adcm_version.tar") + test_bundle_path = Path(self.base_dir, "python/api/tests/files/bundle_test_min_adcm_version.tar") self.upload_bundle(path=test_bundle_path) response: Response = self.client.post( @@ -262,7 +262,7 @@ def test_adcm_min_version_success(self): def test_upload_hc_apply_without_hc_acl_job_fail(self): bundle_filename = "hc_apply_without_hc_acl_job.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), @@ -273,7 +273,7 @@ def test_upload_hc_apply_without_hc_acl_job_fail(self): def test_upload_hc_apply_without_hc_acl_task_fail(self): bundle_filename = "hc_apply_without_hc_acl_task.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), @@ -284,7 +284,7 @@ def test_upload_hc_apply_without_hc_acl_task_fail(self): def test_upload_hc_apply_wrong_internal_script_fail(self): bundle_filename = "hc_apply_action_wrong_script_bundle_switch.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), @@ -295,7 +295,7 @@ def test_upload_hc_apply_wrong_internal_script_fail(self): def 
test_upload_hc_apply_upgrade_success(self): bundle_filename = "upgrade_hc_apply_success.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), @@ -305,7 +305,7 @@ def test_upload_hc_apply_upgrade_success(self): def test_upload_hc_apply_upgrade_no_hc_acl_fail(self): bundle_filename = "upgrade_hc_apply_no_hc_acl.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), @@ -316,7 +316,7 @@ def test_upload_hc_apply_upgrade_no_hc_acl_fail(self): def test_upload_hc_apply_upgrade_wrong_script_fail(self): bundle_filename = "upgrade_hc_apply_wrong_script.tar" - self.upload_bundle(path=Path(self.files_dir, bundle_filename)) + self.upload_bundle(path=Path(self.test_files_dir, bundle_filename)) response: Response = self.client.post( path=reverse(viewname="v1:load-bundle"), diff --git a/python/api/tests/test_component.py b/python/api/tests/test_component.py index 8573fac666..1a0b42e202 100644 --- a/python/api/tests/test_component.py +++ b/python/api/tests/test_component.py @@ -71,19 +71,19 @@ def test_change_maintenance_mode_wrong_name_fail(self): def test_change_maintenance_mode_on_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.component.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.ON) + self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(self.component.maintenance_mode, MaintenanceMode.ON) def test_change_maintenance_mode_on_no_service_issue_success(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_issue_component.tar", ), ) @@ -111,14 +111,14 @@ def test_change_maintenance_mode_on_no_service_issue_success(self): response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": component_2.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) component_2.refresh_from_db() service.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.ON) + self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(component_2.maintenance_mode, MaintenanceMode.ON) self.assertFalse(service.concerns.exists()) @@ -134,13 +134,13 @@ def test_change_maintenance_mode_on_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.component.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.component.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( 
action=action, @@ -159,7 +159,7 @@ def test_change_maintenance_mode_on_from_on_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.component.refresh_from_db() @@ -174,13 +174,13 @@ def test_change_maintenance_mode_off_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.component.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.OFF) + self.assertEqual(response.data["maintenance_mode"], "OFF") self.assertEqual(self.component.maintenance_mode, MaintenanceMode.OFF) def test_change_maintenance_mode_off_with_action_success(self): @@ -197,13 +197,13 @@ def test_change_maintenance_mode_off_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.component.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.component.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, @@ -222,7 +222,7 @@ def test_change_maintenance_mode_off_to_off_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.component.refresh_from_db() @@ -237,7 +237,7 @@ def test_change_maintenance_mode_changing_now_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) diff --git a/python/api/tests/test_config.py b/python/api/tests/test_config.py index 63b9af9b66..b22c632ee4 100644 --- a/python/api/tests/test_config.py +++ b/python/api/tests/test_config.py @@ -15,7 +15,6 @@ from cm.adcm_config.ansible import ansible_decrypt from cm.models import ADCM, ConfigLog -from django.conf import settings from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_400_BAD_REQUEST @@ -28,7 +27,7 @@ def setUp(self) -> None: super().setUp() _, self.cluster, self.config_log = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(settings.BASE_DIR, "python/api/tests/files/bundle_test_password.tar"), + bundle_path=Path(self.base_dir, "python/api/tests/files/bundle_test_password.tar"), ) def test_post_same_password_success(self): @@ -83,7 +82,7 @@ def setUp(self) -> None: super().setUp() _, self.cluster, self.config_log = 
self.upload_bundle_create_cluster_config_log( - bundle_path=Path(settings.BASE_DIR, "python/api/tests/files/bundle_test_secrettext.tar"), + bundle_path=Path(self.base_dir, "python/api/tests/files/bundle_test_secrettext.tar"), ) def test_post_same_secrettext_success(self): @@ -138,7 +137,7 @@ def setUp(self) -> None: super().setUp() _, self.cluster, self.config_log = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(settings.BASE_DIR, "python/api/tests/files/bundle_test_secretfile.tar"), + bundle_path=Path(self.base_dir, "python/api/tests/files/bundle_test_secretfile.tar"), ) def test_post_same_secretfile_success(self): @@ -193,7 +192,7 @@ def setUp(self) -> None: super().setUp() _, self.cluster, self.config_log = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(settings.BASE_DIR, "python/api/tests/files/bundle_test_secretmap.tar"), + bundle_path=Path(self.base_dir, "python/api/tests/files/bundle_test_secretmap.tar"), ) def test_post_same_secretmap_success(self): @@ -378,7 +377,7 @@ def test_superuser_list_config_log_success(self): self.login() self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_password.tar", ), ) diff --git a/python/api/tests/test_host.py b/python/api/tests/test_host.py index 0983bb6224..c3bc243270 100644 --- a/python/api/tests/test_host.py +++ b/python/api/tests/test_host.py @@ -68,7 +68,7 @@ def test_change_mm_wrong_name_fail(self): def test_change_mm_to_changing_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.CHANGING}, + data={"maintenance_mode": "CHANGING"}, ) self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) @@ -76,13 +76,13 @@ def test_change_mm_to_changing_fail(self): def test_change_mm_on_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.host.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.ON) + self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(self.host.maintenance_mode, MaintenanceMode.ON) def test_change_mm_on_with_action_success(self): @@ -96,13 +96,13 @@ def test_change_mm_on_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.host.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.host.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, @@ -121,7 +121,7 @@ def test_change_mm_on_from_on_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.host.refresh_from_db() @@ -136,13 +136,13 @@ def 
test_change_mm_off_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.host.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.OFF) + self.assertEqual(response.data["maintenance_mode"], "OFF") self.assertEqual(self.host.maintenance_mode, MaintenanceMode.OFF) def test_change_mm_off_with_action_success(self): @@ -156,13 +156,13 @@ def test_change_mm_off_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.host.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.host.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, @@ -181,7 +181,7 @@ def test_change_mm_off_to_off_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.host.refresh_from_db() @@ -196,14 +196,14 @@ def test_change_mm_changing_now_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -211,14 +211,14 @@ def test_change_mm_changing_now_fail(self): def test_cluster_clear_issue_success(self): provider_bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_provider_concern.tar", ), ) cluster_bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/bundle_test_cluster_with_mm.tar", ), ) @@ -271,7 +271,7 @@ def test_cluster_clear_issue_success(self): self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertFalse(cluster.concerns.exists()) @@ -282,7 +282,7 @@ def test_mm_constraint_by_no_cluster_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -293,7 +293,7 @@ def test_mm_constraint_by_cluster_without_mm_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": 
MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -301,7 +301,7 @@ def test_mm_constraint_by_cluster_without_mm_fail(self): def test_change_maintenance_mode_on_with_action_via_bundle_success(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/cluster_using_plugin.tar", ), ) @@ -332,13 +332,13 @@ def test_change_maintenance_mode_on_with_action_via_bundle_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) host.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(host.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, diff --git a/python/api/tests/test_hostcomponent.py b/python/api/tests/test_hostcomponent.py index 153c5ba087..a9cd97d073 100644 --- a/python/api/tests/test_hostcomponent.py +++ b/python/api/tests/test_hostcomponent.py @@ -13,7 +13,6 @@ from pathlib import Path from cm.models import HostComponent, ObjectType, Prototype, ServiceComponent -from django.conf import settings from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -24,10 +23,10 @@ class TestHostComponentOrdering(BaseTestCase): def setUp(self) -> None: super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "api" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "api" / "tests" / "files" self.cluster_pk = self.create_adcm_entity( - bundle_filepath=self.files_dir / "test_cluster_many_components.tar", entity_type=ObjectType.CLUSTER + bundle_filepath=self.test_files_dir / "test_cluster_many_components.tar", entity_type=ObjectType.CLUSTER ) self.create_hc() @@ -73,7 +72,7 @@ def create_adcm_entity( def create_hosts(self, count: int) -> list[int]: provider_pk = self.create_adcm_entity( - bundle_filepath=self.files_dir / "provider.tar", entity_type=ObjectType.PROVIDER + bundle_filepath=self.test_files_dir / "provider.tar", entity_type=ObjectType.PROVIDER ) host_pks = [] diff --git a/python/api/tests/test_job.py b/python/api/tests/test_job.py index 011933e8fb..15524cc5d0 100644 --- a/python/api/tests/test_job.py +++ b/python/api/tests/test_job.py @@ -15,7 +15,6 @@ from unittest.mock import patch from cm.models import ADCM, Action, ActionType, Cluster, JobLog, Prototype, TaskLog -from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.urls import reverse from django.utils import timezone @@ -161,7 +160,7 @@ def test_retrieve(self): def test_log_files(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/no-log-files.tar", ), ) @@ -188,7 +187,7 @@ def test_log_files(self): def test_task_permissions(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/no-log-files.tar", ), ) @@ -199,7 +198,7 @@ def test_task_permissions(self): role = Role.objects.get(name="Cluster Administrator") policy = Policy.objects.create(name="test_policy", role=role) - 
policy.user.add(self.no_rights_user) + policy.group.add(self.no_rights_user_group) policy.add_object(cluster) policy.apply() diff --git a/python/api/tests/test_service.py b/python/api/tests/test_service.py index 019eb3ddf1..db115296e7 100644 --- a/python/api/tests/test_service.py +++ b/python/api/tests/test_service.py @@ -64,7 +64,7 @@ def setUp(self) -> None: def get_host(self, bundle_path: str): provider_bundle = self.upload_and_load_bundle( - path=Path(settings.BASE_DIR, bundle_path), + path=Path(self.base_dir, bundle_path), ) provider_prototype = Prototype.objects.get(bundle=provider_bundle, type="provider") provider_response: Response = self.client.post( @@ -81,7 +81,7 @@ def get_host(self, bundle_path: str): return Host.objects.get(pk=host_response.data["id"]) def get_cluster(self, bundle_path: str): - cluster_bundle = self.upload_and_load_bundle(path=Path(settings.BASE_DIR, bundle_path)) + cluster_bundle = self.upload_and_load_bundle(path=Path(self.base_dir, bundle_path)) cluster_prototype = Prototype.objects.get(bundle_id=cluster_bundle.pk, type="cluster") cluster_response: Response = self.client.post( path=reverse(viewname="v1:cluster"), @@ -102,13 +102,13 @@ def test_change_maintenance_mode_wrong_name_fail(self): def test_change_maintenance_mode_on_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.service.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.ON) + self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(self.service.maintenance_mode, MaintenanceMode.ON) def test_change_maintenance_mode_on_with_action_success(self): @@ -123,13 +123,13 @@ def test_change_maintenance_mode_on_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.service.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.service.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, @@ -148,7 +148,7 @@ def test_change_maintenance_mode_on_from_on_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.service.refresh_from_db() @@ -163,13 +163,13 @@ def test_change_maintenance_mode_off_no_action_success(self): response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.service.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.OFF) + self.assertEqual(response.data["maintenance_mode"], "OFF") 
self.assertEqual(self.service.maintenance_mode, MaintenanceMode.OFF) def test_change_maintenance_mode_off_with_action_success(self): @@ -186,13 +186,13 @@ def test_change_maintenance_mode_off_with_action_success(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.service.refresh_from_db() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data["maintenance_mode"], MaintenanceMode.CHANGING) + self.assertEqual(response.data["maintenance_mode"], "CHANGING") self.assertEqual(self.service.maintenance_mode, MaintenanceMode.CHANGING) start_task_mock.assert_called_once_with( action=action, @@ -211,7 +211,7 @@ def test_change_maintenance_mode_off_to_off_with_action_fail(self): with patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.service.refresh_from_db() @@ -226,14 +226,14 @@ def test_change_maintenance_mode_changing_now_fail(self): response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) response: Response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.OFF}, + data={"maintenance_mode": "OFF"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -248,7 +248,7 @@ def test_delete_without_action(self): def test_delete_with_action(self): action = Action.objects.create(prototype=self.service.prototype, name=settings.ADCM_DELETE_SERVICE_ACTION_NAME) - with patch("api.service.views.delete_service"), patch("api.service.views.start_task") as start_task_mock: + with patch("adcm.utils.delete_service"), patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) @@ -279,7 +279,7 @@ def test_delete_with_action(self): component=service_component, ) - with patch("api.service.views.delete_service"), patch("api.service.views.start_task") as start_task_mock: + with patch("adcm.utils.delete_service"), patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) @@ -300,7 +300,7 @@ def test_delete_with_action_not_created_state(self): self.service.state = "not created" self.service.save(update_fields=["state"]) - with patch("api.service.views.delete_service"), patch("api.service.views.start_task") as start_task_mock: + with patch("adcm.utils.delete_service"), patch("adcm.utils.start_task") as start_task_mock: response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) @@ -317,7 +317,7 @@ def test_delete_with_action_not_created_state(self): ) def test_upload_with_cyclic_requires(self): - self.upload_and_load_bundle(path=Path(settings.BASE_DIR, "python/api/tests/files/bundle_cluster_requires.tar")) + 
self.upload_and_load_bundle(path=Path(self.base_dir, "python/api/tests/files/bundle_cluster_requires.tar")) def test_delete_service_with_requires_fail(self): host = self.get_host(bundle_path="python/api/tests/files/bundle_test_provider_concern.tar") @@ -365,7 +365,7 @@ def test_delete_required_fail(self): self.service.prototype.required = True self.service.prototype.save(update_fields=["required"]) - with patch("api.service.views.delete_service"): + with patch("adcm.utils.delete_service"): response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) @@ -382,7 +382,7 @@ def test_delete_export_bind_fail(self): source_service=self.service, ) - with patch("api.service.views.delete_service"): + with patch("adcm.utils.delete_service"): response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) @@ -399,7 +399,7 @@ def test_delete_import_bind_success(self): source_service=service_2, ) - with patch("api.service.views.delete_service"): + with patch("adcm.utils.delete_service"): response: Response = self.client.delete( path=reverse(viewname="v1:service-details", kwargs={"service_id": self.service.pk}), ) diff --git a/python/api/tests/test_stack.py b/python/api/tests/test_stack.py index 2d530cd298..8b0274bc0b 100644 --- a/python/api/tests/test_stack.py +++ b/python/api/tests/test_stack.py @@ -13,7 +13,6 @@ from pathlib import Path from cm.models import Prototype -from django.conf import settings from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -25,7 +24,7 @@ class TestPrototypeAPI(BaseTestCase): def test_cluster_prototype_retrieve_success(self): bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/api/tests/files/test_actions_data.tar", ), ) diff --git a/python/api_ui/stack/views.py b/python/api_ui/stack/views.py index 7593265c2a..280c593f2e 100644 --- a/python/api_ui/stack/views.py +++ b/python/api_ui/stack/views.py @@ -33,9 +33,7 @@ def get_distinct_queryset(queryset: QuerySet) -> QuerySet: return queryset.filter(pk__in=distinct_prototype_pks) -class ClusterPrototypeUIViewSet( - PrototypeUIViewMixin, ListModelMixin, GenericViewSet -): # pylint: disable=too-many-ancestors +class ClusterPrototypeUIViewSet(PrototypeUIViewMixin, ListModelMixin, GenericViewSet): permission_classes = (IsAuthenticated,) serializer_class = PrototypeUISerializer ordering_fields = ("id", "name", "display_name") @@ -45,9 +43,7 @@ def get_queryset(self): return self.get_distinct_queryset(queryset=Prototype.objects.filter(type=ObjectType.CLUSTER)) -class ProviderPrototypeUIViewSet( - PrototypeUIViewMixin, ListModelMixin, GenericViewSet -): # pylint: disable=too-many-ancestors +class ProviderPrototypeUIViewSet(PrototypeUIViewMixin, ListModelMixin, GenericViewSet): permission_classes = (IsAuthenticated,) serializer_class = PrototypeUISerializer ordering_fields = ("id", "name", "display_name") diff --git a/python/api_v2/action/filters.py b/python/api_v2/action/filters.py index 73b959f4b3..3183e578ec 100644 --- a/python/api_v2/action/filters.py +++ b/python/api_v2/action/filters.py @@ -10,18 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from cm.models import Action -from django.db.models import QuerySet -from django_filters.rest_framework import CharFilter, FilterSet +from django_filters.rest_framework import CharFilter, FilterSet, OrderingFilter class ActionFilter(FilterSet): - name = CharFilter(label="Action name", method="filter_name") - - class Meta: - model = Action - fields = ["name"] - - @staticmethod - def filter_name(queryset: QuerySet, name: str, value: str) -> QuerySet: # pylint: disable=unused-argument - return queryset.filter(name=value) + name = CharFilter(label="Action name", field_name="name", lookup_expr="icontains") + ordering = OrderingFilter(fields={"id": "id"}, field_labels={"id": "ID"}, label="ordering") diff --git a/python/api_v2/action/serializers.py b/python/api_v2/action/serializers.py index 977a00342e..c6e268a173 100644 --- a/python/api_v2/action/serializers.py +++ b/python/api_v2/action/serializers.py @@ -9,11 +9,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - from cm.models import Action +from rest_framework.fields import IntegerField from rest_framework.serializers import ( BooleanField, JSONField, + ListSerializer, ModelSerializer, SerializerMethodField, ) @@ -32,22 +33,51 @@ def get_start_impossible_reason(self, action: Action) -> str | None: return action.get_start_impossible_reason(obj=self.context["obj"]) -class ActionRetrieveSerializer(ModelSerializer): +class ActionRetrieveSerializer(ActionListSerializer): is_allow_to_terminate = BooleanField(source="allow_to_terminate") host_component_map_rules = JSONField(source="hostcomponentmap") disclaimer = SerializerMethodField() + config_schema = SerializerMethodField() + adcm_meta = SerializerMethodField() class Meta: model = Action - fields = ["is_allow_to_terminate", "host_component_map_rules", "disclaimer"] + fields = [ + "id", + "name", + "display_name", + "start_impossible_reason", + "is_allow_to_terminate", + "host_component_map_rules", + "disclaimer", + "config_schema", + "adcm_meta", + ] @staticmethod def get_disclaimer(action: Action) -> str: - return action.ui_options.get("disclaimer") or "" + return action.ui_options.get("disclaimer", "") + + def get_config_schema(self, _: Action) -> dict: + return self.context["config_schema"] + + def get_adcm_meta(self, _: Action) -> dict: + return self.context["adcm_meta"] + + +class HostComponentEntry(EmptySerializer): + host_id = IntegerField() + component_id = IntegerField() class ActionRunSerializer(EmptySerializer): - host_component_map = JSONField() - config = JSONField() - attr = JSONField() - is_verbose = BooleanField() + host_component_map = ListSerializer(child=HostComponentEntry(), required=False, default=[]) + config = JSONField(required=False, default={}) + adcm_meta = JSONField(required=False, default={}) + is_verbose = BooleanField(required=False, default=False) + + +class ActionNameSerializer(ModelSerializer): + class Meta: + model = Action + fields = ["id", "name", "display_name"] diff --git a/python/api_v2/action/utils.py b/python/api_v2/action/utils.py index e28e9861c2..8ee1caf292 100644 --- a/python/api_v2/action/utils.py +++ b/python/api_v2/action/utils.py @@ -12,9 +12,9 @@ from hashlib import sha256 from itertools import compress -from typing import Iterable, Iterator +from typing import Iterable, Iterator, List, Literal -from cm.models import Action, ADCMEntity +from cm.models import Action, ADCMEntity, ServiceComponent from django.conf 
import settings from rbac.models import User @@ -40,3 +40,17 @@ def check_run_perms(user: User, action: Action, obj: ADCMEntity) -> bool: return True return user.has_perm(perm=get_run_actions_permissions(actions=[action])[0], obj=obj) + + +def insert_service_ids( + hc_create_data: List[dict[Literal["host_id", "component_id"], int]] +) -> List[dict[Literal["host_id", "component_id", "service_id"], int]]: + component_ids = {single_hc["component_id"] for single_hc in hc_create_data} + component_service_map = { + component.pk: component.service_id for component in ServiceComponent.objects.filter(pk__in=component_ids) + } + + for single_hc in hc_create_data: + single_hc["service_id"] = component_service_map[single_hc["component_id"]] + + return hc_create_data diff --git a/python/api_v2/action/views.py b/python/api_v2/action/views.py index 6b487d298c..620025ceb9 100644 --- a/python/api_v2/action/views.py +++ b/python/api_v2/action/views.py @@ -10,23 +10,40 @@ # See the License for the specific language governing permissions and # limitations under the License. +from itertools import compress + from api_v2.action.filters import ActionFilter from api_v2.action.serializers import ( ActionListSerializer, ActionRetrieveSerializer, ActionRunSerializer, ) -from api_v2.action.utils import check_run_perms, filter_actions_by_user_perm +from api_v2.action.utils import ( + check_run_perms, + filter_actions_by_user_perm, + insert_service_ids, +) +from api_v2.config.utils import ( + convert_adcm_meta_to_attr, + convert_attr_to_adcm_meta, + get_config_schema, +) +from api_v2.task.serializers import TaskListSerializer +from api_v2.views import CamelCaseGenericViewSet +from cm.adcm_config.config import get_prototype_config +from cm.errors import AdcmEx from cm.job import start_task -from cm.models import Action +from cm.models import Action, ConcernType, Host, HostComponent +from django.conf import settings +from django.db.models import Q +from django_filters.rest_framework.backends import DjangoFilterBackend from guardian.mixins import PermissionListMixin from rest_framework.decorators import action from rest_framework.exceptions import NotFound from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import HTTP_403_FORBIDDEN -from rest_framework.viewsets import GenericViewSet +from rest_framework.status import HTTP_200_OK, HTTP_403_FORBIDDEN from adcm.mixins import GetParentObjectMixin from adcm.permissions import ( @@ -34,16 +51,20 @@ DjangoModelPermissionsAudit, get_object_for_user, ) -from adcm.utils import filter_actions class ActionViewSet( # pylint: disable=too-many-ancestors - PermissionListMixin, GenericViewSet, ListModelMixin, RetrieveModelMixin, GetParentObjectMixin + PermissionListMixin, ListModelMixin, RetrieveModelMixin, GetParentObjectMixin, CamelCaseGenericViewSet ): - queryset = Action.objects.all() - serializer_class = ActionListSerializer + queryset = ( + Action.objects.select_related("prototype") + .filter(upgrade__isnull=True) + .exclude(name__in=settings.ADCM_SERVICE_ACTION_NAMES_SET) + .order_by("pk") + ) permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_ACTION_PERM] + filter_backends = (DjangoFilterBackend,) filterset_class = ActionFilter def get_serializer_class( @@ -55,29 +76,41 @@ def get_serializer_class( if self.action == "run": return ActionRunSerializer - return self.serializer_class + return ActionListSerializer - def 
get_queryset(self, *args, **kwargs): + def list(self, request: Request, *args, **kwargs) -> Response: parent_object = self.get_parent_object() + if parent_object is None: raise NotFound("Can't find action's parent object") - return super().get_queryset(*args, **kwargs).filter(prototype=parent_object.prototype) + if parent_object.concerns.filter(type=ConcernType.LOCK).exists(): + return Response(data=[]) - def list(self, request: Request, *args, **kwargs) -> Response: - parent_object = self.get_parent_object() - if parent_object is None: - raise NotFound("Can't find action's parent object") + prototype_object = {} - allowed_actions = filter_actions( - obj=parent_object, - actions=filter_actions_by_user_perm( - user=request.user, - obj=parent_object, - actions=self.filter_queryset(queryset=self.get_queryset()), - ), + if isinstance(parent_object, Host) and parent_object.cluster: + prototype_object[parent_object.cluster.prototype] = parent_object.cluster + + for hc_item in HostComponent.objects.filter(host=parent_object).select_related( + "service__prototype", "component__prototype" + ): + prototype_object[hc_item.service.prototype] = hc_item.service + prototype_object[hc_item.component.prototype] = hc_item.component + + actions = self.filter_queryset( + self.get_queryset().filter( + Q(prototype=parent_object.prototype, host_action=False) + | Q(prototype__in=prototype_object.keys(), host_action=True) + ) ) - serializer = self.get_serializer_class()(instance=allowed_actions, many=True, context={"obj": parent_object}) + prototype_object[parent_object.prototype] = parent_object + + allowed_actions_mask = [act.allowed(prototype_object[act.prototype]) for act in actions] + actions = list(compress(actions, allowed_actions_mask)) + actions = filter_actions_by_user_perm(user=request.user, obj=parent_object, actions=actions) + + serializer = self.get_serializer_class()(instance=actions, many=True, context={"obj": parent_object}) return Response(data=serializer.data) @@ -89,7 +122,19 @@ def retrieve(self, request, *args, **kwargs): # check permissions get_object_for_user(user=request.user, perms=VIEW_ACTION_PERM, klass=Action, pk=kwargs["pk"]) - return super().retrieve(request, *args, **kwargs) + action_ = self.get_object() + schema = {"fields": get_config_schema(parent_object=parent_object, action=action_)} + + attr = {} + if not action_.config_jinja: + _, _, _, attr = get_prototype_config(prototype=action_.prototype, action=action_) + + adcm_meta = convert_attr_to_adcm_meta(attr=attr) + serializer = self.get_serializer_class()( + instance=action_, context={"obj": parent_object, "config_schema": schema, "adcm_meta": adcm_meta} + ) + + return Response(data=serializer.data) @action(methods=["post"], detail=True, url_path="run") def run(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument @@ -98,20 +143,24 @@ def run(self, request: Request, *args, **kwargs) -> Response: # pylint: disable raise NotFound("Can't find action's parent object") target_action = get_object_for_user(user=request.user, perms=VIEW_ACTION_PERM, klass=Action, pk=kwargs["pk"]) + + if reason := target_action.get_start_impossible_reason(parent_object): + raise AdcmEx("ACTION_ERROR", msg=reason) + if not check_run_perms(user=request.user, action=target_action, obj=parent_object): return Response(data="Run action forbidden", status=HTTP_403_FORBIDDEN) serializer = self.get_serializer_class()(data=request.data) serializer.is_valid(raise_exception=True) - start_task( + task = start_task( 
action=target_action, obj=parent_object, conf=serializer.validated_data["config"], - attr=serializer.validated_data["attr"], - hostcomponent=serializer.validated_data["host_component_map"], + attr=convert_adcm_meta_to_attr(adcm_meta=serializer.validated_data["adcm_meta"]), + hostcomponent=insert_service_ids(hc_create_data=serializer.validated_data["host_component_map"]), hosts=[], verbose=serializer.validated_data["is_verbose"], ) - return Response() + return Response(status=HTTP_200_OK, data=TaskListSerializer(instance=task).data) diff --git a/python/api_v2/adcm/__init__.py b/python/api_v2/adcm/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/adcm/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/adcm/serializers.py b/python/api_v2/adcm/serializers.py new file mode 100644 index 0000000000..c28d39a3f8 --- /dev/null +++ b/python/api_v2/adcm/serializers.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from rbac.models import User +from rest_framework.serializers import BooleanField, CharField, ModelSerializer + +from adcm.serializers import EmptySerializer + + +class LoginSerializer(EmptySerializer): + username = CharField(write_only=True) + password = CharField(style={"input_type": "password"}, trim_whitespace=False, write_only=True) + + +class ProfileSerializer(ModelSerializer): + new_password = CharField(trim_whitespace=False, required=False, write_only=True, source="password") + current_password = CharField(trim_whitespace=False, required=False, write_only=True) + is_super_user = BooleanField(source="is_superuser", read_only=True) + + class Meta: + model = User + fields = [ + "id", + "username", + "email", + "first_name", + "last_name", + "is_super_user", + "new_password", + "current_password", + ] + read_only_fields = ["username", "email", "first_name", "last_name", "is_super_user"] diff --git a/python/api_v2/adcm/urls.py b/python/api_v2/adcm/urls.py new file mode 100644 index 0000000000..3a36095d84 --- /dev/null +++ b/python/api_v2/adcm/urls.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
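# --- Illustrative sketch, not part of the patch: the shape of the reworked v2
# "run" payload and what insert_service_ids() adds before start_task() is
# called. All IDs below are made-up placeholders.
from api_v2.action.utils import insert_service_ids

run_payload = {
    "host_component_map": [{"host_id": 1, "component_id": 7}],  # HostComponentEntry items
    "config": {},       # optional, defaults to {}
    "adcm_meta": {},    # converted to attr via convert_adcm_meta_to_attr()
    "is_verbose": False,
}

# Assuming component 7 belongs to service 3, the helper looks the service up in
# ServiceComponent and enriches every entry in place:
insert_service_ids(hc_create_data=run_payload["host_component_map"])
# -> [{"host_id": 1, "component_id": 7, "service_id": 3}]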
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.adcm.views import ( + ADCMConfigView, + LoginView, + LogoutView, + ProfileView, + TokenView, +) +from django.urls import path + +urlpatterns = [ + path("login/", LoginView.as_view(), name="login"), + path("logout/", LogoutView.as_view(), name="logout"), + path("token/", TokenView.as_view(), name="token"), + path("profile/", ProfileView.as_view(), name="profile"), + path("configs/", ADCMConfigView.as_view({"get": "list", "post": "create"}), name="adcm-config-list"), + path("configs/<int:pk>/", ADCMConfigView.as_view({"get": "retrieve"}), name="adcm-config-detail"), +] diff --git a/python/api_v2/adcm/views.py b/python/api_v2/adcm/views.py new file mode 100644 index 0000000000..c03f786e6c --- /dev/null +++ b/python/api_v2/adcm/views.py @@ -0,0 +1,128 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.adcm.serializers import LoginSerializer, ProfileSerializer +from api_v2.config.views import ConfigLogViewSet +from cm.adcm_config.config import get_adcm_config +from cm.errors import AdcmEx +from cm.models import ADCM, ConfigLog +from django.contrib.auth import authenticate, login, logout +from django.contrib.auth.models import User as AuthUser +from djangorestframework_camel_case.parser import ( + CamelCaseFormParser, + CamelCaseJSONParser, + CamelCaseMultiPartParser, +) +from djangorestframework_camel_case.render import ( + CamelCaseBrowsableAPIRenderer, + CamelCaseJSONRenderer, +) +from rbac.models import User +from rbac.services.user import update_user +from rest_framework.authentication import TokenAuthentication +from rest_framework.authtoken.models import Token +from rest_framework.generics import GenericAPIView, RetrieveUpdateAPIView +from rest_framework.permissions import AllowAny, IsAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_204_NO_CONTENT + +from adcm.serializers import EmptySerializer + + +class BaseLoginView(GenericAPIView): + permission_classes = (AllowAny,) + serializer_class = LoginSerializer + renderer_classes = [CamelCaseJSONRenderer, CamelCaseBrowsableAPIRenderer] + parser_classes = [CamelCaseJSONParser, CamelCaseMultiPartParser, CamelCaseFormParser] + http_method_names = ["post"] + + def perform_login(self, request: Request) -> AuthUser: + serializer = self.serializer_class(data=request.data) + serializer.is_valid(raise_exception=True) + + user = authenticate(request=request, **serializer.validated_data) + if user is None: + raise AdcmEx(code="AUTH_ERROR") + + login(request=request, user=user, backend="django.contrib.auth.backends.ModelBackend") + 
return user + + +class LoginView(BaseLoginView): + def post(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + self.perform_login(request=request) + _, adcm_auth_config = get_adcm_config(section="auth_policy") + + return Response(data={"auth_settings": adcm_auth_config}) + + +class LogoutView(GenericAPIView): + permission_classes = (IsAuthenticated,) + serializer_class = EmptySerializer + http_method_names = ["post"] + parser_classes = [CamelCaseJSONParser, CamelCaseMultiPartParser, CamelCaseFormParser] + renderer_classes = [CamelCaseJSONRenderer, CamelCaseBrowsableAPIRenderer] + + def post(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + logout(request) + + return Response(status=HTTP_204_NO_CONTENT) + + +class TokenView(BaseLoginView): + authentication_classes = (TokenAuthentication,) + + def post(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + user = self.perform_login(request=request) + token, _ = Token.objects.get_or_create(user=user) + + return Response({"token": token.key}) + + +class ProfileView(RetrieveUpdateAPIView): + permission_classes = (IsAuthenticated,) + queryset = User.objects.all() + serializer_class = ProfileSerializer + renderer_classes = [CamelCaseJSONRenderer, CamelCaseBrowsableAPIRenderer] + parser_classes = [CamelCaseJSONParser, CamelCaseMultiPartParser, CamelCaseFormParser] + + def get_object(self) -> User: + return User.objects.get(user_ptr=self.request.user) + + def update(self, request, *args, **kwargs): + instance = self.get_object() + serializer = self.get_serializer(instance, data=request.data, partial=True) + serializer.is_valid(raise_exception=True) + + user = update_user( + user=instance, + context_user=self.request.user, + partial=True, + api_v2_behaviour=True, + **serializer.validated_data, + ) + + return Response(data=self.get_serializer(instance=user).data) + + +class ADCMConfigView(ConfigLogViewSet): # pylint: disable=too-many-ancestors + def get_queryset(self, *args, **kwargs): + return ( + ConfigLog.objects.select_related("obj_ref__adcm__prototype") + .filter(obj_ref__adcm__isnull=False) + .order_by("-pk") + ) + + def get_parent_object(self) -> ADCM | None: + return ADCM.objects.first() diff --git a/python/api_v2/audit/filters.py b/python/api_v2/audit/filters.py index 4b0b2d32a0..046d458d0d 100644 --- a/python/api_v2/audit/filters.py +++ b/python/api_v2/audit/filters.py @@ -10,51 +10,70 @@ # See the License for the specific language governing permissions and # limitations under the License. 
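# --- Illustrative only: exercising the new api_v2 auth endpoints declared
# above ("login/", "token/", "profile/"). The /api/v2/ mount point and the
# credentials are assumptions, not part of this patch.
from rest_framework.test import APIClient

client = APIClient()
response = client.post("/api/v2/token/", data={"username": "admin", "password": "admin"})
client.credentials(HTTP_AUTHORIZATION=f"Token {response.data['token']}")  # TokenView returns {"token": ...}

# ProfileView only lets the caller change their own password; the camelCase
# keys are unwrapped to new_password/current_password by the CamelCase parsers.
client.patch("/api/v2/profile/", data={"newPassword": "secret", "currentPassword": "admin"})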
-from audit.models import AuditLog, AuditObjectType, AuditSession -from django_filters import DateTimeFilter + +from audit.models import ( + AuditLog, + AuditLogOperationResult, + AuditLogOperationType, + AuditObjectType, + AuditSession, + AuditSessionLoginResult, +) from django_filters.rest_framework import ( CharFilter, ChoiceFilter, - DateFilter, + DateTimeFilter, FilterSet, - IsoDateTimeFromToRangeFilter, + OrderingFilter, ) -class AuditLogListFilter(FilterSet): +class AuditLogFilterSet(FilterSet): + object_name = CharFilter(field_name="audit_object__object_name", label="Object name", lookup_expr="icontains") object_type = ChoiceFilter( field_name="audit_object__object_type", choices=AuditObjectType.choices, label="Object type", ) - object_name = CharFilter(field_name="audit_object__object_name", label="Object name") - username = CharFilter(field_name="user__username", label="Username") - operation_result = CharFilter(field_name="operation_result", label="Operation result") - operation_date = DateFilter(field_name="operation_time", lookup_expr="date", label="Operation date") - time_from = DateTimeFilter(field_name="operation_time", lookup_expr="date", label="Login date, lower border") - time_to = DateTimeFilter(field_name="operation_time", lookup_expr="date", label="Login date, upper border") - operation_time = IsoDateTimeFromToRangeFilter() + operation_result = ChoiceFilter( + field_name="operation_result", label="Operation result", choices=AuditLogOperationResult.choices + ) + operation_type = ChoiceFilter( + field_name="operation_type", label="Operation type", choices=AuditLogOperationType.choices + ) + time_from = DateTimeFilter(field_name="operation_time", lookup_expr="gte") + time_to = DateTimeFilter(field_name="operation_time", lookup_expr="lte") + username = CharFilter(field_name="user__username", label="Username", lookup_expr="icontains") + ordering = OrderingFilter( + fields={"operation_time": "time"}, field_labels={"operation_time": "Time"}, label="ordering" + ) class Meta: model = AuditLog fields = [ - "operation_type", - "operation_result", + "id", "object_name", "object_type", + "operation_result", + "operation_type", + "time_from", + "time_to", "username", + "ordering", ] -class AuditSessionListFilter(FilterSet): - username = CharFilter(field_name="user__username", label="Username") - login_date = DateFilter(field_name="login_time", lookup_expr="date", label="Login date") - time_from = DateTimeFilter(field_name="login_time", lookup_expr="date", label="Login date, lower border") - time_to = DateTimeFilter(field_name="login_time", lookup_expr="date", label="Login date, upper border") - login_time = IsoDateTimeFromToRangeFilter() +class AuditSessionFilterSet(FilterSet): + login = CharFilter(field_name="user__username", label="Login", lookup_expr="icontains") + login_result = ChoiceFilter( + field_name="login_result", label="Login result", choices=AuditSessionLoginResult.choices + ) + time_from = DateTimeFilter(field_name="login_time", lookup_expr="gte", label="Time from") + time_to = DateTimeFilter(field_name="login_time", lookup_expr="lte", label="Time to") + ordering = OrderingFilter( + fields={"login_time": "loginTime"}, field_labels={"login_time": "Login time"}, label="ordering" + ) class Meta: model = AuditSession - fields = [ - "login_result", - ] + fields = ["id", "login", "login_result", "time_from", "time_to", "ordering"] diff --git a/python/api_v2/audit/serializers.py b/python/api_v2/audit/serializers.py new file mode 100644 index 0000000000..9c163750b1 --- 
/dev/null +++ b/python/api_v2/audit/serializers.py @@ -0,0 +1,65 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from audit.models import AuditLog, AuditObject, AuditSession, AuditUser +from rest_framework.fields import CharField, DateTimeField, IntegerField +from rest_framework.serializers import ModelSerializer + + +class AuditObjectSerializer(ModelSerializer): + id = IntegerField(read_only=True, source="object_id") + type = CharField(read_only=True, source="object_type") + name = CharField(read_only=True, source="object_name") + + class Meta: + model = AuditObject + fields = ["id", "type", "name"] + + +class AuditUserShortSerializer(ModelSerializer): + name = CharField(read_only=True, source="username") + + class Meta: + model = AuditUser + fields = ["name"] + + +class AuditLogSerializer(ModelSerializer): + time = DateTimeField(source="operation_time") + name = CharField(read_only=True, source="operation_name") + type = CharField(read_only=True, source="operation_type") + result = CharField(read_only=True, source="operation_result") + object = AuditObjectSerializer(source="audit_object", read_only=True, allow_null=True) + user = AuditUserShortSerializer(read_only=True, allow_null=True) + + class Meta: + model = AuditLog + fields = [ + "id", + "name", + "type", + "result", + "time", + "object", + "user", + "object_changes", + ] + + +class AuditSessionSerializer(ModelSerializer): + user = AuditUserShortSerializer(read_only=True, allow_null=True) + result = CharField(source="login_result") + time = DateTimeField(source="login_time") + + class Meta: + model = AuditSession + fields = ("id", "user", "result", "time") diff --git a/python/api_v2/audit/views.py b/python/api_v2/audit/views.py index 411afd3002..0d4fc5c3bb 100644 --- a/python/api_v2/audit/views.py +++ b/python/api_v2/audit/views.py @@ -9,45 +9,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
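# --- Illustrative sketch of querying the reworked audit-log filters. Parameter
# names come straight from AuditLogFilterSet above; the route path is assumed,
# and the caller must be a superuser (see SuperuserOnlyMixin in the views below).
from rest_framework.test import APIClient

params = {
    "object_name": "my-cluster",          # icontains on audit_object__object_name
    "operation_result": "success",        # AuditLogOperationResult choice
    "time_from": "2023-06-01T00:00:00Z",  # operation_time >= value
    "time_to": "2023-06-30T23:59:59Z",    # operation_time <= value
    "ordering": "-time",                  # maps to -operation_time
}
APIClient().get("/api/v2/audit/operations/", data=params)  # path is an assumption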
-from api_v2.audit.filters import AuditLogListFilter, AuditSessionListFilter -from api_v2.audit.utils import filter_objects_within_time_range -from audit.models import AuditLog, AuditSession, AuditSessionLoginResult -from audit.serializers import AuditLogSerializer, AuditSessionSerializer -from django.db.models import QuerySet -from rest_framework.pagination import LimitOffsetPagination -from rest_framework.response import Response -from rest_framework.status import HTTP_400_BAD_REQUEST -from rest_framework.viewsets import ReadOnlyModelViewSet + +from api_v2.audit.filters import AuditLogFilterSet, AuditSessionFilterSet +from api_v2.audit.serializers import AuditLogSerializer, AuditSessionSerializer +from api_v2.views import CamelCaseReadOnlyModelViewSet +from audit.models import AuditLog, AuditSession +from django_filters.rest_framework.backends import DjangoFilterBackend from adcm.permissions import SuperuserOnlyMixin # pylint: disable=too-many-ancestors -class AuditSessionViewSet(SuperuserOnlyMixin, ReadOnlyModelViewSet): +class AuditSessionViewSet(SuperuserOnlyMixin, CamelCaseReadOnlyModelViewSet): not_superuser_error_code = "AUDIT_LOGINS_FORBIDDEN" - queryset = AuditSession.objects.select_related("user").order_by("-login_time", "-pk") + queryset = AuditSession.objects.select_related("user").order_by("-login_time") serializer_class = AuditSessionSerializer - filterset_class = AuditSessionListFilter - pagination_class = LimitOffsetPagination - - def get_queryset(self, *args, **kwargs) -> QuerySet: - login_result = self.request.query_params.get("login_result", None) - if login_result and login_result.casefold() in AuditSessionLoginResult.values: - self.queryset = self.queryset.filter(login_result=login_result.casefold()) - return filter_objects_within_time_range(self.queryset, self.request.query_params) + filterset_class = AuditSessionFilterSet + filter_backends = (DjangoFilterBackend,) -class AuditLogViewSet(ReadOnlyModelViewSet): +class AuditLogViewSet(SuperuserOnlyMixin, CamelCaseReadOnlyModelViewSet): not_superuser_error_code = "AUDIT_OPERATIONS_FORBIDDEN" - queryset = AuditLog.objects.select_related("audit_object", "user").order_by("-operation_time", "-pk") + queryset = AuditLog.objects.select_related("audit_object", "user").order_by("-operation_time") serializer_class = AuditLogSerializer - filterset_class = AuditLogListFilter - pagination_class = LimitOffsetPagination - - def get_queryset(self, *args, **kwargs) -> QuerySet: # pylint: disable=unused-argument - return filter_objects_within_time_range(self.queryset, self.request.query_params) - - def list(self, request, *args, **kwargs): # pylint: disable=unused-argument - if not AuditLogListFilter(data=self.request.query_params, queryset=self.queryset).is_valid(): - return Response(self.request.query_params, status=HTTP_400_BAD_REQUEST) - return super().list(request, *args, **kwargs) + filterset_class = AuditLogFilterSet + filter_backends = (DjangoFilterBackend,) diff --git a/python/api_v2/bundle/filters.py b/python/api_v2/bundle/filters.py index 9567b378da..dfcdd1fb5a 100644 --- a/python/api_v2/bundle/filters.py +++ b/python/api_v2/bundle/filters.py @@ -9,24 +9,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ from cm.models import Bundle -from django_filters import DateFilter -from django_filters.rest_framework import CharFilter, FilterSet +from django_filters.rest_framework import CharFilter, FilterSet, OrderingFilter class BundleFilter(FilterSet): - name = CharFilter(field_name="name", label="Bundle name") - version = CharFilter(field_name="version", label="Bundle version") - edition = CharFilter(field_name="edition", label="Bundle edition") - date = DateFilter(field_name="date", lookup_expr="date", label="Bundle upload date") - product = CharFilter(field_name="category__value", label="Product name") + display_name = CharFilter(label="Display name", field_name="prototype__display_name", lookup_expr="icontains") + product = CharFilter(label="Product name", field_name="prototype__display_name", lookup_expr="iexact") + ordering = OrderingFilter( + fields={"prototype__display_name": "displayName", "date": "uploadTime"}, + field_labels={"prototype__display_name": "Display name", "date": "Upload time"}, + label="ordering", + ) class Meta: model = Bundle - fields = [ - "name", - "version", - "edition", - "date", - "product", - ] + fields = ["id"] diff --git a/python/api_v2/bundle/serializers.py b/python/api_v2/bundle/serializers.py index fb464022b3..fa5ef2ba6f 100644 --- a/python/api_v2/bundle/serializers.py +++ b/python/api_v2/bundle/serializers.py @@ -10,25 +10,38 @@ # See the License for the specific language governing permissions and # limitations under the License. -from cm.models import Bundle +from cm.models import Bundle, ObjectType from rest_framework.fields import DateTimeField, FileField, SerializerMethodField from rest_framework.serializers import ModelSerializer from adcm.serializers import EmptySerializer +class BundleIdSerializer(ModelSerializer): + class Meta: + model = Bundle + fields = ["id"] + + class BundleListSerializer(ModelSerializer): - display_name = SerializerMethodField() upload_time = DateTimeField(read_only=True, source="date") + display_name = SerializerMethodField() class Meta: model = Bundle - fields = ("id", "name", "display_name", "version", "edition", "upload_time", "category") + fields = ("id", "name", "display_name", "version", "edition", "upload_time", "category", "signature_status") - def get_display_name(self, instance) -> str | None: - prototype = instance.prototype_set.filter(type__in=["adcm", "cluster", "provider"]).first() - return prototype.display_name + @staticmethod + def get_display_name(bundle: Bundle) -> str: + proto = bundle.prototype_set.filter(type__in=[ObjectType.CLUSTER, ObjectType.PROVIDER]).first() + return proto.display_name class UploadBundleSerializer(EmptySerializer): file = FileField(help_text="bundle file for upload") + + +class BundleRelatedSerializer(ModelSerializer): + class Meta: + model = Bundle + fields = ["id"] diff --git a/python/api_v2/bundle/views.py b/python/api_v2/bundle/views.py index 62f5a84406..0ae675f611 100644 --- a/python/api_v2/bundle/views.py +++ b/python/api_v2/bundle/views.py @@ -11,32 +11,45 @@ # limitations under the License. 
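# --- Illustrative only: list-time filtering enabled by the new BundleFilter.
# Parameter names follow the declared filters; ordering values come from its
# OrderingFilter mapping ("displayName", "uploadTime"). The path is assumed.
from rest_framework.test import APIClient

APIClient().get(
    "/api/v2/bundles/",  # assumption: wherever the bundle router is mounted
    data={
        "display_name": "kafka",    # icontains on prototype__display_name
        "ordering": "-uploadTime",  # newest upload (date) first
    },
)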
from api_v2.bundle.filters import BundleFilter from api_v2.bundle.serializers import BundleListSerializer, UploadBundleSerializer -from api_v2.bundle.utils import upload_file -from cm.bundle import delete_bundle, load_bundle -from cm.models import Bundle +from api_v2.views import CamelCaseGenericViewSet +from cm.bundle import delete_bundle, load_bundle, upload_file +from cm.models import Bundle, ObjectType +from django.db.models import F +from django_filters.rest_framework.backends import DjangoFilterBackend +from rest_framework.mixins import ( + CreateModelMixin, + DestroyModelMixin, + ListModelMixin, + RetrieveModelMixin, +) from rest_framework.response import Response from rest_framework.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT -from rest_framework.viewsets import ModelViewSet from adcm.permissions import VIEW_ACTION_PERM, DjangoModelPermissionsAudit -class BundleViewSet(ModelViewSet): # pylint: disable=too-many-ancestors - queryset = Bundle.objects.exclude(name="ADCM").prefetch_related("prototype_set") +class BundleViewSet( # pylint: disable=too-many-ancestors + ListModelMixin, RetrieveModelMixin, DestroyModelMixin, CreateModelMixin, CamelCaseGenericViewSet +): + queryset = ( + Bundle.objects.exclude(name="ADCM") + .annotate(type=F("prototype__type")) + .filter(type__in=[ObjectType.CLUSTER, ObjectType.PROVIDER]) + .order_by(F("prototype__display_name").asc()) + ) serializer_class = BundleListSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_ACTION_PERM] filterset_class = BundleFilter - ordering_fields = ("id", "name", "display_name", "edition", "version", "upload_time") - ordering = ["-date"] + filter_backends = (DjangoFilterBackend,) def create(self, request, *args, **kwargs) -> Response: - serializer = self.get_serializer(data=request.data) + serializer = self.get_serializer_class()(data=request.data) serializer.is_valid(raise_exception=True) - bundle_file = upload_file(request=request) - load_bundle(bundle_file=str(bundle_file)) + file_path = upload_file(file=request.data["file"]) + bundle = load_bundle(bundle_file=str(file_path)) - return Response(status=HTTP_201_CREATED) + return Response(status=HTTP_201_CREATED, data=self.serializer_class(bundle).data) def destroy(self, request, *args, **kwargs) -> Response: bundle = self.get_object() diff --git a/python/api_v2/cluster/filters.py b/python/api_v2/cluster/filters.py index 99824a3138..3393930975 100644 --- a/python/api_v2/cluster/filters.py +++ b/python/api_v2/cluster/filters.py @@ -18,11 +18,12 @@ class ClusterFilter(FilterSet): status = ChoiceFilter(label="Cluster status", choices=ADCMEntityStatus.choices, method="filter_status") - prototype_name = CharFilter(label="Cluster prototype name", method="filter_prototype_name") + prototype_display_name = CharFilter(label="Cluster prototype display name", field_name="prototype__display_name") + name = CharFilter(label="Cluster name", lookup_expr="icontains") class Meta: model = Cluster - fields = ("name", "status", "prototype_name") + fields = ("id", "name", "status", "prototype_display_name") @staticmethod def filter_status(queryset: QuerySet, name: str, value: str) -> QuerySet: # pylint: disable=unused-argument @@ -32,7 +33,3 @@ def filter_status(queryset: QuerySet, name: str, value: str) -> QuerySet: # pyl exclude_pks = {cluster.pk for cluster in queryset if get_cluster_status(cluster=cluster) == 0} return queryset.exclude(pk__in=exclude_pks) - - @staticmethod - def filter_prototype_name(queryset: QuerySet, name: str, value: str) -> 
QuerySet: # pylint: disable=unused-argument - return queryset.filter(prototype__name=value) diff --git a/python/api_v2/cluster/serializers.py b/python/api_v2/cluster/serializers.py index e958e8b699..6966e73651 100644 --- a/python/api_v2/cluster/serializers.py +++ b/python/api_v2/cluster/serializers.py @@ -13,24 +13,34 @@ from typing import Any from api_v2.concern.serializers import ConcernSerializer +from api_v2.prototype.serializers import PrototypeRelatedSerializer from cm.adcm_config.config import get_main_info -from cm.models import Cluster, HostComponent, Prototype +from cm.models import ( + Cluster, + ClusterObject, + Host, + HostComponent, + Prototype, + ServiceComponent, +) from cm.status_api import get_obj_status from cm.upgrade import get_upgrade +from cm.validators import ClusterUniqueValidator, StartMidEndValidator +from django.conf import settings +from rest_framework.fields import CharField, IntegerField from rest_framework.serializers import ( BooleanField, - CharField, ModelSerializer, SerializerMethodField, ) +from adcm.serializers import EmptySerializer from adcm.utils import get_requires class ClusterSerializer(ModelSerializer): status = SerializerMethodField() - prototype_name = CharField(source="prototype.name") - prototype_version = CharField(source="prototype.version") + prototype = PrototypeRelatedSerializer(read_only=True) concerns = ConcernSerializer(many=True, read_only=True) is_upgradable = SerializerMethodField() main_info = SerializerMethodField() @@ -43,8 +53,7 @@ class Meta: "state", "multi_state", "status", - "prototype_name", - "prototype_version", + "prototype", "description", "concerns", "is_upgradable", @@ -64,13 +73,46 @@ def get_main_info(cluster: Cluster) -> str | None: return get_main_info(obj=cluster) -class ClusterCreateSerializer(ModelSerializer): +class ClusterRelatedSerializer(ModelSerializer): class Meta: model = Cluster - fields = ["prototype", "name", "description"] + fields = ["id", "name"] + + +class ClusterCreateSerializer(EmptySerializer): + prototype_id = IntegerField() + name = CharField( + validators=[ + ClusterUniqueValidator(queryset=Cluster.objects.all()), + StartMidEndValidator( + start=settings.ALLOWED_CLUSTER_NAME_START_END_CHARS, + mid=settings.ALLOWED_CLUSTER_NAME_MID_CHARS, + end=settings.ALLOWED_CLUSTER_NAME_START_END_CHARS, + err_code="BAD_REQUEST", + err_msg="Wrong cluster name.", + ), + ], + ) + description = CharField(required=False, allow_blank=True) class ClusterUpdateSerializer(ModelSerializer): + name = CharField( + max_length=80, + validators=[ + ClusterUniqueValidator(queryset=Cluster.objects.all()), + StartMidEndValidator( + start=settings.ALLOWED_CLUSTER_NAME_START_END_CHARS, + mid=settings.ALLOWED_CLUSTER_NAME_MID_CHARS, + end=settings.ALLOWED_CLUSTER_NAME_START_END_CHARS, + err_code="BAD_REQUEST", + err_msg="Wrong cluster name.", + ), + ], + required=False, + help_text="Cluster name", + ) + class Meta: model = Cluster fields = ["name"] @@ -79,10 +121,11 @@ class Meta: class ServicePrototypeSerializer(ModelSerializer): is_required = BooleanField(source="required") depend_on = SerializerMethodField() + license_status = CharField(source="license") class Meta: model = Prototype - fields = ["id", "name", "display_name", "version", "is_required", "depend_on", "is_license_accepted"] + fields = ["id", "name", "display_name", "version", "is_required", "depend_on", "license_status"] @staticmethod def get_depend_on(prototype: Prototype) -> list[dict[str, list[dict[str, Any]] | Any]] | None: @@ -92,10 +135,46 @@ def 
get_depend_on(prototype: Prototype) -> list[dict[str, list[dict[str, Any]] | class HostComponentListSerializer(ModelSerializer): class Meta: model = HostComponent - fields = ["service", "host", "component", "cluster"] + fields = ["id", "host_id", "component_id"] + +class HostComponentPostSerializer(EmptySerializer): + host_id = IntegerField() + component_id = IntegerField() + + +class RelatedComponentStatusSerializer(ModelSerializer): + status = SerializerMethodField() -class HostComponentPostSerializer(ModelSerializer): class Meta: - model = HostComponent - fields = ["service", "host", "component", "cluster"] + model = ServiceComponent + fields = ["id", "name", "display_name", "status"] + + @staticmethod + def get_status(instance: ServiceComponent) -> str: + return get_obj_status(obj=instance) + + +class RelatedServicesStatusesSerializer(ModelSerializer): + status = SerializerMethodField() + components = RelatedComponentStatusSerializer(many=True, source="servicecomponent_set") + + @staticmethod + def get_status(instance: ClusterObject) -> str: + return get_obj_status(obj=instance) + + class Meta: + model = ClusterObject + fields = ["id", "name", "display_name", "status", "components"] + + +class RelatedHostsStatusesSerializer(ModelSerializer): + status = SerializerMethodField() + + @staticmethod + def get_status(instance: ClusterObject) -> str: + return get_obj_status(obj=instance) + + class Meta: + model = Host + fields = ["id", "name", "status"] diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index d041adf6db..cbaf3b2f37 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -15,8 +15,8 @@ from api_v2.component.views import ComponentViewSet from api_v2.config.views import ConfigLogViewSet from api_v2.group_config.views import GroupConfigViewSet -from api_v2.host.views import HostClusterViewSet -from api_v2.imports.views import ClusterImportViewSet, ServiceImportViewSet +from api_v2.host.views import HostClusterViewSet, HostGroupConfigViewSet +from api_v2.imports.views import ImportViewSet from api_v2.service.views import ServiceViewSet from api_v2.upgrade.views import UpgradeViewSet from rest_framework_nested.routers import NestedSimpleRouter, SimpleRouter @@ -46,6 +46,12 @@ cluster_group_config_router.register( prefix=CONFIG_GROUPS_PREFIX, viewset=GroupConfigViewSet, basename="cluster-config-group" ) +cluster_group_config_hosts_router = NestedSimpleRouter( + cluster_group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config" +) +cluster_group_config_hosts_router.register( + prefix=r"hosts", viewset=HostGroupConfigViewSet, basename="cluster-config-group-hosts" +) cluster_group_config_config_router = NestedSimpleRouter( parent_router=cluster_group_config_router, parent_prefix=CONFIG_GROUPS_PREFIX, lookup="config_group" @@ -54,7 +60,7 @@ prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="cluster-config-group-config" ) import_cluster_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") -import_cluster_router.register(prefix=IMPORT_PREFIX, viewset=ClusterImportViewSet, basename="cluster-import") +import_cluster_router.register(prefix=IMPORT_PREFIX, viewset=ImportViewSet, basename="cluster-import") # service service_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") @@ -79,8 +85,14 @@ service_group_config_config_router.register( prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="service-config-group-config" ) 
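For orientation between the router registrations above and the service/component equivalents that follow, the nested group-config routes come out roughly as sketched below. The concrete prefixes (CLUSTER_PREFIX, CONFIG_GROUPS_PREFIX, CONFIG_PREFIX) are assumed here to be "clusters", "config-groups" and "configs"; that is an assumption, not something visible in this hunk.

    # Illustrative sketch only: assumed URL shapes produced by the nested routers above.
    # The lookup kwargs ("group_config_pk" vs "config_group_pk") follow the lookup=...
    # arguments in the registrations; the path prefixes are assumed.
    expected_paths = [
        "clusters/{cluster_pk}/config-groups/",
        "clusters/{cluster_pk}/config-groups/{group_config_pk}/hosts/",
        "clusters/{cluster_pk}/config-groups/{config_group_pk}/configs/",
    ]

    for template in expected_paths:
        print(template.format(cluster_pk=1, group_config_pk=2, config_group_pk=2))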
+service_group_config_hosts_router = NestedSimpleRouter( + service_group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config" +) +service_group_config_hosts_router.register( + prefix=r"hosts", viewset=HostGroupConfigViewSet, basename="service-config-group-hosts" +) import_service_router = NestedSimpleRouter(parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service") -import_service_router.register(prefix=IMPORT_PREFIX, viewset=ServiceImportViewSet, basename="service-import") +import_service_router.register(prefix=IMPORT_PREFIX, viewset=ImportViewSet, basename="service-import") # component component_router = NestedSimpleRouter(parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service") @@ -109,6 +121,13 @@ component_group_config_config_router.register( prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="component-config-group-config" ) +component_group_config_hosts_router = NestedSimpleRouter( + component_group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config" +) +component_group_config_hosts_router.register( + prefix=r"hosts", viewset=HostGroupConfigViewSet, basename="component-config-group-hosts" +) + # host host_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") @@ -132,24 +151,27 @@ *cluster_config_router.urls, *cluster_group_config_router.urls, *cluster_group_config_config_router.urls, + *cluster_group_config_hosts_router.urls, + *import_cluster_router.urls, # service *service_router.urls, *service_action_router.urls, *service_config_router.urls, *service_group_config_router.urls, *service_group_config_config_router.urls, + *service_group_config_hosts_router.urls, + *import_service_router.urls, # component *component_router.urls, *component_action_router.urls, *component_config_router.urls, *component_group_config_router.urls, *component_group_config_config_router.urls, + *component_group_config_hosts_router.urls, # host *host_router.urls, *host_action_router.urls, # other *upgrade_router.urls, *mapping_router.urls, - *import_cluster_router.urls, - *import_service_router.urls, ] diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 8e3f142b45..03b06b6fa9 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ from api_v2.cluster.filters import ClusterFilter from api_v2.cluster.serializers import ( ClusterCreateSerializer, @@ -17,33 +18,57 @@ ClusterUpdateSerializer, HostComponentListSerializer, HostComponentPostSerializer, + RelatedHostsStatusesSerializer, + RelatedServicesStatusesSerializer, ServicePrototypeSerializer, ) from api_v2.component.serializers import ComponentMappingSerializer from api_v2.host.serializers import HostMappingSerializer -from cm.api import add_cluster -from cm.models import Cluster, HostComponent, ObjectType, Prototype +from api_v2.views import CamelCaseGenericViewSet, CamelCaseModelViewSet +from cm.api import add_cluster, retrieve_host_component_objects, set_host_component +from cm.errors import AdcmEx +from cm.issue import update_hierarchy_issues +from cm.models import ( + ADCMEntityStatus, + Cluster, + ClusterObject, + Host, + HostComponent, + ObjectType, + Prototype, + ServiceComponent, +) +from cm.status_api import get_obj_status +from django.db.models import QuerySet +from django_filters.rest_framework.backends import DjangoFilterBackend from guardian.mixins import PermissionListMixin +from guardian.shortcuts import get_objects_for_user from rest_framework.decorators import action +from rest_framework.exceptions import ValidationError from rest_framework.mixins import CreateModelMixin, ListModelMixin from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import HTTP_201_CREATED, HTTP_404_NOT_FOUND -from rest_framework.viewsets import GenericViewSet, ModelViewSet +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_404_NOT_FOUND from adcm.permissions import ( VIEW_CLUSTER_PERM, VIEW_HC_PERM, + VIEW_HOST_PERM, + VIEW_SERVICE_PERM, DjangoModelPermissionsAudit, + check_custom_perm, + get_object_for_user, ) -class ClusterViewSet(PermissionListMixin, ModelViewSet): # pylint:disable=too-many-ancestors - queryset = Cluster.objects.all() +class ClusterViewSet(PermissionListMixin, CamelCaseModelViewSet): # pylint:disable=too-many-ancestors + queryset = Cluster.objects.prefetch_related("prototype", "concerns").order_by("name") serializer_class = ClusterSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_CLUSTER_PERM] filterset_class = ClusterFilter + filter_backends = (DjangoFilterBackend,) + http_method_names = ["get", "post", "patch", "delete"] def get_serializer_class(self): if self.action == "create": @@ -60,15 +85,32 @@ def get_serializer_class(self): def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) + valid = serializer.validated_data + cluster = add_cluster( - prototype=serializer.validated_data["prototype"], - name=serializer.validated_data["name"], - description=serializer.validated_data["description"], + prototype=Prototype.objects.get(pk=valid["prototype_id"], type=ObjectType.CLUSTER), + name=valid["name"], + description=valid["description"], ) return Response(data=ClusterSerializer(cluster).data, status=HTTP_201_CREATED) - @action(methods=["get"], detail=True) + def update(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + valid_data = serializer.validated_data + instance = self.get_object() + + if valid_data.get("name") and valid_data.get("name") != instance.name and instance.state != "created": + raise ValidationError("Name change is available only in the 'created' state") + + 
instance.name = valid_data.get("name", instance.name) + instance.save(update_fields=["name"]) + update_hierarchy_issues(obj=instance) + + return Response(status=HTTP_200_OK, data=ClusterSerializer(instance).data) + + @action(methods=["get"], detail=True, url_path="service-prototypes") def service_prototypes(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument cluster = Cluster.objects.filter(pk=kwargs["pk"]).first() if not cluster: @@ -79,17 +121,59 @@ def service_prototypes(self, request: Request, *args, **kwargs) -> Response: # return Response(data=serializer.data) + @action(methods=["get"], detail=True, url_path="statuses/services") + def services_statuses(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + cluster = get_object_for_user(user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["pk"]) + queryset = get_objects_for_user(user=request.user, perms=VIEW_SERVICE_PERM, klass=ClusterObject).filter( + cluster=cluster + ) + queryset = self.filter_queryset(queryset=queryset, request=request) + + return self.get_paginated_response( + data=RelatedServicesStatusesSerializer(instance=self.paginate_queryset(queryset=queryset), many=True).data + ) + + @action(methods=["get"], detail=True, url_path="statuses/hosts") + def hosts_statuses(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + cluster = get_object_for_user(user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["pk"]) + queryset = get_objects_for_user(user=request.user, perms=VIEW_HOST_PERM, klass=Host).filter(cluster=cluster) + queryset = self.filter_queryset(queryset=queryset, request=request) + + return self.get_paginated_response( + data=RelatedHostsStatusesSerializer(instance=self.paginate_queryset(queryset=queryset), many=True).data + ) + + def filter_queryset(self, queryset: QuerySet, **kwargs) -> QuerySet | list: + if self.action in {"services_statuses", "hosts_statuses"}: + return self._filter_by_status(queryset=queryset, **kwargs) + + return super().filter_queryset(queryset=queryset) + + @staticmethod + def _filter_by_status(request: Request, queryset: QuerySet) -> QuerySet | list: + status_value = request.query_params.get("status", default=None) + if status_value is None: + return queryset + + status_choices = {choice[0] for choice in ADCMEntityStatus.choices} + if status_value not in status_choices: + status_choices_repr = ", ".join(status_choices) + raise AdcmEx(code="BAD_REQUEST", msg=f"Status choices: {status_choices_repr}") + + return [obj for obj in queryset if get_obj_status(obj=obj) == status_value] + class MappingViewSet( # pylint:disable=too-many-ancestors - PermissionListMixin, - GenericViewSet, - ListModelMixin, - CreateModelMixin, + PermissionListMixin, ListModelMixin, CreateModelMixin, CamelCaseGenericViewSet ): - queryset = HostComponent.objects.all() + queryset = HostComponent.objects.select_related("service", "host", "component", "cluster").order_by( + "component__prototype__display_name" + ) serializer_class = HostComponentListSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_HC_PERM] + pagination_class = None + filter_backends = [] def get_serializer_class(self): if self.action == "create": @@ -107,13 +191,22 @@ def list(self, request: Request, *args, **kwargs) -> Response: return super().list(request, *args, **kwargs) def create(self, request: Request, *args, **kwargs) -> Response: - cluster = 
Cluster.objects.filter(pk=kwargs["cluster_pk"]).first() - if not cluster: - return Response(data=f'Cluster with pk "{kwargs["cluster_pk"]}" not found', status=HTTP_404_NOT_FOUND) + cluster = get_object_for_user( + user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["cluster_pk"] + ) + check_custom_perm( + user=request.user, action_type="edit_host_components_of", model=Cluster.__name__.lower(), obj=cluster + ) - request.data["cluster"] = cluster.pk + serializer = self.get_serializer(data=request.data, many=True) + serializer.is_valid(raise_exception=True) - return super().create(request, *args, **kwargs) + host_component_objects = retrieve_host_component_objects(cluster=cluster, plain_hc=serializer.validated_data) + new_host_component = set_host_component(cluster=cluster, host_component_objects=host_component_objects) + + return Response( + data=self.serializer_class(instance=new_host_component, many=True).data, status=HTTP_201_CREATED + ) @action(methods=["get"], detail=False) def hosts(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument @@ -121,10 +214,7 @@ def hosts(self, request: Request, *args, **kwargs) -> Response: # pylint: disab if not cluster: return Response(data=f'Cluster with pk "{kwargs["cluster_pk"]}" not found', status=HTTP_404_NOT_FOUND) - serializer = HostMappingSerializer( - instance=[service_component.host for service_component in self.queryset.filter(cluster_id=cluster.pk)], - many=True, - ) + serializer = HostMappingSerializer(instance=Host.objects.filter(cluster=cluster), many=True) return Response(data=serializer.data) @@ -134,9 +224,6 @@ def components(self, request: Request, *args, **kwargs) -> Response: # pylint: if not cluster: return Response(data=f'Cluster with pk "{kwargs["cluster_pk"]}" not found', status=HTTP_404_NOT_FOUND) - serializer = ComponentMappingSerializer( - instance=[service_component.component for service_component in self.queryset.filter(cluster_id=cluster.pk)], - many=True, - ) + serializer = ComponentMappingSerializer(instance=ServiceComponent.objects.filter(cluster=cluster), many=True) return Response(data=serializer.data) diff --git a/python/api_v2/component/filters.py b/python/api_v2/component/filters.py new file mode 100644 index 0000000000..36b3868ff2 --- /dev/null +++ b/python/api_v2/component/filters.py @@ -0,0 +1,20 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from cm.models import ServiceComponent +from django_filters.rest_framework import FilterSet + + +class ComponentFilter(FilterSet): + class Meta: + model = ServiceComponent + fields = ["id"] diff --git a/python/api_v2/component/serializers.py b/python/api_v2/component/serializers.py index 7f96c5bc93..b61da2f1d9 100644 --- a/python/api_v2/component/serializers.py +++ b/python/api_v2/component/serializers.py @@ -10,17 +10,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
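Before the component serializer changes below, a note on the mapping endpoint rewritten above: MappingViewSet.create() now validates a list of HostComponentPostSerializer entries and hands it to retrieve_host_component_objects() and set_host_component(). A minimal sketch of such a request body follows; the endpoint path and the camelCase key style (suggested by the CamelCase* viewsets) are assumptions.

    import json

    # Each entry pairs a host with a component; field names mirror HostComponentPostSerializer.
    mapping_payload = [
        {"hostId": 1, "componentId": 3},
        {"hostId": 2, "componentId": 3},
    ]

    # Assumed endpoint: POST /api/v2/clusters/<cluster_pk>/mapping/
    print(json.dumps(mapping_payload))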
-from typing import Any
+from api_v2.cluster.serializers import ClusterRelatedSerializer
 from api_v2.concern.serializers import ConcernSerializer
 from api_v2.host.serializers import HostShortSerializer
+from api_v2.prototype.serializers import PrototypeRelatedSerializer
+from api_v2.service.serializers import ServiceNameSerializer, ServiceRelatedSerializer
 from cm.adcm_config.config import get_main_info
 from cm.models import (
     ConcernItem,
     Host,
     HostComponent,
     MaintenanceMode,
-    Prototype,
     ServiceComponent,
 )
 from cm.status_api import get_obj_status
@@ -28,6 +29,8 @@
 from rest_framework.serializers import (
     CharField,
     ChoiceField,
+    IntegerField,
+    JSONField,
     ModelSerializer,
     SerializerMethodField,
 )
@@ -36,9 +39,9 @@ class ComponentMappingSerializer(ModelSerializer):
-    service_name = CharField(source="service.name")
-    service_display_name = CharField(source="service.display_name")
+    service = ServiceNameSerializer(read_only=True)
     depend_on = SerializerMethodField()
+    constraints = JSONField(source="constraint")

     class Meta:
         model = ServiceComponent
@@ -48,21 +51,39 @@ class Meta:
             "display_name",
             "is_maintenance_mode_available",
             "maintenance_mode",
-            "constraint",
-            "service_id",
-            "service_name",
-            "service_display_name",
+            "constraints",
             "depend_on",
+            "service",
         ]

     @staticmethod
-    def get_depend_on(prototype: Prototype) -> list[dict[str, list[dict[str, Any]] | Any]] | None:
-        return get_requires(prototype=prototype)
+    def get_depend_on(instance: ServiceComponent) -> list[dict] | None:
+        requires_data = get_requires(prototype=instance.prototype)
+        if requires_data is None:
+            return None
+
+        out = []
+        for req_dict in requires_data:
+            for req_component in req_dict.get("components", []):
+                out.append(
+                    {
+                        "prototype": {
+                            "id": req_component["prototype_id"],
+                            "name": req_component["name"],
+                            "display_name": req_component["display_name"],
+                        }
+                    }
+                )
+
+        return out


 class ComponentSerializer(ModelSerializer):
     status = SerializerMethodField()
     hosts = SerializerMethodField()
+    prototype = PrototypeRelatedSerializer(read_only=True)
+    cluster = ClusterRelatedSerializer(read_only=True)
+    service = ServiceRelatedSerializer(read_only=True)
     concerns = SerializerMethodField()
     main_info = SerializerMethodField()
@@ -73,7 +94,12 @@ class Meta:
             "name",
             "display_name",
             "status",
+            "state",
+            "multi_state",
             "hosts",
+            "prototype",
+            "cluster",
+            "service",
             "concerns",
             "is_maintenance_mode_available",
             "maintenance_mode",
@@ -108,3 +134,25 @@ class ComponentMaintenanceModeSerializer(ModelSerializer):
     class Meta:
         model = ServiceComponent
         fields = ["maintenance_mode"]
+
+
+class RelatedHostComponentsStatusSerializer(ModelSerializer):
+    id = IntegerField(source="host.id")
+    name = CharField(source="host.name")
+    status = SerializerMethodField()
+
+    class Meta:
+        model = HostComponent
+        fields = ["id", "name", "status"]
+
+    @staticmethod
+    def get_status(instance: HostComponent) -> str:
+        return get_obj_status(obj=instance)
+
+
+class ComponentStatusSerializer(ModelSerializer):
+    host_components = RelatedHostComponentsStatusSerializer(many=True, source="hostcomponent_set")
+
+    class Meta:
+        model = ServiceComponent
+        fields = ["host_components"]
diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py
index 703b6801b1..b29a645778 100644
--- a/python/api_v2/component/views.py
+++ b/python/api_v2/component/views.py
@@ -10,19 +10,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
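The get_depend_on() rewrite above reshapes get_requires() output into a list of nested "prototype" dicts. Here is a self-contained worked example of that reshaping, with made-up values but the same keys the method reads:

    # Sample input mimicking the keys get_depend_on() reads from get_requires();
    # the concrete values are invented for illustration.
    requires_data = [
        {
            "components": [
                {"prototype_id": 10, "name": "zookeeper_server", "display_name": "ZooKeeper Server"},
            ],
        },
    ]

    # Same reshaping as in ComponentMappingSerializer.get_depend_on().
    depend_on = [
        {
            "prototype": {
                "id": component["prototype_id"],
                "name": component["name"],
                "display_name": component["display_name"],
            }
        }
        for requirement in requires_data
        for component in requirement.get("components", [])
    ]

    assert depend_on[0]["prototype"]["display_name"] == "ZooKeeper Server"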
+from api_v2.component.filters import ComponentFilter from api_v2.component.serializers import ( ComponentMaintenanceModeSerializer, ComponentSerializer, + ComponentStatusSerializer, ) +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.api import update_mm_objects from cm.models import Cluster, ClusterObject, ServiceComponent +from django_filters.rest_framework.backends import DjangoFilterBackend from guardian.mixins import PermissionListMixin from rest_framework.decorators import action -from rest_framework.filters import OrderingFilter from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK -from rest_framework.viewsets import ModelViewSet from adcm.permissions import ( CHANGE_MM_PERM, @@ -36,13 +38,13 @@ from adcm.utils import get_maintenance_mode_response -class ComponentViewSet(PermissionListMixin, ModelViewSet): # pylint: disable=too-many-ancestors - queryset = ServiceComponent.objects.all() +class ComponentViewSet(PermissionListMixin, CamelCaseReadOnlyModelViewSet): # pylint: disable=too-many-ancestors + queryset = ServiceComponent.objects.select_related("cluster", "service").order_by("pk") serializer_class = ComponentSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_COMPONENT_PERM] - filter_backends = [OrderingFilter] - ordering_fields = ["id"] + filterset_class = ComponentFilter + filter_backends = [DjangoFilterBackend] def get_queryset(self, *args, **kwargs): cluster = get_object_for_user( @@ -79,3 +81,11 @@ def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # py response.data = serializer.data return response + + @action(methods=["get"], detail=True, url_path="statuses") + def statuses(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + component = get_object_for_user( + user=request.user, perms=VIEW_COMPONENT_PERM, klass=ServiceComponent, id=kwargs["pk"] + ) + + return Response(data=ComponentStatusSerializer(instance=component).data) diff --git a/python/api_v2/concern/serializers.py b/python/api_v2/concern/serializers.py index a9d0c9c503..dbe2ec9b26 100644 --- a/python/api_v2/concern/serializers.py +++ b/python/api_v2/concern/serializers.py @@ -19,8 +19,4 @@ class ConcernSerializer(ModelSerializer): class Meta: model = ConcernItem - fields = ( - "id", - "reason", - "is_blocking", - ) + fields = ("id", "reason", "is_blocking", "cause") diff --git a/python/api_v2/config/serializers.py b/python/api_v2/config/serializers.py index 206b6a7d3d..99eb2b9d19 100644 --- a/python/api_v2/config/serializers.py +++ b/python/api_v2/config/serializers.py @@ -9,21 +9,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any -from cm.adcm_config.config import get_default -from cm.adcm_config.utils import group_is_activatable -from cm.models import ConfigLog, PrototypeConfig -from rest_framework.fields import BooleanField, CharField, JSONField +from cm.models import ConfigLog from rest_framework.serializers import ( DateTimeField, + JSONField, ModelSerializer, SerializerMethodField, ValidationError, ) -from adcm.serializers import EmptySerializer - class ConfigLogListSerializer(ModelSerializer): is_current = SerializerMethodField() @@ -39,9 +34,11 @@ def get_is_current(config_log: ConfigLog) -> bool: class ConfigLogSerializer(ConfigLogListSerializer): + adcm_meta = JSONField(source="attr") + class Meta: model = ConfigLog - fields = ["id", "is_current", "creation_time", "config", "attr", "description"] + fields = ["id", "is_current", "creation_time", "config", "adcm_meta", "description"] def validate_config(self, value): auth_policy = value.get("auth_policy") @@ -60,39 +57,3 @@ def validate_config(self, value): raise ValidationError('"min_password_length" must be less or equal than "max_password_length"') return value - - -class ConfigSerializer(EmptySerializer): - name = CharField() - description = CharField(required=False) - display_name = SerializerMethodField() - subname = CharField() - default = SerializerMethodField(method_name="get_default_field") - value = SerializerMethodField() - type = CharField() - limits = JSONField(required=False) - ui_options = JSONField(required=False) - required = BooleanField() - - @staticmethod - def get_display_name(obj: PrototypeConfig) -> str: - if not obj.display_name: - return obj.name - - return obj.display_name - - @staticmethod - def get_default_field(obj: PrototypeConfig) -> Any: - return get_default(obj) - - def get_value(self, obj: PrototypeConfig) -> Any: # pylint: disable=arguments-renamed - proto = self.context.get("prototype", None) - return get_default(obj, proto) - - -class ConfigSerializerUI(ConfigSerializer): - activatable = SerializerMethodField() - - @staticmethod - def get_activatable(obj): - return bool(group_is_activatable(obj)) diff --git a/python/api_v2/config/utils.py b/python/api_v2/config/utils.py index 0b23b53755..63f80fcf45 100644 --- a/python/api_v2/config/utils.py +++ b/python/api_v2/config/utils.py @@ -9,6 +9,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
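Before the config/utils.py changes below: ConfigLogSerializer above now exposes the stored attr under the adcm_meta name, so a config write carries a body roughly like the sketch here. The camelCase spelling of the keys is an assumption based on the CamelCase* viewsets; the auth_policy rule is the one enforced by validate_config() above.

    # Sketch of a v2 config POST body (assumed camelCase key style).
    config_payload = {
        "config": {"auth_policy": {"min_password_length": 8, "max_password_length": 20}},
        "adcmMeta": {"/ldap_integration": {"isActive": False}},
        "description": "tighten password policy",
    }
    # validate_config() rejects bodies where min_password_length > max_password_length.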
+from collections import defaultdict
+from copy import deepcopy
 from cm.adcm_config.config import config_is_ro, get_default, group_is_activatable
 from cm.models import Action, ADCMEntity, PrototypeConfig
@@ -40,19 +42,22 @@ def get_item_schema(field: PrototypeConfig, parent_object: ADCMEntity) -> dict:
 def get_config_schema(parent_object: ADCMEntity, action: Action | None = None) -> list:
     schema = []
-    top_fields = PrototypeConfig.objects.filter(prototype=parent_object.prototype, action=action, subname="").order_by(
-        "id"
-    )
+
+    if action:
+        # when an action is given, it is enough to look up config prototypes by that action;
+        # for upgrade actions it is important not to rely on the parent object,
+        # because the action comes from the bundle, not from a "created" object such as a cluster/provider
+        config_prototypes = PrototypeConfig.objects.filter(action=action)
+    else:
+        config_prototypes = PrototypeConfig.objects.filter(prototype=parent_object.prototype, action=action)
+
+    top_fields = config_prototypes.filter(subname="").order_by("id")

     for field in top_fields:
         item = get_item_schema(field=field, parent_object=parent_object)

         if field.type == "group":
-            child_fields = (
-                PrototypeConfig.objects.filter(prototype=parent_object.prototype, action=action, name=field.name)
-                .exclude(type="group")
-                .order_by("id")
-            )
+            child_fields = config_prototypes.filter(name=field.name).exclude(type="group").order_by("id")
             for child_field in child_fields:
                 item["children"].append(get_item_schema(field=child_field, parent_object=parent_object))
@@ -60,3 +65,54 @@ def get_config_schema(parent_object: ADCMEntity, action: Action | None = None) -
         schema.append(item)

     return schema
+
+
+def convert_attr_to_adcm_meta(attr: dict) -> dict:
+    attr = deepcopy(attr)
+    adcm_meta = defaultdict(dict)
+    attr.pop("custom_group_keys", None)
+    group_keys = attr.pop("group_keys", {})
+
+    for key, value in attr.items():
+        adcm_meta[f"/{key}"].update({"isActive": value["active"]})
+
+    for key, value in group_keys.items():
+        if isinstance(value, dict):
+            if isinstance(value["value"], bool):
+                adcm_meta[f"/{key}"].update({"isSynchronized": value["value"]})
+            for sub_key, sub_value in value["fields"].items():
+                adcm_meta[f"/{key}/{sub_key}"].update({"isSynchronized": sub_value})
+        else:
+            adcm_meta[f"/{key}"].update({"isSynchronized": value})
+
+    return adcm_meta
+
+
+def convert_adcm_meta_to_attr(adcm_meta: dict) -> dict:
+    attr = defaultdict(dict)
+    try:
+        for key, value in adcm_meta.items():
+            _, key, *sub_key = key.split("/")
+
+            if sub_key:
+                sub_key = sub_key[0]
+
+                if key not in attr["group_keys"]:
+                    attr["group_keys"].update({key: {"value": None, "fields": {}}})
+
+                attr["group_keys"][key]["fields"].update({sub_key: value["isSynchronized"]})
+            else:
+                if "isSynchronized" in value and "isActive" in value:
+                    # activatable group in config-group
+                    attr[key].update({"active": value["isActive"]})
+                    attr["group_keys"].update({key: {"value": value["isSynchronized"], "fields": {}}})
+                elif "isActive" in value:
+                    # activatable group not in config-group
+                    attr[key].update({"active": value["isActive"]})
+                else:
+                    # non-group root field in config-group
+                    attr["group_keys"].update({key: value["isSynchronized"]})
+    except (KeyError, ValueError):
+        return adcm_meta
+
+    return attr
diff --git a/python/api_v2/config/views.py b/python/api_v2/config/views.py
index 930b108a10..66c898400c 100644
--- a/python/api_v2/config/views.py
+++ b/python/api_v2/config/views.py
@@ -11,8 +11,14 @@
 # limitations under the License.
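A worked example for the convert_attr_to_adcm_meta()/convert_adcm_meta_to_attr() pair above, placed here before the config/views.py changes that use them. The group names are invented; the key layout follows the functions as written.

    # Internal attr layout, as stored on ConfigLog.
    attr = {
        "ldap_integration": {"active": True},
        "group_keys": {
            "ldap_integration": {"value": False, "fields": {"ldap_uri": True}},
            "description": False,
        },
        "custom_group_keys": {},
    }

    # Equivalent adcm_meta layout produced by convert_attr_to_adcm_meta(attr).
    adcm_meta = {
        "/ldap_integration": {"isActive": True, "isSynchronized": False},
        "/ldap_integration/ldap_uri": {"isSynchronized": True},
        "/description": {"isSynchronized": False},
    }

    # convert_adcm_meta_to_attr(adcm_meta) restores the attr/group_keys structure;
    # custom_group_keys is dropped on the way in and is not reconstructed.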
from api_v2.config.serializers import ConfigLogListSerializer, ConfigLogSerializer -from api_v2.config.utils import get_config_schema +from api_v2.config.utils import ( + convert_adcm_meta_to_attr, + convert_attr_to_adcm_meta, + get_config_schema, +) +from api_v2.views import CamelCaseGenericViewSet from cm.api import update_obj_config +from cm.errors import AdcmEx from cm.models import ConfigLog from django.contrib.contenttypes.models import ContentType from guardian.mixins import PermissionListMixin @@ -21,19 +27,28 @@ from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED -from rest_framework.viewsets import GenericViewSet from adcm.mixins import GetParentObjectMixin from adcm.permissions import VIEW_CONFIG_PERM, check_config_perm class ConfigLogViewSet( - PermissionListMixin, ListModelMixin, CreateModelMixin, RetrieveModelMixin, GenericViewSet, GetParentObjectMixin + PermissionListMixin, + ListModelMixin, + CreateModelMixin, + RetrieveModelMixin, + GetParentObjectMixin, + CamelCaseGenericViewSet, ): # pylint: disable=too-many-ancestors - queryset = ConfigLog.objects.select_related("obj_ref").all() - serializer_class = ConfigLogSerializer + queryset = ConfigLog.objects.select_related( + "obj_ref__cluster__prototype", + "obj_ref__clusterobject__prototype", + "obj_ref__servicecomponent__prototype", + "obj_ref__hostprovider__prototype", + "obj_ref__host__prototype", + ).order_by("-pk") permission_required = [VIEW_CONFIG_PERM] - ordering = ["-id"] + filter_backends = [] def get_queryset(self, *args, **kwargs): parent_object = self.get_parent_object() @@ -41,7 +56,7 @@ def get_queryset(self, *args, **kwargs): raise NotFound if not parent_object.config: - return self.queryset.none() + return ConfigLog.objects.none() return super().get_queryset(*args, **kwargs).filter(obj_ref=parent_object.config) @@ -49,10 +64,17 @@ def get_serializer_class(self): if self.action == "list": return ConfigLogListSerializer - return self.serializer_class + return ConfigLogSerializer - def create(self, request, *args, **kwargs): + def create(self, request, *args, **kwargs) -> Response: parent_object = self.get_parent_object() + + if parent_object is None: + raise NotFound("Can't find config's parent object") + + if parent_object.config is None: + raise AdcmEx(code="CONFIG_NOT_FOUND", msg="This object has no config") + check_config_perm( user=request.user, action_type="change", @@ -61,16 +83,25 @@ def create(self, request, *args, **kwargs): ) serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - initial_data = serializer.initial_data + config_log = update_obj_config( obj_conf=parent_object.config, - config=initial_data["config"], - attr=initial_data["attr"], - description=initial_data["description"], + config=serializer.validated_data["config"], + attr=convert_adcm_meta_to_attr(adcm_meta=serializer.validated_data["attr"]), + description=serializer.validated_data.get("description", ""), ) + config_log.attr = convert_attr_to_adcm_meta(attr=config_log.attr) + return Response(data=self.get_serializer(config_log).data, status=HTTP_201_CREATED) + def retrieve(self, request, *args, **kwargs) -> Response: + instance = self.get_object() + instance.attr = convert_attr_to_adcm_meta(attr=instance.attr) + serializer = self.get_serializer(instance) + + return Response(data=serializer.data, status=HTTP_200_OK) + @action(methods=["get"], detail=True, 
url_path="schema", url_name="schema") def config_schema(self, request, *args, **kwargs) -> Response: # pylint: disable=unused-argument schema = get_config_schema(parent_object=self.get_parent_object()) diff --git a/python/api_v2/group_config/views.py b/python/api_v2/group_config/views.py index 05baa1f923..c8af764a18 100644 --- a/python/api_v2/group_config/views.py +++ b/python/api_v2/group_config/views.py @@ -12,6 +12,7 @@ from api_v2.group_config.serializers import GroupConfigSerializer from api_v2.host.serializers import HostGroupConfigSerializer +from api_v2.views import CamelCaseModelViewSet from cm.models import GroupConfig from django.contrib.contenttypes.models import ContentType from guardian.mixins import PermissionListMixin @@ -20,17 +21,18 @@ from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_201_CREATED -from rest_framework.viewsets import ModelViewSet from adcm.mixins import GetParentObjectMixin from adcm.permissions import VIEW_GROUP_CONFIG_PERM, check_config_perm -class GroupConfigViewSet(PermissionListMixin, ModelViewSet, GetParentObjectMixin): # pylint: disable=too-many-ancestors - queryset = GroupConfig.objects.all() +class GroupConfigViewSet( + PermissionListMixin, GetParentObjectMixin, CamelCaseModelViewSet +): # pylint: disable=too-many-ancestors + queryset = GroupConfig.objects.order_by("name") serializer_class = GroupConfigSerializer permission_required = [VIEW_GROUP_CONFIG_PERM] - ordering = ["id"] + filter_backends = [] def get_queryset(self, *args, **kwargs): parent_object = self.get_parent_object() @@ -62,24 +64,6 @@ def create(self, request: Request, *args, **kwargs): return Response(data=self.get_serializer(group_config).data, status=HTTP_201_CREATED) - @action(methods=["get", "post"], detail=True) - def hosts(self, request: Request, *args, **kwargs): # pylint: disable=unused-argument - group_config: GroupConfig = self.get_object() - - if request.method == "POST": - serializer = HostGroupConfigSerializer(data=request.data, many=True) - serializer.is_valid(raise_exception=True) - hosts = [host_data["id"] for host_data in serializer.validated_data] - group_config.check_host_candidate([host.pk for host in hosts]) - group_config.hosts.add(*hosts) - - return Response(data=HostGroupConfigSerializer(hosts, many=True).data, status=HTTP_201_CREATED) - - queryset = group_config.hosts.order_by("id") - serializer = HostGroupConfigSerializer(self.paginate_queryset(queryset=queryset), many=True) - - return self.get_paginated_response(data=serializer.data) - @action(methods=["get"], detail=True, url_path="host-candidates", url_name="host-candidates") def host_candidates(self, request: Request, *args, **kwargs): # pylint: disable=unused-argument group_config: GroupConfig = self.get_object() diff --git a/python/api_v2/host/filters.py b/python/api_v2/host/filters.py new file mode 100644 index 0000000000..56d8cf0ba7 --- /dev/null +++ b/python/api_v2/host/filters.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from cm.models import Host +from django_filters.rest_framework import CharFilter, FilterSet, OrderingFilter + + +class HostFilter(FilterSet): + name = CharFilter(label="Host name", field_name="fqdn", lookup_expr="icontains") + hostprovider_name = CharFilter(label="Hostprovider name", field_name="provider__name") + cluster_name = CharFilter(label="Cluster name", field_name="cluster__name") + ordering = OrderingFilter(fields={"fqdn": "name"}, field_labels={"name": "Name"}, label="ordering") + + class Meta: + model = Host + fields = ["name", "hostprovider_name", "cluster_name"] + + +class HostClusterFilter(FilterSet): + name = CharFilter(label="Host name", field_name="fqdn", lookup_expr="icontains") + hostprovider = CharFilter(label="Hostprovider name", field_name="provider__name") + ordering = OrderingFilter(fields={"fqdn": "name"}, field_labels={"name": "Name"}, label="ordering") + + class Meta: + model = Host + fields = ["name", "hostprovider", "ordering"] diff --git a/python/api_v2/host/serializers.py b/python/api_v2/host/serializers.py index c8b8c41bcd..0bd7a12758 100644 --- a/python/api_v2/host/serializers.py +++ b/python/api_v2/host/serializers.py @@ -9,27 +9,33 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +from api_v2.cluster.serializers import RelatedComponentStatusSerializer from api_v2.concern.serializers import ConcernSerializer -from cm.models import Cluster, Host, HostComponent, HostProvider, MaintenanceMode -from cm.status_api import get_host_status +from api_v2.prototype.serializers import PrototypeRelatedSerializer +from cm.models import Cluster, Host, HostProvider, MaintenanceMode, ServiceComponent +from cm.status_api import get_obj_status from cm.validators import HostUniqueValidator, StartMidEndValidator from rest_framework.exceptions import ValidationError from rest_framework.serializers import ( CharField, ChoiceField, + IntegerField, + ListSerializer, ModelSerializer, PrimaryKeyRelatedField, SerializerMethodField, ) from adcm import settings -from adcm.permissions import VIEW_CLUSTER_PERM, VIEW_PROVIDER_PERM +from adcm.permissions import VIEW_CLUSTER_PERM +from adcm.serializers import EmptySerializer class HostProviderSerializer(ModelSerializer): class Meta: model = HostProvider - fields = ["id", "name"] + fields = ["id", "name", "display_name"] class HostClusterSerializer(ModelSerializer): @@ -38,20 +44,18 @@ class Meta: fields = ["id", "name"] -class HostComponentSerializer(ModelSerializer): - name = CharField(source="component.name") - display_name = CharField(source="component.display_name") - +class HCComponentNameSerializer(ModelSerializer): class Meta: - model = HostComponent + model = ServiceComponent fields = ["id", "name", "display_name"] class HostSerializer(ModelSerializer): status = SerializerMethodField() - provider = HostProviderSerializer() + hostprovider = HostProviderSerializer(source="provider") + prototype = PrototypeRelatedSerializer(read_only=True) concerns = ConcernSerializer(many=True) - fqdn = CharField( + name = CharField( max_length=253, help_text="fully qualified domain name", validators=[ @@ -64,28 +68,41 @@ class HostSerializer(ModelSerializer): err_msg="Wrong FQDN.", ), ], + source="fqdn", ) + cluster = HostClusterSerializer(read_only=True) + components = SerializerMethodField() class Meta: model = 
Host fields = [ "id", - "fqdn", + "name", "state", "status", - "provider", + "hostprovider", + "prototype", "concerns", "is_maintenance_mode_available", "maintenance_mode", + "multi_state", + "cluster", + "components", ] @staticmethod - def get_status(host: Host) -> int: - return get_host_status(host=host) + def get_status(host: Host) -> str: + return get_obj_status(obj=host) + + @staticmethod + def get_components(instance: Host) -> list[dict]: + return HCComponentNameSerializer( + instance=[hc.component for hc in instance.hostcomponent_set.all()], many=True + ).data class HostUpdateSerializer(ModelSerializer): - fqdn = CharField( + name = CharField( max_length=253, help_text="fully qualified domain name", required=False, @@ -99,11 +116,12 @@ class HostUpdateSerializer(ModelSerializer): err_msg="Wrong FQDN.", ), ], + source="fqdn", ) class Meta: model = Host - fields = ["fqdn", "cluster"] + fields = ["name", "cluster"] extra_kwargs = {"cluster": {"required": False}} def validate_cluster(self, cluster): @@ -112,45 +130,41 @@ def validate_cluster(self, cluster): if not self.context["request"].user.has_perm(perm=VIEW_CLUSTER_PERM, obj=cluster): raise ValidationError("Current user has no permission to view this cluster") + if not self.context["request"].user.has_perm(perm="cm.map_host_to_cluster", obj=cluster): raise ValidationError("Current user has no permission to map host to this cluster") return cluster -class HostCreateSerializer(HostUpdateSerializer): - class Meta: - model = Host - fields = ["provider", "fqdn", "cluster"] - extra_kwargs = {"fqdn": {"allow_null": False}} - - def validate_provider(self, provider): - if not provider: - raise ValidationError("Missing required field provider") - - if not self.context["request"].user.has_perm(perm=VIEW_PROVIDER_PERM, obj=provider): - raise ValidationError("Current user has no permission to view this provider") - - return provider - - -class ClusterHostSerializer(HostSerializer): - components = HostComponentSerializer(source="hostcomponent_set", many=True) +class HostCreateSerializer(EmptySerializer): + name = CharField( + allow_null=False, + required=True, + max_length=253, + help_text="fully qualified domain name", + validators=[ + HostUniqueValidator(queryset=Host.objects.all()), + StartMidEndValidator( + start=settings.ALLOWED_HOST_FQDN_START_CHARS, + mid=settings.ALLOWED_HOST_FQDN_MID_END_CHARS, + end=settings.ALLOWED_HOST_FQDN_MID_END_CHARS, + err_code="BAD_REQUEST", + err_msg="Wrong FQDN.", + ), + ], + source="fqdn", + ) + hostprovider_id = IntegerField(required=True) + cluster_id = IntegerField(required=False) - class Meta: - model = Host - fields = [*HostSerializer.Meta.fields, "components"] +class ClusterHostCreateSerializer(EmptySerializer): + host_id = IntegerField() -class ClusterHostCreateSerializer(ModelSerializer): - hosts = PrimaryKeyRelatedField( - queryset=Host.objects.select_related("cluster").filter(cluster__isnull=True), many=True - ) - class Meta: - model = Host - fields = ["hosts", "fqdn"] - extra_kwargs = {"fqdn": {"read_only": True}} +class HostListIdCreateSerializer(ListSerializer): # pylint: disable=abstract-method + child = IntegerField() class HostMappingSerializer(ModelSerializer): @@ -168,9 +182,11 @@ class Meta: class HostShortSerializer(ModelSerializer): + name = CharField(source="fqdn") + class Meta: model = Host - fields = ["id", "fqdn"] + fields = ["id", "name"] class HostGroupConfigSerializer(ModelSerializer): @@ -180,3 +196,16 @@ class Meta: model = Host fields = ["id", "name"] extra_kwargs = {"name": 
{"read_only": True}} + + +class ClusterHostStatusSerializer(EmptySerializer): + host_components = SerializerMethodField() + + class Meta: + model = Host + fields = ["host_components"] + + def get_host_components(self, instance: Host) -> list: + return RelatedComponentStatusSerializer( + instance=[hc.component for hc in instance.hostcomponent_set.select_related("component")], many=True + ).data diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index 110707b503..e7fd00dd82 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -10,11 +10,15 @@ # See the License for the specific language governing permissions and # limitations under the License. + +from api_v2.host.filters import HostClusterFilter, HostFilter from api_v2.host.serializers import ( ClusterHostCreateSerializer, - ClusterHostSerializer, + ClusterHostStatusSerializer, HostChangeMaintenanceModeSerializer, HostCreateSerializer, + HostGroupConfigSerializer, + HostListIdCreateSerializer, HostSerializer, HostUpdateSerializer, ) @@ -23,10 +27,12 @@ maintenance_mode, map_list_of_hosts, ) +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.api import add_host_to_cluster, delete_host, remove_host_from_cluster from cm.errors import AdcmEx from cm.issue import update_hierarchy_issues, update_issue_after_deleting -from cm.models import Cluster, Host +from cm.models import Cluster, GroupConfig, Host, HostProvider +from django_filters.rest_framework.backends import DjangoFilterBackend from guardian.mixins import PermissionListMixin from rest_framework.decorators import action from rest_framework.request import Request @@ -37,11 +43,11 @@ HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND, ) -from rest_framework.viewsets import ModelViewSet from adcm.permissions import ( VIEW_CLUSTER_PERM, VIEW_HOST_PERM, + VIEW_PROVIDER_PERM, DjangoModelPermissionsAudit, check_custom_perm, get_object_for_user, @@ -49,13 +55,17 @@ # pylint:disable-next=too-many-ancestors -class HostViewSet(PermissionListMixin, ModelViewSet): - queryset = Host.objects.prefetch_related("provider", "concerns").all() +class HostViewSet(PermissionListMixin, CamelCaseReadOnlyModelViewSet): + queryset = ( + Host.objects.select_related("provider", "cluster") + .prefetch_related("concerns", "hostcomponent_set") + .order_by("fqdn") + ) serializer_class = HostSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_HOST_PERM] - filterset_fields = ["provider__name", "state", "fqdn"] - ordering_fields = ["fqdn"] + filterset_class = HostFilter + filter_backends = (DjangoFilterBackend,) def get_serializer_class(self): if self.action == "create": @@ -67,16 +77,27 @@ def get_serializer_class(self): return self.serializer_class - def create(self, request, *args, **kwargs): + def create(self, request, *args, **kwargs): # pylint:disable=unused-argument serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - valid = serializer.validated_data + + request_hostprovider = get_object_for_user( + user=request.user, + perms=VIEW_PROVIDER_PERM, + klass=HostProvider, + id=serializer.validated_data["hostprovider_id"], + ) + request_cluster = None + if serializer.validated_data.get("cluster_id"): + request_cluster = get_object_for_user( + user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=serializer.validated_data["cluster_id"] + ) host = add_new_host_and_map_it( - provider=valid.get("provider"), fqdn=valid.get("fqdn"), cluster=valid.get("cluster") + provider=request_hostprovider, 
fqdn=serializer.validated_data["fqdn"], cluster=request_cluster ) - return Response(data=HostSerializer(host).data, status=HTTP_201_CREATED) + return Response(data=HostSerializer(instance=host).data, status=HTTP_201_CREATED) def destroy(self, request, *args, **kwargs): # pylint: disable=unused-argument host = self.get_object() @@ -113,7 +134,7 @@ def _host_update(self, request, *args, partial=False, **kwargs): # pylint: disa update_hierarchy_issues(host.provider) update_issue_after_deleting() - return Response(status=HTTP_200_OK) + return Response(status=HTTP_200_OK, data=HostSerializer(host).data) def partial_update(self, request, *args, **kwargs): return self._host_update(request, *args, partial=True, **kwargs) @@ -126,12 +147,11 @@ def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # py return maintenance_mode(request=request, **kwargs) -class HostClusterViewSet(PermissionListMixin, ModelViewSet): # pylint:disable=too-many-ancestors - serializer_class = ClusterHostSerializer +class HostClusterViewSet(PermissionListMixin, CamelCaseReadOnlyModelViewSet): # pylint:disable=too-many-ancestors + serializer_class = HostSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_HOST_PERM] - filterset_fields = ["provider__name", "state", "fqdn"] - ordering_fields = ["fqdn"] + filterset_class = HostClusterFilter def get_serializer_class(self): if self.action == "maintenance_mode": @@ -141,11 +161,15 @@ def get_serializer_class(self): return self.serializer_class - def get_queryset(self, *args, **kwargs): # pylint: disable=unused-argument - return Host.objects.filter(cluster=self.kwargs["cluster_pk"]) + def get_queryset(self, *args, **kwargs): + return ( + Host.objects.filter(cluster=self.kwargs["cluster_pk"]) + .select_related("cluster") + .prefetch_related("hostcomponent_set") + ) - def create(self, request, *args, **kwargs): - serializer = self.get_serializer(data=request.data) + def create(self, request, *args, **kwargs): # pylint:disable=unused-argument + serializer = self.get_serializer(data=request.data, many=True) serializer.is_valid(raise_exception=True) cluster = get_object_for_user( @@ -156,23 +180,23 @@ def create(self, request, *args, **kwargs): check_custom_perm(request.user, "map_host_to", "cluster", cluster) - map_list_of_hosts(hosts=serializer.validated_data["hosts"], cluster=cluster) + target_hosts = Host.objects.filter(pk__in=[host_data["host_id"] for host_data in serializer.validated_data]) + map_list_of_hosts(hosts=target_hosts, cluster=cluster) return Response( - data=ClusterHostSerializer( + data=HostSerializer( instance=Host.objects.prefetch_related("hostcomponent_set").filter(cluster=cluster), many=True, ).data, status=HTTP_201_CREATED, ) - def destroy(self, request, *args, **kwargs): + def destroy(self, request, *args, **kwargs): # pylint:disable=unused-argument host = self.get_object() cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) if host.cluster != cluster: - msg = f"Host #{host.id} doesn't belong to cluster #{cluster.id}" + raise AdcmEx(code="FOREIGN_HOST", msg=f"Host #{host.id} doesn't belong to cluster #{cluster.id}") - raise AdcmEx("FOREIGN_HOST", msg) check_custom_perm(request.user, "unmap_host_from", "cluster", cluster) remove_host_from_cluster(host=host) return Response(status=HTTP_204_NO_CONTENT) @@ -180,3 +204,60 @@ def destroy(self, request, *args, **kwargs): @action(methods=["post"], detail=True, url_path="maintenance-mode") def maintenance_mode(self, 
request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument return maintenance_mode(request=request, **kwargs) + + @action(methods=["get"], detail=True, url_path="statuses") + def statuses(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + host = self.get_object() + cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) + if host.cluster != cluster: + raise AdcmEx(code="FOREIGN_HOST", msg=f"Host #{host.id} doesn't belong to cluster #{cluster.id}") + + return Response(data=ClusterHostStatusSerializer(instance=host).data) + + +class HostGroupConfigViewSet(PermissionListMixin, CamelCaseReadOnlyModelViewSet): # pylint: disable=too-many-ancestors + queryset = ( + Host.objects.select_related("provider", "cluster") + .prefetch_related("concerns", "hostcomponent_set") + .order_by("fqdn") + ) + permission_classes = [DjangoModelPermissionsAudit] + permission_required = [VIEW_HOST_PERM] + filterset_class = HostClusterFilter + filter_backends = (DjangoFilterBackend,) + + def get_queryset(self, *args, **kwargs): + return self.queryset.filter(group_config__id=self.kwargs["group_config_pk"]) + + def create(self, request, *args, **kwargs): # pylint: disable=unused-argument + serializer = self.get_serializer_class()(data=request.data) + serializer.is_valid(raise_exception=True) + host_ids = serializer.validated_data + group_config = GroupConfig.objects.filter(id=self.kwargs["group_config_pk"]).first() + + if not group_config: + raise AdcmEx(code="HOST_GROUP_CONFIG_NOT_FOUND") + + group_config.check_host_candidate(host_ids) + group_config.hosts.add(*host_ids) + + return Response( + data=HostGroupConfigSerializer(group_config.hosts.filter(id__in=host_ids), many=True).data, + status=HTTP_201_CREATED, + ) + + def destroy(self, request, *args, **kwargs): # pylint: disable=unused-argument + group_config = GroupConfig.objects.filter(id=self.kwargs["group_config_pk"]).first() + + if not group_config: + raise AdcmEx(code="HOST_GROUP_CONFIG_NOT_FOUND") + + host: Host = self.get_object() + group_config.hosts.remove(host) + return Response(status=HTTP_204_NO_CONTENT) + + def get_serializer_class(self) -> type[HostGroupConfigSerializer | HostListIdCreateSerializer]: + if self.action == "create": + return HostListIdCreateSerializer + + return HostGroupConfigSerializer diff --git a/python/api_v2/hostprovider/filters.py b/python/api_v2/hostprovider/filters.py index f73742bfc4..f0e831ce10 100644 --- a/python/api_v2/hostprovider/filters.py +++ b/python/api_v2/hostprovider/filters.py @@ -10,18 +10,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
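Before the hostprovider filter changes below, a sketch of the body HostGroupConfigViewSet.create() above accepts: HostListIdCreateSerializer is a ListSerializer of IntegerField, so the payload is a bare JSON array of host IDs. The endpoint path is an assumption based on the "hosts" routers registered in cluster/urls.py.

    # Assumed endpoint: POST /api/v2/clusters/<cluster_pk>/config-groups/<group_config_pk>/hosts/
    host_ids_payload = [4, 7]

    # The view runs group_config.check_host_candidate(host_ids) and then
    # group_config.hosts.add(*host_ids), returning the hosts that were added.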
from cm.models import HostProvider -from django_filters.rest_framework import CharFilter, FilterSet +from django_filters.rest_framework import CharFilter, FilterSet, OrderingFilter class HostProviderFilter(FilterSet): - hostprovider_name = CharFilter(field_name="name", label="Hostprovider name") - type = CharFilter(field_name="prototype__type", label="Hostprovider type") + name = CharFilter(field_name="name", label="Hostprovider name", lookup_expr="icontains") + prototype_display_name = CharFilter( + field_name="prototype__display_name", label="Hostprovider prototype display name" + ) state = CharFilter(field_name="state", label="Hostprovider state") + ordering = OrderingFilter(fields={"name": "name"}, field_labels={"name": "Name"}, label="ordering") class Meta: model = HostProvider - fields = [ - "hostprovider_name", - "state", - "type", - ] + fields = ["name", "state", "prototype_display_name", "ordering"] diff --git a/python/api_v2/hostprovider/serializers.py b/python/api_v2/hostprovider/serializers.py index c8e2021a03..d33d554e34 100644 --- a/python/api_v2/hostprovider/serializers.py +++ b/python/api_v2/hostprovider/serializers.py @@ -11,19 +11,24 @@ # limitations under the License. from api_v2.concern.serializers import ConcernSerializer +from api_v2.prototype.serializers import PrototypeRelatedSerializer from cm.adcm_config.config import get_main_info from cm.models import HostProvider from cm.upgrade import get_upgrade -from rest_framework.serializers import CharField, ModelSerializer, SerializerMethodField +from rest_framework.serializers import ( + CharField, + IntegerField, + ModelSerializer, + SerializerMethodField, +) + +from adcm.serializers import EmptySerializer class HostProviderSerializer(ModelSerializer): - type = CharField(source="prototype.type") state = CharField(read_only=True) - prototype_display_name = CharField(source="prototype.display_name") - prototype_name = CharField(source="prototype.name") + prototype = PrototypeRelatedSerializer(read_only=True) description = CharField(required=False) - prototype_version = CharField(source="prototype.version") is_upgradable = SerializerMethodField() main_info = SerializerMethodField() concerns = ConcernSerializer(read_only=True, many=True) @@ -35,10 +40,7 @@ class Meta: "name", "state", "multi_state", - "type", - "prototype_name", - "prototype_display_name", - "prototype_version", + "prototype", "description", "concerns", "is_upgradable", @@ -54,10 +56,10 @@ def get_main_info(host_provider: HostProvider) -> str | None: return get_main_info(obj=host_provider) -class HostProviderCreateSerializer(ModelSerializer): - class Meta: - model = HostProvider - fields = ["prototype", "name", "description"] +class HostProviderCreateSerializer(EmptySerializer): + prototype_id = IntegerField() + name = CharField() + description = CharField(required=False, allow_blank=True) class HostProviderSerializerForHosts(ModelSerializer): diff --git a/python/api_v2/hostprovider/urls.py b/python/api_v2/hostprovider/urls.py index c52bad5e14..e06e34ff58 100644 --- a/python/api_v2/hostprovider/urls.py +++ b/python/api_v2/hostprovider/urls.py @@ -19,10 +19,10 @@ router = SimpleRouter() router.register("", HostProviderViewSet) -hostprovider_action_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="provider") +hostprovider_action_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") hostprovider_action_router.register(prefix="actions", viewset=ActionViewSet, basename="provider-action") 
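Continuing with the hostprovider URL changes below, here is a sketch of a create body matching HostProviderCreateSerializer above (prototype_id, name, optional description). The camelCase spelling of prototypeId and the endpoint path are assumptions.

    # Assumed endpoint: POST /api/v2/hostproviders/
    provider_payload = {
        "prototypeId": 5,
        "name": "aws-provider",
        "description": "sandbox provider",
    }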
-hostprovider_config_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="provider") +hostprovider_config_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") hostprovider_config_router.register(prefix="configs", viewset=ConfigLogViewSet, basename="provider-config") hostprovider_upgrade_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index a11ad78d93..8c0bcefa81 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -9,31 +9,30 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from api_v2.hostprovider.filters import HostProviderFilter from api_v2.hostprovider.serializers import ( HostProviderCreateSerializer, HostProviderSerializer, ) +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.api import add_host_provider, delete_host_provider -from cm.models import HostProvider +from cm.errors import raise_adcm_ex +from cm.models import HostProvider, ObjectType, Prototype +from django_filters.rest_framework.backends import DjangoFilterBackend from rest_framework.response import Response -from rest_framework.status import ( - HTTP_201_CREATED, - HTTP_204_NO_CONTENT, - HTTP_409_CONFLICT, -) -from rest_framework.viewsets import ModelViewSet +from rest_framework.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT from adcm.permissions import VIEW_HOST_PERM, DjangoModelPermissionsAudit -class HostProviderViewSet(ModelViewSet): # pylint:disable=too-many-ancestors - queryset = HostProvider.objects.select_related("prototype").all() +class HostProviderViewSet(CamelCaseReadOnlyModelViewSet): # pylint:disable=too-many-ancestors + queryset = HostProvider.objects.select_related("prototype").order_by("name") serializer_class = HostProviderSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_HOST_PERM] filterset_class = HostProviderFilter - ordering_fields = ("id", "name", "state", "prototype__display_name", "prototype__version") + filter_backends = (DjangoFilterBackend,) def get_serializer_class(self): if self.action == "create": @@ -44,16 +43,17 @@ def get_serializer_class(self): def create(self, request, *args, **kwargs): # pylint: disable=unused-argument serializer = self.get_serializer(data=request.data) if not serializer.is_valid(): - return Response(serializer.errors, status=HTTP_409_CONFLICT) + return raise_adcm_ex(code="HOSTPROVIDER_CREATE_ERROR") + host_provider = add_host_provider( - prototype=serializer.validated_data["prototype"], + prototype=Prototype.objects.get(pk=serializer.validated_data["prototype_id"], type=ObjectType.PROVIDER), name=serializer.validated_data["name"], - description=serializer.validated_data["description"], + description=serializer.validated_data.get("description", ""), ) return Response(data=HostProviderSerializer(host_provider).data, status=HTTP_201_CREATED) - def destroy(self, request, *args, **kwargs): + def destroy(self, request, *args, **kwargs): # pylint:disable=unused-argument host_provider = self.get_object() delete_host_provider(host_provider) return Response(status=HTTP_204_NO_CONTENT) diff --git a/python/api_v2/imports/serializers.py b/python/api_v2/imports/serializers.py index 595bfe930b..cb77d81586 100644 --- a/python/api_v2/imports/serializers.py +++ 
b/python/api_v2/imports/serializers.py @@ -10,22 +10,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from cm.errors import raise_adcm_ex -from rest_framework.fields import JSONField +from cm.models import ObjectType +from rest_framework.fields import ChoiceField, IntegerField from adcm.serializers import EmptySerializer -class ImportPostSerializer(EmptySerializer): - bind = JSONField() - - @staticmethod - def validate_bind(bind): - if not isinstance(bind, list): - raise_adcm_ex(code="INVALID_INPUT", msg="bind field should be a list") +class SourceSerializer(EmptySerializer): + id = IntegerField() + type = ChoiceField(choices=[ObjectType.CLUSTER, ObjectType.SERVICE]) - for item in bind: - if "cluster_id" not in item: - raise_adcm_ex(code="INVALID_INPUT", msg="'cluster_id' sub-field is required") - return bind +class ImportPostSerializer(EmptySerializer): + source = SourceSerializer() diff --git a/python/api_v2/imports/types.py b/python/api_v2/imports/types.py new file mode 100644 index 0000000000..3e50e7716c --- /dev/null +++ b/python/api_v2/imports/types.py @@ -0,0 +1,70 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TypedDict + +from cm.models import Cluster, ClusterObject, PrototypeImport + + +class CommonImportCandidate(TypedDict): + obj: Cluster | ClusterObject + prototype_import: PrototypeImport + + +class ServiceImportCandidate(TypedDict): + obj: ClusterObject + prototype_import: PrototypeImport + + +class ClusterImportCandidate(TypedDict): + obj: Cluster + prototype_import: PrototypeImport | None + services: list[ServiceImportCandidate] + + +class UICluster(TypedDict): + id: int + name: str + status: str + state: str + + +class UIImportCluster(TypedDict): + id: int + is_multi_bind: bool + is_required: bool + + +class UIImportServices(TypedDict): + id: int + name: str + display_name: str + version: str + is_required: bool + is_multi_bind: bool + + +class UIBindSource(TypedDict): + id: int + type: str + + +class UIBind(TypedDict): + id: int + source: UIBindSource + + +class UIObjectImport(TypedDict): + cluster: UICluster + import_cluster: UIImportCluster | None + import_services: list[UIImportServices] | None + binds: list[UIBind] diff --git a/python/api_v2/imports/utils.py b/python/api_v2/imports/utils.py index 51cc0b8997..784860c60c 100644 --- a/python/api_v2/imports/utils.py +++ b/python/api_v2/imports/utils.py @@ -10,119 +10,214 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from cm.api import version_in +from api_v2.imports.types import ( + ClusterImportCandidate, + CommonImportCandidate, + ServiceImportCandidate, + UIBind, + UIBindSource, + UICluster, + UIImportCluster, + UIImportServices, + UIObjectImport, +) +from cm.api import is_version_suitable from cm.errors import raise_adcm_ex from cm.models import ( Cluster, ClusterBind, ClusterObject, + ObjectType, + Prototype, PrototypeExport, PrototypeImport, ) from cm.status_api import get_obj_status +from django.db.models import QuerySet -def get_cluster_export_dict(cluster: Cluster, export_cluster: Cluster, prototype_import: PrototypeImport | None = None): - bind_list = [] - for bind in ClusterBind.objects.filter(cluster=cluster, source_cluster=export_cluster): - bind_list.append(get_bind_dict(bind=bind, cluster=bind.source_cluster, service=bind.source_service)) - - import_cluster_dict = None - if prototype_import: - import_cluster_dict = { - "prototype_id": cluster.prototype.id, - "is_multibind": prototype_import.multibind if prototype_import else False, - "is_required": prototype_import.required if prototype_import else False, - } - - return { - "id": cluster.id, - "cluster_name": cluster.name, - "cluster_status": get_obj_status(obj=cluster), - "cluster_state": cluster.state, - "import_cluster": import_cluster_dict, - "import_services": None, - "binds": bind_list, - } - - -def get_bind_dict(bind: ClusterBind, cluster: Cluster, service: ClusterObject | None = None) -> dict: - return { - "id": bind.pk, - "cluster_id": cluster.pk, - "service_id": service.pk if service else None, - "prototype_id": service.prototype.pk if service else cluster.prototype.pk, - } - - -def get_imports(obj: Cluster | ClusterObject) -> dict: - objects = {} - checked_export_proto = {} - cluster = obj - if isinstance(obj, ClusterObject): - cluster = obj.cluster +def _format_binds(binds: QuerySet[ClusterBind]) -> list[UIBind]: + binds_data = [] - for proto_import in PrototypeImport.objects.filter(prototype=obj.prototype): - for export in PrototypeExport.objects.filter(prototype__name=proto_import.name): - if checked_export_proto.get(export.prototype.pk): - continue - checked_export_proto[export.prototype.pk] = True + for bind in binds: + source = bind.source_service + if source is None: + source = bind.source_cluster + + binds_data.append(UIBind(id=bind.pk, source=UIBindSource(id=source.pk, type=source.prototype.type))) + + return binds_data + + +def _format_cluster(cluster: Cluster) -> UICluster: + return UICluster(id=cluster.pk, name=cluster.name, status=get_obj_status(obj=cluster), state=cluster.state) + + +def _format_import_cluster(cluster: Cluster, prototype_import: PrototypeImport | None) -> UIImportCluster | None: + if prototype_import is None: + return None + + return UIImportCluster( + id=cluster.pk, is_multi_bind=prototype_import.multibind, is_required=prototype_import.required + ) + + +def _format_import_services(service_candidates: list[ServiceImportCandidate]) -> list[UIImportServices] | None: + if not service_candidates: + return None + + out = [] + for service_data in sorted(service_candidates, key=lambda service_dandidate: service_dandidate["obj"].display_name): + service: ClusterObject = service_data["obj"] + prototype_import: PrototypeImport = service_data["prototype_import"] + + out.append( + UIImportServices( + id=service.pk, + name=service.name, + display_name=service.display_name, + version=service.version, + is_required=prototype_import.required, + is_multi_bind=prototype_import.multibind, + ) + ) + + return out - 
if not version_in(version=export.prototype.version, ver=proto_import): + +def _get_import_candidates_of_single_prototype_export( + prototype_export: PrototypeExport, + prototype_import: PrototypeImport, + queryset: QuerySet[Cluster] | QuerySet[ClusterObject], +) -> list[CommonImportCandidate] | None: + if not is_version_suitable(version=prototype_export.prototype.version, prototype_import=prototype_import): + return None + + out = [] + for obj in queryset: + out.append(CommonImportCandidate(obj=obj, prototype_import=prototype_import)) + + return out + + +def _get_import_candidates(prototype: Prototype) -> list[ClusterImportCandidate]: + cluster_candidates: dict[int, ClusterImportCandidate] = {} + service_candidates: list[ServiceImportCandidate] = [] + + for prototype_import in PrototypeImport.objects.filter(prototype=prototype): + checked_export_proto: set[int] = set() + + for cluster_export in PrototypeExport.objects.filter( + prototype__name=prototype_import.name, prototype__type=ObjectType.CLUSTER + ).select_related("prototype"): + if cluster_export.prototype.pk in checked_export_proto: continue - if export.prototype.type == "cluster": - for export_cluster in Cluster.objects.filter(prototype=export.prototype): - objects[cluster.id] = get_cluster_export_dict( - cluster=cluster, export_cluster=export_cluster, prototype_import=proto_import + checked_export_proto.add(cluster_export.prototype.pk) + + cluster_import_candidates = _get_import_candidates_of_single_prototype_export( + prototype_export=cluster_export, + prototype_import=prototype_import, + queryset=Cluster.objects.filter(prototype=cluster_export.prototype), + ) + if cluster_import_candidates is not None: + for cluster_export_data in cluster_import_candidates: + cluster_candidates[cluster_export_data["obj"].pk] = ClusterImportCandidate( + obj=cluster_export_data["obj"], + prototype_import=cluster_export_data["prototype_import"], + services=[], ) - if export.prototype.type == "service": - for service in ClusterObject.objects.filter(prototype=export.prototype): - service_list = ( - objects[service.cluster.id]["import_services"] if objects.get(service.cluster.id) else [] - ) + for service_export in PrototypeExport.objects.filter( + prototype__name=prototype_import.name, prototype__type=ObjectType.SERVICE + ).select_related("prototype"): + if service_export.prototype.pk in checked_export_proto: + continue - service_list.append( - { - "prototype_id": service.prototype.id, - "name": service.name, - "display_name": service.display_name, - "version": service.version, - "is_required": proto_import.required, - "is_multibind": proto_import.multibind, - } - ) + checked_export_proto.add(service_export.prototype.pk) + + service_import_candidates = _get_import_candidates_of_single_prototype_export( + prototype_export=service_export, + prototype_import=prototype_import, + queryset=ClusterObject.objects.filter(prototype=service_export.prototype).select_related("cluster"), + ) + if service_import_candidates is not None: + service_candidates.extend(service_import_candidates) + + # attach services to corresponding clusters + for service_data in service_candidates: + cluster_pk = service_data["obj"].cluster.pk + cluster_data = cluster_candidates.get(cluster_pk) + if cluster_data is None: + cluster_candidates[cluster_pk] = ClusterImportCandidate( + obj=Cluster.objects.get(pk=cluster_pk), + services=[service_data], + prototype_import=None, + ) + else: + cluster_data["services"].append(service_data) - if not objects.get(service.cluster.id): - 
objects[service.cluster.id] = get_cluster_export_dict( - cluster=cluster, export_cluster=service.cluster - ) + return list(cluster_candidates.values()) - objects[service.cluster.id]["import_services"] = service_list - return objects.values() +def get_imports(obj: Cluster | ClusterObject) -> list[UIObjectImport]: + if isinstance(obj, ClusterObject): + cluster = obj.cluster + service = obj + elif isinstance(obj, Cluster): + cluster = obj + service = None + else: + raise ValueError("Wrong obj type") + + out_data = [] + import_candidates = _get_import_candidates(prototype=obj.prototype) + binds = ClusterBind.objects.filter(cluster=cluster, service=service).select_related( + "source_cluster", "source_service", "source_cluster__prototype", "source_service__prototype" + ) + + for import_candidate in sorted(import_candidates, key=lambda candidate: candidate["obj"].name): + out_data.append( + UIObjectImport( + cluster=_format_cluster(cluster=import_candidate["obj"]), + import_cluster=_format_import_cluster( + cluster=import_candidate["obj"], prototype_import=import_candidate["prototype_import"] + ), + import_services=_format_import_services(service_candidates=import_candidate["services"]), + binds=_format_binds(binds=binds.filter(source_cluster=import_candidate["obj"]).order_by("pk")), + ) + ) + + return out_data def cook_data_for_multibind(validated_data: list, obj: Cluster | ClusterObject) -> list: bind_data = [] - for item in validated_data["bind"]: - if item.get("service_id"): - export_obj = ClusterObject.obj.get(pk=item["service_id"]) - else: - export_obj = Cluster.obj.get(pk=item["cluster_id"]) + for item in validated_data: + if item["source"]["type"] == ObjectType.CLUSTER: + export_obj = Cluster.objects.get(pk=item["source"]["id"]) + cluster_id = export_obj.pk + service_id = None + + elif item["source"]["type"] == ObjectType.SERVICE: + export_obj = ClusterObject.objects.get(pk=item["source"]["id"]) + cluster_id = export_obj.cluster.pk + service_id = export_obj.pk - proto_import = PrototypeImport.objects.filter(name=export_obj.name, prototype=obj.prototype).first() + proto_import = PrototypeImport.objects.filter(name=export_obj.prototype.name, prototype=obj.prototype).first() if not proto_import: raise_adcm_ex(code="INVALID_INPUT", msg="Needed import doesn't exist") + export_id_data = {"cluster_id": cluster_id} + if service_id is not None: + export_id_data["service_id"] = service_id bind_data.append( { "import_id": proto_import.pk, - "export_id": {"cluster_id": item["cluster_id"], "service_id": item.get("service_id")}, + "export_id": export_id_data, } ) - return bind_data + return bind_data diff --git a/python/api_v2/imports/views.py b/python/api_v2/imports/views.py index 495216e1d6..7ad669bc82 100644 --- a/python/api_v2/imports/views.py +++ b/python/api_v2/imports/views.py @@ -12,13 +12,13 @@ from api_v2.imports.serializers import ImportPostSerializer from api_v2.imports.utils import cook_data_for_multibind, get_imports +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.api import multi_bind -from cm.models import Cluster, ClusterObject +from cm.models import Cluster, ClusterObject, PrototypeImport from rest_framework.permissions import IsAuthenticated from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST -from rest_framework.viewsets import ModelViewSet from adcm.permissions import ( CHANGE_IMPORT_PERM, @@ -31,29 +31,43 @@ ) -class ImportViewSet(ModelViewSet): # pylint: 
disable=too-many-ancestors
+class ImportViewSet(CamelCaseReadOnlyModelViewSet):  # pylint: disable=too-many-ancestors
+    queryset = PrototypeImport.objects.all()
     permission_classes = [IsAuthenticated]
     ordering = ["id"]
-
-    def get_serializer_class(self):
-        if self.action == "create":
-            return ImportPostSerializer
-
-        return self.serializer_class
-
-    def get_object_and_check_perm(self, request, **kwargs):
-        raise NotImplementedError
+    filter_backends = []
+    serializer_class = ImportPostSerializer
+
+    def get_object_and_check_perm(self, request) -> Cluster | ClusterObject:
+        if "cluster_pk" in self.kwargs and "service_pk" in self.kwargs:
+            kwargs_get = {"perms": VIEW_SERVICE_PERM, "klass": ClusterObject, "id": self.kwargs["service_pk"]}
+            kwargs_check = {
+                "action_type": VIEW_IMPORT_PERM,
+                "model": ClusterObject.__name__.lower(),
+                "second_perm": VIEW_CLUSTER_BIND,
+            }
+        else:
+            kwargs_get = {"perms": VIEW_CLUSTER_PERM, "klass": Cluster, "id": self.kwargs["cluster_pk"]}
+            kwargs_check = {
+                "action_type": VIEW_IMPORT_PERM,
+                "model": Cluster.__name__.lower(),
+                "second_perm": VIEW_CLUSTER_BIND,
+            }
+
+        obj = get_object_for_user(user=request.user, **kwargs_get)
+        check_custom_perm(user=request.user, obj=obj, **kwargs_check)
+
+        return obj

     def list(self, request: Request, *args, **kwargs) -> Response:
-        obj = self.get_object_and_check_perm(request=request, **kwargs)
-        res = get_imports(obj=obj)
-
-        return Response(data=res)
+        obj = self.get_object_and_check_perm(request=request)
+        return self.get_paginated_response(data=self.paginate_queryset(queryset=get_imports(obj=obj)))

     def create(self, request, *args, **kwargs):  # pylint: disable=unused-argument
-        obj = self.get_object_and_check_perm(request=request, **kwargs)
+        obj = self.get_object_and_check_perm(request=request)
         check_custom_perm(request.user, CHANGE_IMPORT_PERM, "cluster", obj)
-        serializer = self.get_serializer(data=request.data, context={"request": request, "cluster": obj})
+        serializer = self.get_serializer(data=request.data, many=True, context={"request": request, "cluster": obj})
+
         if serializer.is_valid():
             bind_data = cook_data_for_multibind(validated_data=serializer.validated_data, obj=obj)
@@ -65,17 +79,3 @@ def create(self, request, *args, **kwargs):  # pylint: disable=unused-argument
             return Response(get_imports(obj=obj), status=HTTP_200_OK)

         return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
-
-
-class ClusterImportViewSet(ImportViewSet):  # pylint: disable=too-many-ancestors
-    def get_object_and_check_perm(self, request, **kwargs):
-        cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"])
-        check_custom_perm(request.user, VIEW_IMPORT_PERM, "cluster", cluster, VIEW_CLUSTER_BIND)
-        return cluster
-
-
-class ServiceImportViewSet(ImportViewSet):  # pylint: disable=too-many-ancestors
-    def get_object_and_check_perm(self, request, **kwargs):
-        service = get_object_for_user(request.user, VIEW_SERVICE_PERM, ClusterObject, id=kwargs["clusterobject_pk"])
-        check_custom_perm(request.user, VIEW_IMPORT_PERM, "clusterobject", service, VIEW_CLUSTER_BIND)
-        return service
diff --git a/python/api_v2/job/serializers.py b/python/api_v2/job/serializers.py
index 818bfc1424..c526e1f037 100644
--- a/python/api_v2/job/serializers.py
+++ b/python/api_v2/job/serializers.py
@@ -10,106 +10,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
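A small sketch, not part of the diff, of the bind payload the reworked import endpoint now accepts: the view validates the body with many=True against ImportPostSerializer, so the request is a list of sources, and cook_data_for_multibind() resolves each source into the cluster_id/service_id pair expected by cm.api.multi_bind(). The ids below are illustrative.

# Hypothetical POST body for the cluster/service imports endpoint.
import_binds = [
    {"source": {"id": 1, "type": "cluster"}},
    {"source": {"id": 7, "type": "service"}},
]
# cook_data_for_multibind() turns each item into
#   {"import_id": <PrototypeImport pk>, "export_id": {"cluster_id": ..., "service_id": ...}}
# where "service_id" is present only for service sources.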
-from datetime import timedelta - -from cm.models import Action, JobLog, JobStatus, LogStorage, SubAction, TaskLog -from rest_framework.fields import SerializerMethodField -from rest_framework.serializers import ModelSerializer - - -class TaskRetrieveByJobSerializer(ModelSerializer): - objects = SerializerMethodField() - terminatable = SerializerMethodField() - object_type = SerializerMethodField() - duration = SerializerMethodField() - - class Meta: - model = TaskLog - fields = ( - "id", - "status", - "start_date", - "finish_date", - "duration", - "objects", - "terminatable", - "object_type", - ) - - @staticmethod - def get_objects(obj: TaskLog) -> list: - objects = [{"type": k, **v} for k, v in obj.selector.items()] - - return objects - - @staticmethod - def get_terminatable(obj: TaskLog): - if obj.action: - allow_to_terminate = obj.action.allow_to_terminate - else: - allow_to_terminate = False - - if allow_to_terminate and obj.status in {JobStatus.CREATED, JobStatus.RUNNING}: - return True - - return False - - @staticmethod - def get_object_type(obj: TaskLog): - if obj.action: - return obj.action.prototype.type - - return None - - @staticmethod - def get_duration(obj: JobLog) -> timedelta: - return obj.finish_date - obj.start_date - - -class JobListSerializer(ModelSerializer): - duration = SerializerMethodField() - name = SerializerMethodField() - display_name = SerializerMethodField() - - class Meta: - model = JobLog - fields = ( - "id", - "name", - "display_name", - "status", - "start_date", - "finish_date", - "duration", - ) - - @staticmethod - def get_duration(obj: JobLog) -> timedelta: - return obj.finish_date - obj.start_date - - @classmethod - def get_display_name(cls, obj: JobLog) -> str | None: - job_action = cls._get_job_action_obj(obj) - return job_action.display_name if job_action else None - - @classmethod - def get_name(cls, obj: JobLog) -> str | None: - job_action = cls._get_job_action_obj(obj) - return job_action.name if job_action else None - - @staticmethod - def _get_job_action_obj(obj: JobLog) -> Action | SubAction | None: - if obj.sub_action: - return obj.sub_action - elif obj.action: - return obj.action - else: - return None +from api_v2.task.serializers import JobListSerializer, TaskRetrieveByJobSerializer +from cm.models import JobLog +from rest_framework.fields import DateTimeField class JobRetrieveSerializer(JobListSerializer): - parent_task = TaskRetrieveByJobSerializer(source="task") - log_files = SerializerMethodField() + parent_task = TaskRetrieveByJobSerializer(source="task", allow_null=True) + start_time = DateTimeField(source="start_date") + end_time = DateTimeField(source="finish_date") class Meta: model = JobLog @@ -119,23 +28,9 @@ class Meta: "display_name", "parent_task", "status", - "start_date", - "finish_date", + "start_time", + "end_time", "duration", "task_id", - "log_files", + "is_terminatable", ) - - def get_log_files(self, obj: JobLog) -> list[dict[str, str]]: - logs = [] - for log_storage in LogStorage.objects.filter(job=obj): - logs.append( - { - "name": log_storage.name, - "type": log_storage.type, - "format": log_storage.format, - "id": log_storage.pk, - }, - ) - - return logs diff --git a/python/api_v2/job/urls.py b/python/api_v2/job/urls.py index 089f39f16b..f0236f25ec 100644 --- a/python/api_v2/job/urls.py +++ b/python/api_v2/job/urls.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from api_v2.job.views import JobViewSet -from api_v2.log_storage.views import LogStorageViewSet +from api_v2.log_storage.views import LogStorageJobViewSet from rest_framework.routers import SimpleRouter from rest_framework_nested.routers import NestedSimpleRouter @@ -18,6 +18,6 @@ router.register("", JobViewSet) log_storage_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="job") -log_storage_router.register(prefix="logs", viewset=LogStorageViewSet, basename="log") +log_storage_router.register(prefix="logs", viewset=LogStorageJobViewSet, basename="log") urlpatterns = [*router.urls, *log_storage_router.urls] diff --git a/python/api_v2/job/views.py b/python/api_v2/job/views.py index a93e6ad040..c5f3886f68 100644 --- a/python/api_v2/job/views.py +++ b/python/api_v2/job/views.py @@ -9,24 +9,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - from api.job.views import VIEW_JOBLOG_PERMISSION -from api_v2.job.serializers import JobListSerializer, JobRetrieveSerializer +from api_v2.job.serializers import JobRetrieveSerializer +from api_v2.task.serializers import JobListSerializer +from api_v2.views import CamelCaseGenericViewSet from cm.models import JobLog, JobStatus from cm.status_api import Event from rest_framework.decorators import action +from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK -from rest_framework.viewsets import ModelViewSet from adcm.permissions import check_custom_perm, get_object_for_user -class JobViewSet(ModelViewSet): # pylint: disable=too-many-ancestors - queryset = JobLog.objects.select_related("task").all() +class JobViewSet( + ListModelMixin, RetrieveModelMixin, CreateModelMixin, CamelCaseGenericViewSet +): # pylint: disable=too-many-ancestors + queryset = JobLog.objects.select_related("task__action").order_by("pk") serializer_class = JobListSerializer - http_method_names = ["get", "post"] + filter_backends = [] def get_serializer_class(self): if self.action == "retrieve": @@ -35,9 +38,9 @@ def get_serializer_class(self): return super().get_serializer_class() @action(methods=["post"], detail=True) - def terminate(self, request: Request, job_pk: int) -> Response: - job: JobLog = get_object_for_user(request.user, VIEW_JOBLOG_PERMISSION, JobLog, id=job_pk) - check_custom_perm(request.user, "change", JobLog, job_pk) + def terminate(self, request: Request, pk: int) -> Response: + job: JobLog = get_object_for_user(request.user, VIEW_JOBLOG_PERMISSION, JobLog, id=pk) + check_custom_perm(request.user, "change", JobLog, pk) event = Event() event.set_job_status(job=job, status=JobStatus.ABORTED.value) diff --git a/python/api_v2/log_storage/views.py b/python/api_v2/log_storage/views.py index 74abf87870..2a4db618ca 100644 --- a/python/api_v2/log_storage/views.py +++ b/python/api_v2/log_storage/views.py @@ -11,40 +11,161 @@ # limitations under the License. 
# pylint: disable=duplicate-code +import io import re +import tarfile from pathlib import Path from api.job.views import VIEW_LOGSTORAGE_PERMISSION from api_v2.log_storage.serializers import LogStorageSerializer -from cm.models import LogStorage +from api_v2.views import CamelCaseGenericViewSet +from cm.errors import raise_adcm_ex +from cm.models import ActionType, JobLog, LogStorage, TaskLog from django.http import HttpResponse from rest_framework.decorators import action +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request -from rest_framework.viewsets import ModelViewSet from adcm import settings -from adcm.permissions import get_object_for_user +from adcm.permissions import VIEW_TASKLOG_PERMISSION, get_object_for_user +from adcm.utils import str_remove_non_alnum + + +def get_task_download_archive_name(task: TaskLog) -> str: + archive_name = f"{task.pk}.tar.gz" + + if not task.action: + return archive_name + + action_display_name = str_remove_non_alnum(value=task.action.display_name) or str_remove_non_alnum( + value=task.action.name, + ) + if action_display_name: + archive_name = f"{action_display_name}_{archive_name}" + + if task.object_type.name in { + "adcm", + "cluster", + "cluster object", + "service component", + "host provider", + }: + action_prototype_display_name = str_remove_non_alnum( + value=task.action.prototype.display_name, + ) or str_remove_non_alnum(value=task.action.prototype.name) + if action_prototype_display_name: + archive_name = f"{action_prototype_display_name}_{archive_name}" + + if not task.task_object: + return archive_name + + obj_name = None + if task.object_type.name == "cluster": + obj_name = task.task_object.name + elif task.object_type.name == "cluster object": + obj_name = task.task_object.cluster.name + elif task.object_type.name == "service component": + obj_name = task.task_object.cluster.name + elif task.object_type.name == "host provider": + obj_name = task.task_object.name + elif task.object_type.name == "host": + obj_name = task.task_object.fqdn + + if obj_name: + archive_name = f"{str_remove_non_alnum(value=obj_name)}_{archive_name}" + + return archive_name + + +def get_task_download_archive_file_handler(task: TaskLog) -> io.BytesIO: + jobs = JobLog.objects.filter(task=task) + + if task.action and task.action.type == ActionType.JOB: + task_dir_name_suffix = str_remove_non_alnum(value=task.action.display_name) or str_remove_non_alnum( + value=task.action.name, + ) + else: + task_dir_name_suffix = None + + file_handler = io.BytesIO() + with tarfile.open(fileobj=file_handler, mode="w:gz") as tar_file: + for job in jobs: + if task_dir_name_suffix is None: + dir_name_suffix = "" + if job.sub_action: + dir_name_suffix = str_remove_non_alnum(value=job.sub_action.display_name) or str_remove_non_alnum( + value=job.sub_action.name, + ) + else: + dir_name_suffix = task_dir_name_suffix + + directory = Path(settings.RUN_DIR, str(job.pk)) + if directory.is_dir(): + files = [item for item in Path(settings.RUN_DIR, str(job.pk)).iterdir() if item.is_file()] + for log_file in files: + tarinfo = tarfile.TarInfo(f'{f"{job.pk}-{dir_name_suffix}".strip("-")}/{log_file.name}') + tarinfo.size = log_file.stat().st_size + tar_file.addfile(tarinfo=tarinfo, fileobj=io.BytesIO(log_file.read_bytes())) + else: + log_storages = LogStorage.objects.filter(job=job, type__in={"stdout", "stderr"}) + for log_storage in log_storages: + tarinfo = tarfile.TarInfo( + f'{f"{job.pk}-{dir_name_suffix}".strip("-")}' 
f"/{log_storage.name}-{log_storage.type}.txt", + ) + body = io.BytesIO(bytes(log_storage.body, settings.ENCODING_UTF_8)) + tarinfo.size = body.getbuffer().nbytes + tar_file.addfile(tarinfo=tarinfo, fileobj=body) + + return file_handler # pylint:disable-next=too-many-ancestors -class LogStorageViewSet(ModelViewSet): - queryset = LogStorage.objects.all() +class LogStorageViewSet(ListModelMixin, RetrieveModelMixin, CamelCaseGenericViewSet): + queryset = LogStorage.objects.order_by("pk") serializer_class = LogStorageSerializer - filterset_fields = ("name", "type", "format") - ordering_fields = ("id", "name") + filter_backends = [] + pagination_class = None permission_required = ["cm.view_logstorage"] lookup_url_kwarg = "log_pk" - ordering = ["id"] + + def list(self, request, *args, **kwargs): + if "task_pk" in self.request.parser_context["kwargs"]: + raise_adcm_ex("LOG_FOR_TASK_VIEW_NOT_ALLOWED", "The task view does not allow to read logs") + return super().list(request, *args, **kwargs) + + def retrieve(self, request, *args, **kwargs): + if "task_pk" in self.request.parser_context["kwargs"]: + raise_adcm_ex("LOG_FOR_TASK_VIEW_NOT_ALLOWED", "The task view does not allow to read logs") + return super().retrieve(request, *args, **kwargs) def get_queryset(self, *args, **kwargs): # pylint: disable=unused-argument - queryset = super().get_queryset() - if "job_pk" in self.kwargs: - queryset = queryset.filter(job_id=self.kwargs["job_pk"]) + if "task_pk" in self.request.parser_context["kwargs"]: + return self.queryset.filter(job__task_id=self.request.parser_context["kwargs"]["task_pk"]) + elif "job_pk" in self.kwargs: + self.queryset = self.queryset.filter(job_id=self.kwargs["job_pk"]) + + return self.queryset - return queryset - @action(methods=["post"], detail=True) - def download(self, request: Request, job_pk: int, log_pk: int): # pylint: disable=unused-argument +# pylint:disable-next=too-many-ancestors +class LogStorageTaskViewSet(LogStorageViewSet): + @action(methods=["get"], detail=False) + def download(self, request: Request, task_pk: int) -> HttpResponse: + task = get_object_for_user(request.user, VIEW_TASKLOG_PERMISSION, TaskLog, id=task_pk) + response = HttpResponse( + content=get_task_download_archive_file_handler(task=task).getvalue(), + content_type="application/tar+gzip", + ) + response["Content-Disposition"] = f'attachment; filename="{get_task_download_archive_name(task=task)}"' + + return response + + +# pylint:disable-next=too-many-ancestors +class LogStorageJobViewSet(LogStorageViewSet): + @action(methods=["get"], detail=True) + def download(self, request: Request, **kwargs) -> HttpResponse: + job_pk, log_pk = kwargs["job_pk"], kwargs["log_pk"] log_storage = get_object_for_user( user=request.user, perms=VIEW_LOGSTORAGE_PERMISSION, klass=LogStorage, id=log_pk, job__id=job_pk ) diff --git a/python/api_v2/prototype/filters.py b/python/api_v2/prototype/filters.py index 1f81bf7b3d..caedb94306 100644 --- a/python/api_v2/prototype/filters.py +++ b/python/api_v2/prototype/filters.py @@ -11,15 +11,15 @@ # limitations under the License. 
from cm.models import ObjectType, Prototype -from django_filters import ChoiceFilter +from django_filters import CharFilter, ChoiceFilter, NumberFilter from django_filters.rest_framework import FilterSet class PrototypeFilter(FilterSet): - type = ChoiceFilter(choices=ObjectType.choices) + bundle_id = NumberFilter(field_name="bundle__id", label="Bundle ID") + type = ChoiceFilter(choices=ObjectType.choices, label="Type") + display_name = CharFilter(label="Display name") class Meta: model = Prototype - fields = [ - "type", - ] + fields = ["id", "type", "bundle_id"] diff --git a/python/api_v2/prototype/serializers.py b/python/api_v2/prototype/serializers.py index aa261b30b9..8cf8f36b4b 100644 --- a/python/api_v2/prototype/serializers.py +++ b/python/api_v2/prototype/serializers.py @@ -11,8 +11,9 @@ # limitations under the License. from typing import Dict +from api_v2.bundle.serializers import BundleIdSerializer from api_v2.prototype.utils import get_license_text -from cm.models import LICENSE_STATE, Prototype +from cm.models import Prototype from rest_framework.fields import CharField, IntegerField, SerializerMethodField from rest_framework.serializers import ModelSerializer @@ -21,7 +22,7 @@ class PrototypeListSerializer(ModelSerializer): license = SerializerMethodField() - bundle_id = IntegerField(source="bundle.id") + bundle = BundleIdSerializer(read_only=True) class Meta: model = Prototype @@ -31,8 +32,9 @@ class Meta: "display_name", "description", "type", - "bundle_id", + "bundle", "license", + "version", ) def get_license(self, obj: Prototype) -> Dict: @@ -42,32 +44,28 @@ def get_license(self, obj: Prototype) -> Dict: class PrototypeVersionSerializer(ModelSerializer): id = IntegerField(source="pk") version = CharField() - bundle_id = CharField() - is_license_accepted = SerializerMethodField() + bundle = BundleIdSerializer(read_only=True) + license_status = CharField(source="license") class Meta: model = Prototype - fields = ("id", "bundle_id", "version", "is_license_accepted") - - def get_versions(self, obj: Prototype) -> Dict: - return { - "id": obj.id, - "version": obj.version, - "is_license_accepted": self.get_is_license_accepted(obj), - "bundle_id": obj.bundle.id, - } - - def get_is_license_accepted(self, obj: Prototype): - return obj.license == LICENSE_STATE[1][0] + fields = ("id", "bundle", "version", "license_status") class PrototypeTypeSerializer(EmptySerializer): name = CharField() + display_name = CharField() versions = SerializerMethodField() @staticmethod def get_versions(obj: Prototype) -> str | None: - queryset = Prototype.objects.filter(type=obj.type, name=obj.display_name).order_by("-version") + queryset = Prototype.objects.filter(type=obj.type, name=obj.name).order_by("-version") serializer = PrototypeVersionSerializer(instance=queryset, many=True) return serializer.data + + +class PrototypeRelatedSerializer(ModelSerializer): + class Meta: + model = Prototype + fields = ("id", "name", "display_name", "type", "version") diff --git a/python/api_v2/prototype/views.py b/python/api_v2/prototype/views.py index 0bc4fb8eca..81f6246e82 100644 --- a/python/api_v2/prototype/views.py +++ b/python/api_v2/prototype/views.py @@ -15,25 +15,23 @@ PrototypeTypeSerializer, ) from api_v2.prototype.utils import accept_license +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.models import Prototype from django.db.models import QuerySet from rest_framework.decorators import action from rest_framework.request import Request from rest_framework.response import Response from 
rest_framework.status import HTTP_200_OK -from rest_framework.viewsets import ModelViewSet from adcm.permissions import VIEW_CLUSTER_PERM, DjangoModelPermissionsAudit -class PrototypeViewSet(ModelViewSet): # pylint: disable=too-many-ancestors - queryset = Prototype.objects.exclude(type="adcm").select_related("bundle") +class PrototypeViewSet(CamelCaseReadOnlyModelViewSet): # pylint: disable=too-many-ancestors + queryset = Prototype.objects.exclude(type="adcm").select_related("bundle").order_by("name") serializer_class = PrototypeListSerializer permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_CLUSTER_PERM] filterset_class = PrototypeFilter - ordering_fields = ["name", "bundle"] - ordering = ["name"] def get_serializer_class(self): if self.action == "versions": diff --git a/python/api_v2/rbac/__init__.py b/python/api_v2/rbac/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/rbac/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/rbac/group/__init__.py b/python/api_v2/rbac/group/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/rbac/group/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/rbac/group/filters.py b/python/api_v2/rbac/group/filters.py new file mode 100644 index 0000000000..cdd7ba9b8d --- /dev/null +++ b/python/api_v2/rbac/group/filters.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from django_filters.rest_framework import ( + CharFilter, + ChoiceFilter, + FilterSet, + OrderingFilter, +) +from rbac.models import OriginType + + +class GroupFilter(FilterSet): + display_name = CharFilter(lookup_expr="icontains") + type = ChoiceFilter(choices=OriginType.choices) + ordering = OrderingFilter( + fields={"display_name": "displayName"}, + field_labels={"display_name": "Display name"}, + ) diff --git a/python/api_v2/rbac/group/serializers.py b/python/api_v2/rbac/group/serializers.py new file mode 100644 index 0000000000..32aa59da1a --- /dev/null +++ b/python/api_v2/rbac/group/serializers.py @@ -0,0 +1,57 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from rbac.models import Group, User +from rest_framework.fields import IntegerField +from rest_framework.serializers import ( + CharField, + ManyRelatedField, + ModelSerializer, + PrimaryKeyRelatedField, +) + +from adcm.serializers import EmptySerializer + + +class RelatedUserSerializer(ModelSerializer): + username = CharField(read_only=True) + + class Meta: + model = User + fields = ["id", "username"] + + +class GroupSerializer(ModelSerializer): + users = RelatedUserSerializer(source="user_set", many=True) + + class Meta: + model = Group + fields = ["id", "name", "display_name", "description", "users", "type"] + + +class GroupRelatedSerializer(EmptySerializer): + id = IntegerField() + name = CharField() + display_name = CharField() + + +class GroupCreateUpdateSerializer(ModelSerializer): + users = ManyRelatedField( + child_relation=PrimaryKeyRelatedField(queryset=User.objects.all()), source="user_set", required=False + ) + + class Meta: + model = Group + fields = ("display_name", "description", "users") + extra_kwargs = { + "display_name": {"required": True}, + "description": {"default": "", "allow_blank": True, "required": False}, + } diff --git a/python/api_v2/rbac/group/urls.py b/python/api_v2/rbac/group/urls.py new file mode 100644 index 0000000000..4799639323 --- /dev/null +++ b/python/api_v2/rbac/group/urls.py @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.rbac.group.views import GroupViewSet +from rest_framework.routers import SimpleRouter + +group_router = SimpleRouter() +group_router.register(prefix="groups", viewset=GroupViewSet) diff --git a/python/api_v2/rbac/group/views.py b/python/api_v2/rbac/group/views.py new file mode 100644 index 0000000000..e757893e24 --- /dev/null +++ b/python/api_v2/rbac/group/views.py @@ -0,0 +1,78 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.group.filters import GroupFilter +from api_v2.rbac.group.serializers import GroupCreateUpdateSerializer, GroupSerializer +from api_v2.views import CamelCaseModelViewSet +from cm.errors import AdcmEx +from guardian.mixins import PermissionListMixin +from rbac.models import Group +from rbac.services.group import create as create_group +from rbac.services.group import update as update_group +from rest_framework.permissions import DjangoModelPermissions +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED + +from adcm.permissions import VIEW_GROUP_PERMISSION + + +class GroupViewSet(PermissionListMixin, CamelCaseModelViewSet): # pylint:disable=too-many-ancestors + queryset = Group.objects.order_by("display_name").prefetch_related("user_set") + filterset_class = GroupFilter + permission_classes = (DjangoModelPermissions,) + permission_required = [VIEW_GROUP_PERMISSION] + + def get_serializer_class(self) -> type[GroupSerializer | GroupCreateUpdateSerializer]: + if self.action in ("create", "update", "partial_update"): + return GroupCreateUpdateSerializer + + return GroupSerializer + + def create(self, request: Request, *args, **kwargs) -> Response: + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + users = [{"id": user.pk} for user in serializer.validated_data.pop("user_set", [])] + group = create_group( + name_to_display=serializer.validated_data["display_name"], + description=serializer.validated_data.get("description", ""), + user_set=users, + ) + + return Response(data=GroupSerializer(instance=group).data, status=HTTP_201_CREATED) + + def update(self, request: Request, *args, **kwargs) -> Response: + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + users = [{"id": user.pk} for user in serializer.validated_data.pop("user_set", [])] + group = update_group( + group=self.get_object(), + partial=kwargs.pop("partial", False), + name_to_display=serializer.validated_data["display_name"], + description=serializer.validated_data.get("description", ""), + user_set=users, + ) + + return Response(data=GroupSerializer(instance=group).data, status=HTTP_200_OK) + + def destroy(self, request: Request, *args, **kwargs) -> Response: + instance: Group = self.get_object() + + if instance.built_in: + raise AdcmEx(code="GROUP_DELETE_ERROR") + + if instance.policy_set.exists(): + raise AdcmEx(code="GROUP_DELETE_ERROR", msg="Group with policy should not be 
deleted") + + return super().destroy(request=request, *args, **kwargs) diff --git a/python/api_v2/rbac/policy/__init__.py b/python/api_v2/rbac/policy/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/rbac/policy/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/rbac/policy/filters.py b/python/api_v2/rbac/policy/filters.py new file mode 100644 index 0000000000..cb4cec1f98 --- /dev/null +++ b/python/api_v2/rbac/policy/filters.py @@ -0,0 +1,27 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django_filters.rest_framework import CharFilter, FilterSet, OrderingFilter +from rbac.models import Policy + + +class PolicyFilter(FilterSet): + name = CharFilter(label="Name", field_name="name", lookup_expr="icontains") + ordering = OrderingFilter( + fields={"name": "name"}, + field_labels={"name": "Name"}, + label="ordering", + ) + + class Meta: + model = Policy + fields = ["id"] diff --git a/python/api_v2/rbac/policy/serializers.py b/python/api_v2/rbac/policy/serializers.py new file mode 100644 index 0000000000..1cdd1060cb --- /dev/null +++ b/python/api_v2/rbac/policy/serializers.py @@ -0,0 +1,76 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.rbac.group.serializers import GroupRelatedSerializer +from api_v2.rbac.role.serializers import RoleRelatedSerializer +from rbac.endpoints.policy.serializers import ObjectField +from rbac.endpoints.serializers import BaseRelatedSerializer +from rbac.models import Group, Policy, Role +from rest_framework.fields import BooleanField +from rest_framework.relations import ManyRelatedField, PrimaryKeyRelatedField +from rest_framework.serializers import ModelSerializer + + +class PolicyObjectField(ObjectField): + def to_representation(self, value): + data = [] + for obj in value.all(): + data.append( + { + "id": obj.object_id, + "type": obj.object.prototype.type, + "name": obj.object.name, + "display_name": obj.object.display_name, + }, + ) + + return super(ObjectField, self).to_representation(data) + + +class PolicySerializer(ModelSerializer): + is_built_in = BooleanField(read_only=True, source="built_in") + groups = GroupRelatedSerializer(many=True, source="group") + objects = PolicyObjectField(required=True, source="object") + role = RoleRelatedSerializer(read_only=True) + + class Meta: + model = Policy + fields = [ + "id", + "name", + "description", + "is_built_in", + "objects", + "groups", + "role", + ] + + +class PolicyRoleCreateSerializer(BaseRelatedSerializer): + id = PrimaryKeyRelatedField(queryset=Role.objects.all()) + + +class PolicyCreateSerializer(ModelSerializer): + groups = ManyRelatedField(child_relation=PrimaryKeyRelatedField(queryset=Group.objects.all()), source="group") + objects = ObjectField(required=True, source="object") + role = PolicyRoleCreateSerializer() + + class Meta: + model = Policy + fields = [ + "id", + "name", + "description", + "objects", + "groups", + "role", + ] diff --git a/python/api_v2/rbac/policy/urls.py b/python/api_v2/rbac/policy/urls.py new file mode 100644 index 0000000000..414d568f1b --- /dev/null +++ b/python/api_v2/rbac/policy/urls.py @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.policy.views import PolicyViewSet +from rest_framework.routers import SimpleRouter + +policy_router = SimpleRouter() +policy_router.register(prefix="policies", viewset=PolicyViewSet) diff --git a/python/api_v2/rbac/policy/views.py b/python/api_v2/rbac/policy/views.py new file mode 100644 index 0000000000..5b0ab729cc --- /dev/null +++ b/python/api_v2/rbac/policy/views.py @@ -0,0 +1,63 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
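For orientation only (not part of the diff), a sketch of the policy create payload implied by PolicyCreateSerializer: groups are referenced by primary key, the role by an {"id": ...} object, and objects carry the id/type pairs consumed by ObjectField (whose exact accepted shape comes from rbac.endpoints.policy.serializers, outside this diff). All ids are made up.

# Hypothetical POST body for the policies endpoint.
policy_create_body = {
    "name": "ADB cluster administrators",
    "description": "",
    "role": {"id": 5},
    "groups": [2],
    "objects": [{"id": 1, "type": "cluster"}],
}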
+ +from api_v2.rbac.policy.filters import PolicyFilter +from api_v2.rbac.policy.serializers import PolicyCreateSerializer, PolicySerializer +from api_v2.views import CamelCaseModelViewSet +from cm.errors import AdcmEx, raise_adcm_ex +from django_filters.rest_framework.backends import DjangoFilterBackend +from guardian.mixins import PermissionListMixin +from rbac.models import Policy +from rbac.services.policy import policy_create, policy_update +from rest_framework.response import Response +from rest_framework.status import HTTP_201_CREATED + +from adcm.permissions import DjangoModelPermissionsAudit + + +class PolicyViewSet(PermissionListMixin, CamelCaseModelViewSet): # pylint: disable=too-many-ancestors + queryset = Policy.objects.select_related("role").prefetch_related("group", "object") + filter_backends = (DjangoFilterBackend,) + filterset_class = PolicyFilter + permission_classes = (DjangoModelPermissionsAudit,) + permission_required = ["rbac.view_policy"] + http_method_names = ["get", "post", "patch", "delete"] + + def get_serializer_class(self) -> type[PolicySerializer | PolicyCreateSerializer]: + if self.action in ("create", "update", "partial_update"): + return PolicyCreateSerializer + + return PolicySerializer + + def create(self, request, *args, **kwargs): + serializer = self.get_serializer_class()(data=request.data) + serializer.is_valid(raise_exception=True) + policy = policy_create(**serializer.validated_data) + return Response(data=PolicySerializer(policy).data, status=HTTP_201_CREATED) + + def update(self, request, *args, **kwargs): + policy = self.get_object() + + if policy.built_in: + raise AdcmEx(code="POLICY_CREATE_ERROR") + + serializer = self.get_serializer(policy, data=request.data, partial=kwargs.pop("partial", False)) + serializer.is_valid(raise_exception=True) + policy = policy_update(policy, **serializer.validated_data) + return Response(data=PolicySerializer(policy).data) + + def destroy(self, request, *args, **kwargs): + policy = self.get_object() + if policy.built_in: + return raise_adcm_ex(code="POLICY_DELETE_ERROR") + + return super().destroy(request, *args, **kwargs) diff --git a/python/api_v2/rbac/role/__init__.py b/python/api_v2/rbac/role/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/rbac/role/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/rbac/role/filters.py b/python/api_v2/rbac/role/filters.py new file mode 100644 index 0000000000..bddef1e7ec --- /dev/null +++ b/python/api_v2/rbac/role/filters.py @@ -0,0 +1,30 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from django.db.models import Q, QuerySet +from django_filters import CharFilter, OrderingFilter +from django_filters.rest_framework import FilterSet +from rbac.models import Role + + +class RoleFilter(FilterSet): + display_name = CharFilter(field_name="display_name", label="Role name", lookup_expr="icontains") + categories = CharFilter(label="Categories", method="filter_category") + ordering = OrderingFilter(fields={"display_name": "displayName"}, field_labels={"display_name": "Display name"}) + + @staticmethod + def filter_category(queryset: QuerySet, name: str, value: str): # pylint: disable=unused-argument + return queryset.filter(Q(category__value=value) | Q(any_category=True)) + + class Meta: + model = Role + fields = ["type"] diff --git a/python/api_v2/rbac/role/serializers.py b/python/api_v2/rbac/role/serializers.py new file mode 100644 index 0000000000..2153852e46 --- /dev/null +++ b/python/api_v2/rbac/role/serializers.py @@ -0,0 +1,68 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from rbac.models import Role +from rest_framework.fields import BooleanField +from rest_framework.serializers import ( + ManyRelatedField, + ModelSerializer, + PrimaryKeyRelatedField, + SlugRelatedField, +) + + +class RoleChildSerializer(ModelSerializer): + is_built_in = BooleanField(source="built_in", default=False, read_only=True) + is_any_category = BooleanField(source="any_category", default=False, read_only=True) + categories = SlugRelatedField(read_only=True, many=True, slug_field="value", source="category") + + class Meta: + model = Role + fields = ( + "id", + "name", + "display_name", + "is_built_in", + "is_any_category", + "categories", + "type", + ) + extra_kwargs = {"name": {"read_only": True}, "type": {"read_only": True}} + + +class RoleSerializer(RoleChildSerializer): + children = RoleChildSerializer(many=True, source="child", read_only=True) + + class Meta: + model = Role + fields = ( + *RoleChildSerializer.Meta.fields, + "parametrized_by_type", + "description", + "children", + ) + extra_kwargs = {"name": {"read_only": True}, "type": {"read_only": True}} + + +class RoleCreateUpdateSerializer(ModelSerializer): + children = ManyRelatedField(child_relation=PrimaryKeyRelatedField(queryset=Role.objects.all()), source="child") + + class Meta: + model = Role + fields = ("display_name", "description", "children") + extra_kwargs = {"display_name": {"required": True}, "children": {"required": True}} + + +class RoleRelatedSerializer(ModelSerializer): + class Meta: + model = Role + fields = ["id", "name", "display_name"] diff --git a/python/api_v2/rbac/role/views.py b/python/api_v2/rbac/role/views.py new file mode 100644 index 0000000000..d547bb941a --- /dev/null +++ b/python/api_v2/rbac/role/views.py @@ -0,0 +1,144 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import defaultdict + +from api_v2.rbac.role.filters import RoleFilter +from api_v2.rbac.role.serializers import RoleCreateUpdateSerializer, RoleSerializer +from api_v2.views import CamelCaseModelViewSet +from cm.errors import raise_adcm_ex +from cm.models import Cluster, ClusterObject, Host, HostProvider, ProductCategory +from guardian.mixins import PermissionListMixin +from guardian.shortcuts import get_objects_for_user +from rbac.models import ObjectType as RBACObjectType +from rbac.models import Role, RoleTypes +from rbac.services.role import role_create, role_update +from rest_framework.decorators import action +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED + +from adcm.permissions import DjangoModelPermissionsAudit + + +class RoleViewSet(PermissionListMixin, CamelCaseModelViewSet): # pylint: disable=too-many-ancestors + queryset = Role.objects.prefetch_related("child", "category").order_by("display_name") + permission_classes = (DjangoModelPermissionsAudit,) + permission_required = ["rbac.view_role"] + filterset_class = RoleFilter + + def get_queryset(self, *args, **kwargs): + return get_objects_for_user(**self.get_get_objects_for_user_kwargs(Role.objects.all())) + + def get_serializer_class(self): + if self.action in ("create", "update", "partial_update"): + return RoleCreateUpdateSerializer + + return RoleSerializer + + def create(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + role = role_create(**serializer.validated_data) + + return Response(data=RoleSerializer(instance=role).data, status=HTTP_201_CREATED) + + def update(self, request, *args, **kwargs): + partial = kwargs.pop("partial", False) + instance = self.get_object() + + if instance.built_in: + raise_adcm_ex(code="ROLE_UPDATE_ERROR", msg=f"Can't modify role {instance.name} as it is auto created") + + serializer = self.get_serializer(data=request.data, partial=partial) + serializer.is_valid(raise_exception=True) + role = role_update(role=instance, partial=partial, **serializer.validated_data) + + return Response(data=RoleSerializer(instance=role).data, status=HTTP_200_OK) + + def destroy(self, request, *args, **kwargs): + instance = self.get_object() + + if instance.built_in: + raise_adcm_ex(code="ROLE_DELETE_ERROR", msg="It is forbidden to remove the built-in role.") + + return super().destroy(request, *args, **kwargs) + + @action(methods=["get"], detail=False) + def categories(self, request, *args, **kwargs): # pylint: disable=unused-argument + return Response(data=sorted(category.value for category in ProductCategory.objects.all()), status=HTTP_200_OK) + + @action(methods=["get"], detail=True, url_path="object-candidates", url_name="object-candidates") + def object_candidates(self, request, *args, **kwargs): # pylint: disable=unused-argument + role = self.get_object() + if role.type != RoleTypes.ROLE: + return Response({"cluster": [], "provider": [], "service": [], "host": []}) + + clusters = [] + providers = [] + services = [] + hosts = [] + + if 
RBACObjectType.CLUSTER.value in role.parametrized_by_type: + for cluster in Cluster.objects.all(): + clusters.append( + { + "name": cluster.display_name, + "id": cluster.id, + }, + ) + + if RBACObjectType.PROVIDER.value in role.parametrized_by_type: + for provider in HostProvider.objects.all(): + providers.append( + { + "name": provider.display_name, + "id": provider.id, + }, + ) + + if RBACObjectType.HOST.value in role.parametrized_by_type: + for host in Host.objects.all(): + hosts.append( + { + "name": host.display_name, + "id": host.id, + }, + ) + + if ( + RBACObjectType.SERVICE.value in role.parametrized_by_type + or RBACObjectType.COMPONENT.value in role.parametrized_by_type + ): + _services = defaultdict(list) + for service in ClusterObject.objects.all(): + _services[service].append( + { + "name": service.cluster.name, + "id": service.id, + }, + ) + for service, clusters_info in _services.items(): + services.append( + { + "name": service.name, + "display_name": service.display_name, + "clusters": sorted(clusters_info, key=lambda x: x["name"]), + }, + ) + + return Response( + { + "cluster": sorted(clusters, key=lambda x: x["name"]), + "provider": sorted(providers, key=lambda x: x["name"]), + "service": sorted(services, key=lambda x: x["name"]), + "host": sorted(hosts, key=lambda x: x["name"]), + }, + ) diff --git a/python/api_v2/rbac/urls.py b/python/api_v2/rbac/urls.py new file mode 100644 index 0000000000..23451720af --- /dev/null +++ b/python/api_v2/rbac/urls.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.group.urls import group_router +from api_v2.rbac.policy.urls import policy_router +from api_v2.rbac.role.views import RoleViewSet +from api_v2.rbac.user.urls import user_router +from api_v2.rbac.views import RbacRoot +from django.urls import path +from rest_framework.routers import SimpleRouter + +role_router = SimpleRouter() +role_router.register("roles", RoleViewSet) +urlpatterns = [ + path("", RbacRoot.as_view(), name="root"), + *role_router.urls, + *user_router.urls, + *group_router.urls, + *policy_router.urls, +] diff --git a/python/api_v2/rbac/user/__init__.py b/python/api_v2/rbac/user/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/rbac/user/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
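
A minimal, illustrative sketch of how the role endpoints registered above (the SimpleRouter registration under "roles" in python/api_v2/rbac/urls.py) could be exercised from a DRF test. The /api/v2/rbac/ URL prefix, the admin credentials, the camelCase request keys suggested by CamelCaseModelViewSet, and the child role IDs are assumptions, not anything defined in this diff.

from rest_framework.test import APIClient

client = APIClient()
client.login(username="admin", password="admin")  # assumed credentials

# Create a custom role; RoleCreateUpdateSerializer accepts display_name,
# description and children (primary keys of existing roles).
response = client.post(
    "/api/v2/rbac/roles/",  # assumed mount prefix for the rbac router
    data={"displayName": "Cluster operators", "description": "", "children": [1, 2]},
    format="json",
)
assert response.status_code == 201
role_id = response.json()["id"]

# object-candidates groups the objects a parametrized role can be applied to
# (cluster / provider / service / host), as implemented in RoleViewSet above.
candidates = client.get(f"/api/v2/rbac/roles/{role_id}/object-candidates/").json()
print(candidates["cluster"], candidates["service"])
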
diff --git a/python/api_v2/bundle/utils.py b/python/api_v2/rbac/user/constants.py similarity index 60% rename from python/api_v2/bundle/utils.py rename to python/api_v2/rbac/user/constants.py index db6f526983..c25c8d5937 100644 --- a/python/api_v2/bundle/utils.py +++ b/python/api_v2/rbac/user/constants.py @@ -10,17 +10,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pathlib import Path +from django.db.models import TextChoices -from rest_framework.request import Request -from adcm import settings +class UserStatusChoices(TextChoices): + ACTIVE = "active", "active" + BLOCKED = "blocked", "blocked" -def upload_file(request: Request) -> Path: - file_data = request.data["file"] - file_path = Path(settings.DOWNLOAD_DIR, file_data.name) - with open(file_path, "wb+") as f: - for chunk in file_data.chunks(): - f.write(chunk) - return file_path +class UserTypeChoices(TextChoices): + LOCAL = "local", "local" + LDAP = "ldap", "ldap" diff --git a/python/api_v2/rbac/user/filters.py b/python/api_v2/rbac/user/filters.py new file mode 100644 index 0000000000..be9fcc635c --- /dev/null +++ b/python/api_v2/rbac/user/filters.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.user.constants import UserStatusChoices, UserTypeChoices +from django.db.models import QuerySet +from django_filters.rest_framework import ( + CharFilter, + ChoiceFilter, + FilterSet, + OrderingFilter, +) + + +class UserFilterSet(FilterSet): + username = CharFilter(field_name="username", label="username", lookup_expr="icontains") + status = ChoiceFilter(choices=UserStatusChoices.choices, method="filter_status", label="status") + type = ChoiceFilter(choices=UserTypeChoices.choices, method="filter_type", label="type") + ordering = OrderingFilter(fields={"username": "username"}, field_labels={"username": "username"}, label="ordering") + + @staticmethod + def filter_status(queryset: QuerySet, name: str, value: str) -> QuerySet: # pylint: disable=unused-argument + filter_value = False + + if value == UserStatusChoices.ACTIVE: + filter_value = True + + return queryset.filter(blocked_at__isnull=filter_value) + + @staticmethod + def filter_type(queryset: QuerySet, name: str, value: str) -> QuerySet: # pylint: disable=unused-argument + filter_value = UserTypeChoices.LOCAL.value + + if value == UserTypeChoices.LDAP: + filter_value = UserTypeChoices.LDAP.value + + return queryset.filter(type=filter_value) diff --git a/python/api_v2/rbac/user/serializers.py b/python/api_v2/rbac/user/serializers.py new file mode 100644 index 0000000000..a4608b58d4 --- /dev/null +++ b/python/api_v2/rbac/user/serializers.py @@ -0,0 +1,89 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.user.constants import UserStatusChoices +from django.contrib.auth.models import Group as AuthGroup +from rbac.models import User +from rest_framework.fields import ( + BooleanField, + CharField, + EmailField, + RegexField, + SerializerMethodField, +) +from rest_framework.serializers import ModelSerializer + + +class RelatedGroupSerializer(ModelSerializer): + display_name = SerializerMethodField() + + class Meta: + model = AuthGroup + fields = ["id", "name", "display_name"] + + @staticmethod + def get_display_name(instance: AuthGroup) -> str: + return instance.group.display_name + + +class UserSerializer(ModelSerializer): + status = SerializerMethodField() + is_built_in = BooleanField(read_only=True, source="built_in") + groups = RelatedGroupSerializer(many=True) + is_super_user = BooleanField(read_only=True, source="is_superuser") + + class Meta: + model = User + fields = [ + "id", + "username", + "first_name", + "last_name", + "status", + "email", + "type", + "is_built_in", + "is_super_user", + "groups", + ] + + @staticmethod + def get_status(instance: User) -> str: + if instance.blocked_at is None: + return UserStatusChoices.ACTIVE.value + + return UserStatusChoices.BLOCKED.value + + +class UserUpdateSerializer(ModelSerializer): + password = CharField(trim_whitespace=False, write_only=True, required=False) + first_name = RegexField(r"^[^\n]*$", max_length=150, allow_blank=True, required=False, default="") + last_name = RegexField(r"^[^\n]*$", max_length=150, allow_blank=True, required=False, default="") + email = EmailField(allow_blank=True, required=False, default="") + is_super_user = BooleanField(source="is_superuser", default=False) + + class Meta: + model = User + fields = ["id", "password", "first_name", "last_name", "groups", "email", "is_super_user"] + + +class UserCreateSerializer(UserUpdateSerializer): + username = RegexField(r"^[^\s]+$", max_length=150) + password = CharField(trim_whitespace=False, write_only=True) + first_name = RegexField(r"^[^\n]*$", max_length=150, allow_blank=True, default="") + last_name = RegexField(r"^[^\n]*$", max_length=150, allow_blank=True, default="") + email = EmailField(allow_blank=True, default="") + is_super_user = BooleanField(source="is_superuser", default=False) + + class Meta: + model = User + fields = ["username", "password", "first_name", "last_name", "groups", "email", "is_super_user"] diff --git a/python/api_v2/rbac/user/urls.py b/python/api_v2/rbac/user/urls.py new file mode 100644 index 0000000000..613e371c40 --- /dev/null +++ b/python/api_v2/rbac/user/urls.py @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.rbac.user.views import UserViewSet +from rest_framework.routers import SimpleRouter + +user_router = SimpleRouter() +user_router.register(prefix="users", viewset=UserViewSet) diff --git a/python/api_v2/rbac/user/utils.py b/python/api_v2/rbac/user/utils.py new file mode 100644 index 0000000000..95a8209cd5 --- /dev/null +++ b/python/api_v2/rbac/user/utils.py @@ -0,0 +1,23 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from cm.errors import AdcmEx +from rbac.models import User + + +def unblock_user(user: User) -> None: + if user.built_in: + raise AdcmEx(code="USER_BLOCK_ERROR") + + user.failed_login_attempts = 0 + user.blocked_at = None + user.save(update_fields=["failed_login_attempts", "blocked_at"]) diff --git a/python/api_v2/rbac/user/views.py b/python/api_v2/rbac/user/views.py new file mode 100644 index 0000000000..52f194a194 --- /dev/null +++ b/python/api_v2/rbac/user/views.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.rbac.user.filters import UserFilterSet +from api_v2.rbac.user.serializers import ( + UserCreateSerializer, + UserSerializer, + UserUpdateSerializer, +) +from api_v2.rbac.user.utils import unblock_user +from api_v2.views import CamelCaseModelViewSet +from cm.errors import AdcmEx +from django.contrib.auth.models import Group as AuthGroup +from django.db.models import Prefetch +from django_filters.rest_framework.backends import DjangoFilterBackend +from guardian.mixins import PermissionListMixin +from rbac.models import User +from rbac.services.user import create_user, update_user +from rest_framework.decorators import action +from rest_framework.permissions import DjangoModelPermissions +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED + +from adcm.permissions import VIEW_USER_PERMISSION + + +class UserViewSet(PermissionListMixin, CamelCaseModelViewSet): # pylint: disable=too-many-ancestors + queryset = User.objects.prefetch_related( + Prefetch(lookup="groups", queryset=AuthGroup.objects.select_related("group")) + ).order_by("username") + filter_backends = (DjangoFilterBackend,) + filterset_class = UserFilterSet + permission_classes = (DjangoModelPermissions,) + permission_required = [VIEW_USER_PERMISSION] + + def get_serializer_class(self) -> type[UserSerializer] | type[UserUpdateSerializer] | type[UserCreateSerializer]: + if self.action in ("update", "partial_update"): + return UserUpdateSerializer + + if self.action == "create": + return UserCreateSerializer + + return UserSerializer + + def create(self, request: Request, *args, **kwargs) -> Response: + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + groups = [{"id": group.pk} for group in serializer.validated_data.pop("groups", [])] + user: User = create_user(groups=groups, **serializer.validated_data) + + return Response(data=UserSerializer(instance=user).data, status=HTTP_201_CREATED) + + def partial_update(self, request: Request, *args, **kwargs) -> Response: + instance: User = self.get_object() + serializer = self.get_serializer(instance=instance, data=request.data) + serializer.is_valid(raise_exception=True) + + groups = [{"id": group.pk} for group in serializer.validated_data.pop("groups", [])] + user: User = update_user( + user=serializer.instance, + context_user=request.user, + partial=True, + need_current_password=False, + api_v2_behaviour=True, + groups=groups, + **serializer.validated_data, + ) + + return Response(data=UserSerializer(instance=user).data, status=HTTP_200_OK) + + @action(methods=["post"], detail=True) + def unblock(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + if not request.user.is_superuser: + raise AdcmEx(code="USER_UNBLOCK_ERROR") + + unblock_user(user=self.get_object()) + + return Response(status=HTTP_200_OK) + + def destroy(self, request: Request, *args, **kwargs) -> Response: + user = self.get_object() + if user.built_in: + raise AdcmEx(code="USER_DELETE_ERROR") + + return super().destroy(request=request, *args, **kwargs) diff --git a/python/api_v2/rbac/views.py b/python/api_v2/rbac/views.py new file mode 100644 index 0000000000..0b22343be4 --- /dev/null +++ b/python/api_v2/rbac/views.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from rest_framework.permissions import AllowAny +from rest_framework.routers import APIRootView + + +class RbacRoot(APIRootView): + permission_classes = (AllowAny,) + api_root_dict = { + "roles": "role-list", + "users": "user-list", + "groups": "group-list", + "policies": "policy-list", + } diff --git a/python/api_v2/service/filters.py b/python/api_v2/service/filters.py index c77c312524..a625d5dd47 100644 --- a/python/api_v2/service/filters.py +++ b/python/api_v2/service/filters.py @@ -35,4 +35,4 @@ def filter_status(queryset: QuerySet, name: str, value: str) -> QuerySet: # pyl @staticmethod def filter_name(queryset: QuerySet, name: str, value: str) -> QuerySet: # pylint: disable=unused-argument - return queryset.filter(prototype__type=ObjectType.SERVICE, prototype__name=value) + return queryset.filter(prototype__type=ObjectType.SERVICE, prototype__name__icontains=value) diff --git a/python/api_v2/service/serializers.py b/python/api_v2/service/serializers.py index c1dd2a3d5c..3a155edc7c 100644 --- a/python/api_v2/service/serializers.py +++ b/python/api_v2/service/serializers.py @@ -10,20 +10,25 @@ # See the License for the specific language governing permissions and # limitations under the License. +from api_v2.cluster.serializers import ClusterRelatedSerializer from api_v2.concern.serializers import ConcernSerializer +from api_v2.prototype.serializers import PrototypeRelatedSerializer from cm.adcm_config.config import get_main_info -from cm.models import ClusterObject, MaintenanceMode +from cm.models import ClusterObject, MaintenanceMode, ServiceComponent from cm.status_api import get_obj_status from rest_framework.serializers import ( - CharField, ChoiceField, + IntegerField, ModelSerializer, SerializerMethodField, ) +from adcm.serializers import EmptySerializer + class ServiceRetrieveSerializer(ModelSerializer): - prototype_version = CharField(read_only=True, source="prototype.version") + prototype = PrototypeRelatedSerializer(read_only=True) + cluster = ClusterRelatedSerializer(read_only=True) status = SerializerMethodField() concerns = ConcernSerializer(read_only=True, many=True) main_info = SerializerMethodField() @@ -34,13 +39,16 @@ class Meta: "id", "name", "display_name", - "prototype_version", + "prototype", + "cluster", "status", "state", + "multi_state", "concerns", "is_maintenance_mode_available", "maintenance_mode", "main_info", + "multi_state", ] def get_status(self, instance: ClusterObject) -> str: @@ -50,10 +58,14 @@ def get_main_info(self, instance: ClusterObject) -> str | None: return get_main_info(obj=instance) -class ServiceCreateSerializer(ModelSerializer): +class ServiceRelatedSerializer(ModelSerializer): class Meta: model = ClusterObject - fields = ["prototype"] + fields = ["id", "name", "display_name"] + + +class ServiceCreateSerializer(EmptySerializer): + prototype_id = IntegerField() class ServiceMaintenanceModeSerializer(ModelSerializer): @@ -62,3 +74,29 @@ class ServiceMaintenanceModeSerializer(ModelSerializer): class Meta: model = ClusterObject fields = ["maintenance_mode"] + + +class ServiceNameSerializer(ModelSerializer): + class Meta: + model = 
ClusterObject + fields = ["id", "name", "display_name"] + + +class RelatedComponentsStatusesSerializer(ModelSerializer): + status = SerializerMethodField() + + @staticmethod + def get_status(instance: ClusterObject) -> str: + return get_obj_status(obj=instance) + + class Meta: + model = ServiceComponent + fields = ["id", "name", "display_name", "status"] + + +class ServiceStatusSerializer(ModelSerializer): + components = RelatedComponentsStatusesSerializer(many=True, source="servicecomponent_set") + + class Meta: + model = ClusterObject + fields = ["components"] diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index 19b592d3ce..fdc484fc99 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -15,15 +15,17 @@ ServiceCreateSerializer, ServiceMaintenanceModeSerializer, ServiceRetrieveSerializer, + ServiceStatusSerializer, ) +from api_v2.views import CamelCaseReadOnlyModelViewSet from cm.api import add_service_to_cluster, update_mm_objects -from cm.models import Cluster, ClusterObject +from cm.models import Cluster, ClusterObject, ObjectType, Prototype +from django_filters.rest_framework.backends import DjangoFilterBackend from guardian.mixins import PermissionListMixin from rest_framework.decorators import action from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED -from rest_framework.viewsets import ModelViewSet from adcm.permissions import ( ADD_SERVICE_PERM, @@ -34,13 +36,14 @@ check_custom_perm, get_object_for_user, ) -from adcm.utils import get_maintenance_mode_response +from adcm.utils import delete_service_from_api, get_maintenance_mode_response -class ServiceViewSet(PermissionListMixin, ModelViewSet): # pylint: disable=too-many-ancestors - queryset = ClusterObject.objects.all() +class ServiceViewSet(PermissionListMixin, CamelCaseReadOnlyModelViewSet): # pylint: disable=too-many-ancestors + queryset = ClusterObject.objects.select_related("cluster").order_by("pk") serializer_class = ServiceRetrieveSerializer filterset_class = ServiceFilter + filter_backends = (DjangoFilterBackend,) permission_classes = [DjangoModelPermissionsAudit] permission_required = [VIEW_SERVICE_PERM] http_method_names = ["get", "post", "delete"] @@ -61,20 +64,34 @@ def get_serializer_class(self): return self.serializer_class - def create(self, request: Request, *args, **kwargs): + def create(self, request: Request, *args, **kwargs): # pylint:disable=unused-argument cluster = get_object_for_user( user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, pk=kwargs["cluster_pk"] ) check_custom_perm(user=request.user, action_type=ADD_SERVICE_PERM, model=Cluster.__name__.lower(), obj=cluster) - serializer = self.get_serializer_class()(data=request.data) + serializer = self.get_serializer(data=request.data, many=True) serializer.is_valid(raise_exception=True) - add_service_to_cluster( - cluster=Cluster.objects.get(pk=kwargs["cluster_pk"]), proto=serializer.validated_data["prototype"] + added_services = [] + for service_prototype in Prototype.objects.filter( + pk__in=[prototype_data["prototype_id"] for prototype_data in serializer.validated_data], + type=ObjectType.SERVICE, + ): + added_services.append( + add_service_to_cluster( + cluster=cluster, + proto=service_prototype, + ) + ) + + return Response( + status=HTTP_201_CREATED, data=ServiceRetrieveSerializer(instance=added_services, many=True).data ) - return Response(status=HTTP_201_CREATED) + def destroy(self, 
request: Request, *args, **kwargs): # pylint:disable=unused-argument + instance = self.get_object() + return delete_service_from_api(service=instance) @update_mm_objects @action(methods=["post"], detail=True, url_path="maintenance-mode") @@ -92,3 +109,9 @@ def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # py response.data = serializer.data return response + + @action(methods=["get"], detail=True, url_path="statuses") + def statuses(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + service = get_object_for_user(user=request.user, perms=VIEW_SERVICE_PERM, klass=ClusterObject, id=kwargs["pk"]) + + return Response(data=ServiceStatusSerializer(instance=service).data) diff --git a/python/api_v2/task/__init__.py b/python/api_v2/task/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/task/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/task/filters.py b/python/api_v2/task/filters.py new file mode 100644 index 0000000000..f812ca3f8c --- /dev/null +++ b/python/api_v2/task/filters.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from cm.models import ( + Cluster, + ClusterObject, + Host, + HostProvider, + JobStatus, + ServiceComponent, + TaskLog, +) +from django.contrib.contenttypes.models import ContentType +from django.db.models import QuerySet +from django_filters.rest_framework.filters import ( + CharFilter, + ChoiceFilter, + OrderingFilter, +) +from django_filters.rest_framework.filterset import FilterSet + + +class TaskFilter(FilterSet): + job_name = CharFilter( + label="Job name", field_name="joblog__action__display_name", lookup_expr="icontains", distinct=True + ) + object_name = CharFilter(label="Object name", method="filter_object_name") + status = ChoiceFilter(field_name="status", choices=JobStatus.choices, label="Task status") + ordering = OrderingFilter( + fields={"id": "id", "action__prototype__name": "name", "start_date": "startTime", "finish_date": "endTime"}, + field_labels={ + "id": "ID", + "action__prototype__name": "Name", + "start_date": "Start time", + "finish_date": "End time", + }, + label="ordering", + ) + + # pylint: disable=unused-argument + def filter_object_name(self, queryset: QuerySet, name: str, value: str) -> QuerySet: + clusters = Cluster.objects.filter(name__icontains=value).values_list("id") + services = ClusterObject.objects.filter(prototype__display_name__icontains=value).values_list("id") + components = ServiceComponent.objects.filter(prototype__display_name__icontains=value).values_list("id") + providers = HostProvider.objects.filter(name__icontains=value).values_list("id") + hosts = Host.objects.filter(fqdn__icontains=value).values_list("id") + + queryset = ( + queryset.filter(object_type=ContentType.objects.get_for_model(Cluster), object_id__in=clusters) + | queryset.filter(object_type=ContentType.objects.get_for_model(ClusterObject), object_id__in=services) + | queryset.filter(object_type=ContentType.objects.get_for_model(ServiceComponent), object_id__in=components) + | queryset.filter(object_type=ContentType.objects.get_for_model(HostProvider), object_id__in=providers) + | queryset.filter(object_type=ContentType.objects.get_for_model(Host), object_id__in=hosts) + ) + + return queryset.order_by("-pk") + + class Meta: + model = TaskLog + fields = ["id", "job_name", "object_name", "status", "ordering"] diff --git a/python/api_v2/task/serializers.py b/python/api_v2/task/serializers.py new file mode 100644 index 0000000000..7157025431 --- /dev/null +++ b/python/api_v2/task/serializers.py @@ -0,0 +1,141 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.action.serializers import ActionNameSerializer +from cm.models import Action, JobLog, JobStatus, SubAction, TaskLog +from rest_framework.fields import CharField, DateTimeField, SerializerMethodField +from rest_framework.serializers import ModelSerializer + + +class JobListSerializer(ModelSerializer): + name = SerializerMethodField() + display_name = SerializerMethodField() + is_terminatable = SerializerMethodField() + start_time = DateTimeField(source="start_date") + end_time = DateTimeField(source="finish_date") + + class Meta: + model = JobLog + fields = ( + "id", + "name", + "display_name", + "status", + "start_time", + "end_time", + "duration", + "is_terminatable", + ) + + @classmethod + def get_display_name(cls, obj: JobLog) -> str | None: + job_action = cls._get_job_action_obj(obj) + return job_action.display_name if job_action else None + + @classmethod + def get_name(cls, obj: JobLog) -> str | None: + job_action = cls._get_job_action_obj(obj) + return job_action.name if job_action else None + + @staticmethod + def _get_job_action_obj(obj: JobLog) -> Action | SubAction | None: + if obj.sub_action: + return obj.sub_action + elif obj.action: + return obj.action + else: + return None + + @staticmethod + def get_is_terminatable(obj: JobLog): + if obj.sub_action is None: + return False + + return obj.sub_action.allowed_to_terminate + + +class TaskSerializer(ModelSerializer): + name = CharField(source="action.name", allow_null=True) + display_name = CharField(source="action.display_name", allow_null=True) + is_terminatable = SerializerMethodField() + action = ActionNameSerializer(read_only=True, allow_null=True) + objects = SerializerMethodField() + start_time = DateTimeField(source="start_date") + end_time = DateTimeField(source="finish_date") + + class Meta: + model = TaskLog + fields = ( + "id", + "name", + "display_name", + "action", + "status", + "start_time", + "end_time", + "duration", + "is_terminatable", + "child_jobs", + "objects", + ) + + @staticmethod + def get_is_terminatable(obj: TaskLog): + if obj.action: + allow_to_terminate = obj.action.allow_to_terminate + else: + allow_to_terminate = False + + if allow_to_terminate and obj.status in {JobStatus.CREATED, JobStatus.RUNNING}: + return True + + return False + + @staticmethod + def get_objects(obj: TaskLog) -> list[dict[str, int | str]]: + return [{"type": k, **v} for k, v in obj.selector.items()] + + +class TaskListSerializer(TaskSerializer): + child_jobs = SerializerMethodField() + + class Meta: + model = TaskLog + fields = ( + *TaskSerializer.Meta.fields, + "child_jobs", + ) + + @staticmethod + def get_child_jobs(obj: TaskLog) -> list: + return JobListSerializer(instance=obj.joblog_set.order_by("pk"), many=True, read_only=True).data + + +class TaskRetrieveByJobSerializer(TaskSerializer): + action = ActionNameSerializer(read_only=True, allow_null=True) + start_time = DateTimeField(source="start_date") + end_time = DateTimeField(source="finish_date") + + class Meta: + model = TaskLog + fields = ( + "id", + "name", + "display_name", + "action", + "status", + "start_time", + "end_time", + "duration", + "objects", + "is_terminatable", + ) diff --git a/python/api_v2/task/urls.py b/python/api_v2/task/urls.py new file mode 100644 index 0000000000..599dc67786 --- /dev/null +++ b/python/api_v2/task/urls.py @@ -0,0 +1,23 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from api_v2.log_storage.views import LogStorageTaskViewSet +from api_v2.task.views import TaskViewSet +from rest_framework.routers import SimpleRouter +from rest_framework_nested.routers import NestedSimpleRouter + +router = SimpleRouter() +router.register("", TaskViewSet) + +log_storage_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="task") +log_storage_router.register(prefix="logs", viewset=LogStorageTaskViewSet, basename="log") + +urlpatterns = [*router.urls, *log_storage_router.urls] diff --git a/python/api_v2/task/views.py b/python/api_v2/task/views.py new file mode 100644 index 0000000000..0b82a54eb8 --- /dev/null +++ b/python/api_v2/task/views.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api.job.views import VIEW_TASKLOG_PERMISSION +from api_v2.task.filters import TaskFilter +from api_v2.task.serializers import TaskListSerializer +from api_v2.views import CamelCaseGenericViewSet +from cm.job import cancel_task +from cm.models import TaskLog +from django_filters.rest_framework.backends import DjangoFilterBackend +from rest_framework.decorators import action +from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK + +from adcm.permissions import check_custom_perm, get_object_for_user + + +class TaskViewSet( + ListModelMixin, RetrieveModelMixin, CreateModelMixin, CamelCaseGenericViewSet +): # pylint: disable=too-many-ancestors + queryset = TaskLog.objects.select_related("action").order_by("-pk") + serializer_class = TaskListSerializer + filterset_class = TaskFilter + filter_backends = (DjangoFilterBackend,) + + @action(methods=["post"], detail=True) + def terminate(self, request: Request, pk: int) -> Response: + task = get_object_for_user(request.user, VIEW_TASKLOG_PERMISSION, TaskLog, id=pk) + check_custom_perm(request.user, "change", TaskLog, task) + cancel_task(task) + + return Response(status=HTTP_200_OK, data=TaskListSerializer(instance=task).data) diff --git a/python/api_v2/tests/base.py b/python/api_v2/tests/base.py index 361fc29595..d6987aa1b3 100644 --- a/python/api_v2/tests/base.py +++ b/python/api_v2/tests/base.py @@ -40,9 +40,13 @@ ) from django.conf import settings from init_db import init +from rbac.models import User +from rbac.services.user import create_user from rbac.upgrade.role import init_roles from rest_framework.test import APITestCase +from adcm.tests.base import ParallelReadyTestCase + class HostComponentMapDictType(TypedDict): host_id: 
int @@ -50,24 +54,27 @@ class HostComponentMapDictType(TypedDict): component_id: int -class BaseAPITestCase(APITestCase): # pylint: disable=too-many-instance-attributes +class BaseAPITestCase(APITestCase, ParallelReadyTestCase): @classmethod def setUpClass(cls): super().setUpClass() + + cls.test_bundles_dir = Path(__file__).parent / "bundles" + init_roles() init() adcm = ADCM.objects.first() - config_log = ConfigLog.objects.get(obj_ref=adcm.config) + config_log = ConfigLog.objects.get(id=adcm.config.current) config_log.config["auth_policy"]["max_password_length"] = 20 config_log.save(update_fields=["config"]) def setUp(self) -> None: self.client.login(username="admin", password="admin") - cluster_bundle_1_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_one" - cluster_bundle_2_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_two" - provider_bundle_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "provider" + cluster_bundle_1_path = self.test_bundles_dir / "cluster_one" + cluster_bundle_2_path = self.test_bundles_dir / "cluster_two" + provider_bundle_path = self.test_bundles_dir / "provider" self.bundle_1 = self.add_bundle(source_dir=cluster_bundle_1_path) self.bundle_2 = self.add_bundle(source_dir=cluster_bundle_2_path) @@ -142,8 +149,21 @@ def add_hostcomponent_map(cluster: Cluster, hc_map: list[HostComponentMapDictTyp return add_hc(cluster=cluster, hc_in=hc_map) @staticmethod - def get_non_existent_pk(model: type[ADCMEntity] | type[ADCMModel]): + def get_non_existent_pk(model: type[ADCMEntity] | type[ADCMModel] | type[User]): try: return model.objects.order_by("-pk").first().pk + 1 except model.DoesNotExist: return 1 + + def create_user(self, user_data: dict | None = None) -> User: + if user_data is None: + user_data = { + "username": "test_user_username", + "password": "test_user_password", + "email": "testuser@mail.ru", + "first_name": "test_user_first_name", + "last_name": "test_user_last_name", + "profile": "", + } + + return create_user(**user_data) diff --git a/python/api_v2/tests/bundles/adcm_configs/config.yaml b/python/api_v2/tests/bundles/adcm_configs/config.yaml new file mode 100644 index 0000000000..3f44996a0c --- /dev/null +++ b/python/api_v2/tests/bundles/adcm_configs/config.yaml @@ -0,0 +1,274 @@ +- + + type: adcm + name: ADCM + version: 2.6 + + actions: + run_ldap_sync: + type: job + script_type: python + script: python_scripts/run_ldap_sync.py + display_name: Run LDAP sync + allow_in_maintenance_mode: true + states: + available: any + test_ldap_connection: + type: job + script_type: python + script: python_scripts/testing_ldap_connection.py + display_name: Test LDAP connection + allow_to_terminate: true + allow_in_maintenance_mode: true + states: + available: any + + config: + - name: "global" + display_name: "Global Options" + type: "group" + subs: + - name: "adcm_url" + display_name: "ADCM's URL" + description: | + We have to know ADCM's Url to send information from host. We try to guess that information from url you enter in browser. + But if your network has more complicated structure and we guess wrong, please fix that here. 
+ type: string + - name: "google_oauth" + display_name: "Google Auth" + type: "group" + subs: + - name: "client_id" + type: string + required: false + - name: "secret" + type: password + required: false + ui_options: + no_confirm: true + - name: "yandex_oauth" + display_name: "Yandex Auth" + type: "group" + subs: + - name: "client_id" + type: string + required: false + - name: "secret" + type: password + required: false + ui_options: + no_confirm: true + ui_options: + invisible: true + - name: "job_log" + display_name: "Job Log" + type: "group" + subs: + - name: "log_rotation_on_fs" + display_name: "Log rotation from file system" + type: integer + required: false + default: 365 + min: 0 + description: | + You can set the time (number of days) after which the logs will be deleted from the file system. + - name: "log_rotation_in_db" + display_name: "Log rotation from database" + type: integer + required: false + default: 365 + min: 0 + description: | + You can set the time (number of days) after which the logs will be deleted from the database. + - name: "ansible_settings" + display_name: "Ansible Settings" + type: "group" + subs: + - name: "mitogen" + display_name: "Use Mitogen" + description: | + Mitogen for Ansible is a completely redesigned UNIX connection layer and module runtime for Ansible. + type: boolean + ui_options: + invisible: true + default: false + - name: "forks" + display_name: "Forks" + description: | + This is the default number of parallel processes to spawn when communicating with remote hosts. + type: integer + default: 5 + min: 1 + max: 100 + - name: "logrotate" + display_name: "Nginx Server Logrotate" + type: "group" + activatable: true + ui_options: + invisible: true + subs: + - name: size + display_name: "Max file size" + description: | + Specifies the allowed size the log file can reach before it is archived + type: string + default: 10M + - name: max_history + display_name: "Max files history" + description: | + Controls the maximum number of archive files to keep + type: integer + default: 10 + min: 1 + - name: compress + display_name: "Enable compression" + description: | + Compress the rotated files + type: boolean + default: false + - name: "config_rotation" + display_name: "Configuration rotation" + description: | + You can enable Clusters/Services/Components configurations deleting mechanism. + type: "group" + subs: + - name: "config_rotation_in_db" + display_name: "Objects configurations rotation period" + type: integer + required: false + default: 0 + min: 0 + description: | + You can set the time (number of days) after which the Objects configuration will be deleted from the database. 0 is infinite storing. + + - name: "audit_data_retention" + display_name: "Audit data retention" + type: "group" + subs: + - name: "retention_period" + display_name: "Retention period" + description: | + Data storage period (in days) for operations and authorizations in ADCM. + type: integer + default: 1825 + - name: "data_archiving" + display_name: "Enable archiving" + description: | + Enable/disable archiving of data on operations and authorizations after the period specified in "retention_period". + type: boolean + default: false + required: no + - name: "ldap_integration" + display_name: "LDAP integration" + type: "group" + activatable: true + active: false + subs: + - name: "ldap_uri" + display_name: "LDAP URI" + description: | + The URI of the LDAP server. 
+ type: string + - name: "ldap_user" + display_name: "Bind DN" + description: | + The distinguished name (DN) of the user that is used to connect to the LDAP server. For example: "cn=admin,dc=ad,dc=ranger-test". + type: string + - name: "ldap_password" + display_name: "Bind Password" + description: | + Password for access to the LDAP server of the user specified in the "LDAP User" field. + type: password + - name: "user_search_base" + display_name: "User search base" + description: | + The distinguished name (DN) of the directory object from which to search for entries. For example: "ou=Peoples,dc=ad,dc=ranger-test". + type: string + - name: "user_search_filter" + display_name: "User search filter" + description: | + Additional filter constraining the users selected for syncing. + type: string + required: false + - name: "user_object_class" + display_name: "User object class" + description: | + Object class to identify user entries. + type: string + default: "user" + - name: "user_name_attribute" + display_name: "User name attribute" + description: | + Attribute from user entry that would be treated as user name. + type: string + default: "sAMAccountName" + - name: "group_search_base" + display_name: "Group search base" + description: | + The distinguished name (DN) of the directory object from which to search for entries. For example: "ou=Groups,dc=ad,dc=ranger-test". + type: string + required: false + - name: "group_search_filter" + display_name: "Group search filter" + description: | + Additional filter constraining the groups selected for syncing. + type: string + required: false + - name: "group_object_class" + display_name: "Group object class" + description: | + Object class to identify group entries. + type: string + default: "group" + - name: "group_name_attribute" + display_name: "Group name attribute" + description: | + Attribute from group entry that would be treated as group name. + type: string + default: "cn" + - name: "group_member_attribute_name" + display_name: "Group member attribute name" + description: | + Attribute from group entry that is list of members. + type: string + default: "member" + - name: "sync_interval" + display_name: "Sync interval" + description: | + Amount of time in minutes that group memberships and user distinguished names are synchronized between AD and ADCM. + type: integer + default: 60 + - name: "tls_ca_cert_file" + display_name: "TLS CA certificate file path" + description: | + CA certificate path in ADCM container volume. 
+ type: string + required: false + - name: "auth_policy" + display_name: "Authentication policy" + type: "group" + subs: + - name: "min_password_length" + display_name: "Minimum password length" + type: integer + default: 12 + min: 1 + max: 128 + - name: "max_password_length" + display_name: "Maximum password length" + type: integer + default: 128 + min: 1 + max: 128 + - name: "login_attempt_limit" + display_name: "Login attempt limit" + description: "The number of login attempts allowed before user account is locked out" + type: integer + min: 1 + default: 5 + - name: "block_time" + display_name: "Blocking time period" + description: "Defines a period (in min) of inactivity after which old failed login attempts will be cleared" + type: integer + min: 1 + default: 5 diff --git a/python/api_v2/tests/bundles/cluster_actions/config.yaml b/python/api_v2/tests/bundles/cluster_actions/config.yaml new file mode 100644 index 0000000000..6284bf7c9a --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_actions/config.yaml @@ -0,0 +1,210 @@ +- type: cluster + name: cluster_with_actions + version: "1.0" + + actions: &actions + + # maintenance mode + + cluster_host_action_allowed: &job + type: job + script: ./actions.yaml + script_type: ansible + params: + ansible_tags: simple_action + host_action: true + allow_in_maintenance_mode: true + states: + available: any + + cluster_host_action_disallowed: + <<: *job + allow_in_maintenance_mode: false + + # simple + + state_any: &simple_action + type: job + script: ./actions.yaml + script_type: ansible + states: + available: any + + state_created: + <<: *simple_action + states: + available: + - created + + state_installed: + <<: *simple_action + states: + available: + - installed + + # masking + + state_created_masking: &action + type: job + script: ./actions.yaml + script_type: ansible + masking: + state: + available: + - created + + state_installed_masking: + <<: *action + masking: + state: + available: + - installed + + state_created_unavailable: + <<: *action + masking: + state: + unavailable: + - created + + # multi state masking + + multi_flag_masking: + <<: *action + masking: + multi_state: + available: + - flag + + multi_flag_unavailable: + <<: *action + masking: + multi_state: + unavailable: + - flag + + # state + multi + + state_created_available_multi_bag_available: + <<: *action + masking: + state: + available: + - created + multi_state: + available: + - bag + + state_created_available_multi_bag_unavailable: + <<: *action + masking: + state: + available: + - created + multi_state: + unavailable: + - bag + + state_created_unavailable_multi_bag_available: + <<: *action + masking: + state: + unavailable: + - created + multi_state: + available: + - bag + + state_created_unavailable_multi_bag_unavailable: + <<: *action + masking: + state: + unavailable: + - created + multi_state: + unavailable: + - bag + + # host_action: true + + host_action_any: &host_action_any + <<: *simple_action + display_name: "from cluster any" + host_action: true + states: + available: any + + host_action_installed: &host_action_masking + <<: *action + display_name: "from cluster installed" + host_action: true + masking: + state: + available: + - installed + + host_action_multi_flag: + <<: *host_action_masking + display_name: "from cluster multi flag" + masking: &created_unavailable_mutli_flag_available + state: + unavailable: + - created + multi_state: + available: + - flag + +- type: service + name: service_1 + version: "0.1" + + actions: + <<: *actions + + host_action_any: 
+ <<: *host_action_any + display_name: "from service any" + + host_action_installed: + <<: *host_action_masking + display_name: "from service installed" + + + host_action_multi_flag: + <<: *host_action_masking + display_name: "from service multi flag" + masking: *created_unavailable_mutli_flag_available + + components: + component_1: + actions: + <<: *actions + + host_action_any: + <<: *host_action_any + display_name: "from component any" + + host_action_installed: + <<: *host_action_masking + display_name: "from component installed" + + host_action_multi_flag: + <<: *host_action_masking + display_name: "from component multi flag" + masking: *created_unavailable_mutli_flag_available + + component_2: + actions: *actions + +- type: service + name: service_2 + version: "0.2" + + actions: *actions + + components: + component_1: + actions: *actions + + component_2: + actions: *actions diff --git a/python/api_v2/tests/bundles/cluster_one/config.yaml b/python/api_v2/tests/bundles/cluster_one/config.yaml index 018f3114b7..d814e5f623 100644 --- a/python/api_v2/tests/bundles/cluster_one/config.yaml +++ b/python/api_v2/tests/bundles/cluster_one/config.yaml @@ -11,13 +11,61 @@ type: string required: false default: string + - name: map_not_required + type: map + required: False + - name: list + type: list + required: false + default: + - value1 + - value2 + - value3 + - name: boolean + type: boolean + required: false + default: true - name: group type: group subs: - - name: string - type: string + - name: float + type: float + required: false + default: 0.1 + - name: map + type: map + required: false + default: + integer_key: '10' + string_key: string + - name: text + type: text + required: false + default: text + - name: activatable_group + type: group + activatable: True + active: True + subs: + - name: integer + type: integer + required: false + default: 10 + - name: json + type: json required: false - default: string + default: + key: value + - name: structure + type: structure + required: false + yspec: ./schema.yaml + default: + - integer: 1 + string: string1 + - integer: 2 + string: string2 + actions: action: &action type: job @@ -26,6 +74,44 @@ states: available: any + with_config: + <<: *action + config: + - name: simple + type: string + required: false + - name: grouped + type: group + subs: + - name: simple + type: integer + default: 4 + - name: second + type: float + default: 4.3 + - name: after + type: list + default: + - "1" + - "woohoo" + + with_hc: + <<: *action + hc_acl: + - action: add + service: service_1 + component: component_1 + - action: remove + service: service_1 + component: component_2 + + cluster_on_host: &host_action + <<: *action + host_action: true + + export: + - string + - name: service_1 type: service version: *version @@ -33,6 +119,8 @@ license: EULA.txt actions: action: *action + service_on_host: *host_action + components: component_1: constraint: [ 0, + ] @@ -40,6 +128,7 @@ actions: action_1_comp_1: *action action_2_comp_1: *action + component_on_host: *host_action component_2: constraint: [ 0, + ] config: *config diff --git a/python/api_v2/tests/bundles/cluster_one/schema.yaml b/python/api_v2/tests/bundles/cluster_one/schema.yaml new file mode 100755 index 0000000000..49aa4bc276 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_one/schema.yaml @@ -0,0 +1,13 @@ +--- +root: + match: list + item: variable +variable: + match: dict + items: + string: string + integer: integer +string: + match: string +integer: + match: int diff --git 
a/python/api_v2/tests/bundles/cluster_one_upgrade/config.yaml b/python/api_v2/tests/bundles/cluster_one_upgrade/config.yaml index f171650113..66a8f46e81 100644 --- a/python/api_v2/tests/bundles/cluster_one_upgrade/config.yaml +++ b/python/api_v2/tests/bundles/cluster_one_upgrade/config.yaml @@ -4,20 +4,73 @@ version: &version '2.0' edition: community config_group_customization: true - license: EULA.txt + license: ./EULA.txt upgrade: - name: upgrade - versions: + versions: &correct_versions min: '1.0' max: '2.0' - states: + states: &always_available available: any - - name: upgrade_via_action + + - name: upgrade_via_action_simple + versions: *correct_versions + states: *always_available + scripts: &upgrade_scripts + - name: pre + script: ./playbook.yaml + script_type: ansible + - name: switch + script: bundle_switch + script_type: internal + - name: post + script: ./playbook.yaml + script_type: ansible + + - name: upgrade_via_action_complex + versions: *correct_versions + states: *always_available + scripts: *upgrade_scripts + config: + - name: simple + type: string + required: false + - name: grouped + type: group + subs: + - name: simple + type: integer + default: 4 + - name: second + type: float + default: 4.3 + - name: after + type: list + default: + - "1" + - "woohoo" + hc_acl: + - action: add + service: service_1 + component: component_1 + - action: remove + service: service_1 + component: component_2 + ui_options: + disclaimer: "Cool upgrade" + + - name: unfit_version versions: - min: '1.0' - max: '2.0' + min_strict: '0' + max: '0.3' + states: *always_available + + - name: unfit_state + versions: *correct_versions states: - available: any + available: + - doesnotexist + config: &config - name: string type: string diff --git a/python/api_v2/tests/bundles/cluster_two/config.yaml b/python/api_v2/tests/bundles/cluster_two/config.yaml index c968f497ad..59bde089ce 100644 --- a/python/api_v2/tests/bundles/cluster_two/config.yaml +++ b/python/api_v2/tests/bundles/cluster_two/config.yaml @@ -16,6 +16,11 @@ type: string required: false default: string + import: + NonExistentCluster: + versions: + min: 1.0 + max: 2.0 - name: service type: service diff --git a/python/api_v2/tests/bundles/cluster_with_allowed_flags/config.yaml b/python/api_v2/tests/bundles/cluster_with_allowed_flags/config.yaml new file mode 100644 index 0000000000..af8fdb5d70 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_allowed_flags/config.yaml @@ -0,0 +1,11 @@ +--- +- type: cluster + name: cluster_with_allowed_flags + version: '1.0' + edition: community + allow_flags: true + config: + - name: string + type: string + required: false + diff --git a/python/api_v2/tests/bundles/cluster_with_required_config_field/config.yaml b/python/api_v2/tests/bundles/cluster_with_required_config_field/config.yaml new file mode 100644 index 0000000000..015ca58be4 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_required_config_field/config.yaml @@ -0,0 +1,9 @@ +--- +- type: cluster + name: cluster_with_required_config + version: '1.0' + edition: community + config: + - name: string + type: string + required: true diff --git a/python/api_v2/tests/bundles/cluster_with_required_hc/config.yaml b/python/api_v2/tests/bundles/cluster_with_required_hc/config.yaml new file mode 100644 index 0000000000..142eb5a163 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_required_hc/config.yaml @@ -0,0 +1,11 @@ +- type: cluster + name: cluster_with_required_hc + version: &version '1.0' + edition: community + +- name: 
service_1 + type: service + version: *version + components: + component_hc_required: + constraint: [1, +] diff --git a/python/api_v2/tests/bundles/cluster_with_required_import/config.yaml b/python/api_v2/tests/bundles/cluster_with_required_import/config.yaml new file mode 100644 index 0000000000..9b02fc8bc8 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_required_import/config.yaml @@ -0,0 +1,20 @@ +- type: cluster + name: cluster_with_required_import + version: 1.0 + import: + cluster_one: + required: true + versions: + min: 1.0 + max: 2.0 + not_exist: + default: + - something + + config: + - name: something + type: group + subs: + - name: string_congig + type: string + default: string_value diff --git a/python/api_v2/tests/bundles/cluster_with_required_service/config.yaml b/python/api_v2/tests/bundles/cluster_with_required_service/config.yaml new file mode 100644 index 0000000000..b981847453 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_required_service/config.yaml @@ -0,0 +1,10 @@ +--- +- type: cluster + name: cluster_with_required_service + version: &version '1.0' + edition: community + +- name: service_required + type: service + version: *version + required: true diff --git a/python/api_v2/tests/bundles/cluster_with_service_requirements/config.yaml b/python/api_v2/tests/bundles/cluster_with_service_requirements/config.yaml new file mode 100644 index 0000000000..213fad0253 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_with_service_requirements/config.yaml @@ -0,0 +1,15 @@ +- type: cluster + name: cluster_with_service_requirements + version: &version '1.0' + edition: community + +- name: service_1 + type: service + version: *version + requires: + - service: some_other_service + +- name: some_other_service + type: service + version: *version + diff --git a/python/api_v2/tests/bundles/provider/config.yaml b/python/api_v2/tests/bundles/provider/config.yaml index 494adc49ac..cfb9183cb1 100644 --- a/python/api_v2/tests/bundles/provider/config.yaml +++ b/python/api_v2/tests/bundles/provider/config.yaml @@ -8,6 +8,57 @@ type: string required: false default: string + - name: list + type: list + required: false + default: + - value1 + - value2 + - value3 + - name: boolean + type: boolean + required: false + default: true + - name: group + type: group + subs: + - name: float + type: float + required: false + default: 0.1 + - name: map + type: map + required: false + default: + integer_key: '10' + string_key: string + - name: text + type: text + required: false + default: text + - name: activatable_group + type: group + activatable: True + active: True + subs: + - name: integer + type: integer + required: false + default: 10 + - name: json + type: json + required: false + default: + key: value + - name: structure + type: structure + required: false + yspec: ./schema.yaml + default: + - integer: 1 + string: string1 + - integer: 2 + string: string2 actions: provider_action: diff --git a/python/api_v2/tests/bundles/provider/schema.yaml b/python/api_v2/tests/bundles/provider/schema.yaml new file mode 100755 index 0000000000..49aa4bc276 --- /dev/null +++ b/python/api_v2/tests/bundles/provider/schema.yaml @@ -0,0 +1,13 @@ +--- +root: + match: list + item: variable +variable: + match: dict + items: + string: string + integer: integer +string: + match: string +integer: + match: int diff --git a/python/api_v2/tests/bundles/provider_actions/config.yaml b/python/api_v2/tests/bundles/provider_actions/config.yaml new file mode 100644 index 0000000000..fd6eb6b4d1 --- 
/dev/null +++ b/python/api_v2/tests/bundles/provider_actions/config.yaml @@ -0,0 +1,155 @@ +- type: provider + name: provider_with_actions + version: 3 + + actions: &actions + # simple + + state_any: &simple_action + type: job + script: ./actions.yaml + script_type: ansible + states: + available: any + + state_created: + <<: *simple_action + states: + available: + - created + + state_installed: + <<: *simple_action + states: + available: + - installed + + # masking + + state_created_masking: &action + type: job + script: ./actions.yaml + script_type: ansible + masking: + state: + available: + - created + + state_installed_masking: + <<: *action + masking: + state: + available: + - installed + + state_created_unavailable: + <<: *action + masking: + state: + unavailable: + - created + + # multi state masking + + multi_flag_masking: + <<: *action + masking: + multi_state: + available: + - flag + + multi_flag_unavailable: + <<: *action + masking: + multi_state: + unavailable: + - flag + + # state + multi + + state_created_available_multi_bag_available: + <<: *action + masking: + state: + available: + - created + multi_state: + available: + - bag + + state_created_available_multi_bag_unavailable: + <<: *action + masking: + state: + available: + - created + multi_state: + unavailable: + - bag + + state_created_unavailable_multi_bag_available: + <<: *action + masking: + state: + unavailable: + - created + multi_state: + available: + - bag + + state_created_unavailable_multi_bag_unavailable: + <<: *action + masking: + state: + unavailable: + - created + multi_state: + unavailable: + - bag + + # host_action: true + + host_action_any: + <<: *simple_action + display_name: "from provider any" + host_action: true + states: + available: any + + host_action_installed: &host_action_masking + <<: *action + display_name: "from provider installed" + host_action: true + masking: + state: + available: + - installed + + host_action_multi_flag: + <<: *host_action_masking + display_name: "from provider multi flag" + masking: + state: + unavailable: + - created + multi_state: + available: + - flag + +- type: host + version: 2 + name: host_with_actions + + actions: + <<: *actions + + host_action_any: &never_action + <<: *simple_action + host_action: false + states: + available: + - never + + host_action_installed: *never_action + + host_action_multi_flag: *never_action diff --git a/python/api_v2/tests/bundles/provider_upgrade/config.yaml b/python/api_v2/tests/bundles/provider_upgrade/config.yaml index 68e6dcee82..b3f14f91f9 100644 --- a/python/api_v2/tests/bundles/provider_upgrade/config.yaml +++ b/python/api_v2/tests/bundles/provider_upgrade/config.yaml @@ -4,17 +4,62 @@ version: &version '2.0' upgrade: - name: upgrade - versions: + versions: &correct_versions min: '1.0' max: '2.0' - states: + states: &always_available available: any - - name: upgrade_via_action + + - name: upgrade_via_action_simple + versions: *correct_versions + states: *always_available + scripts: &upgrade_scripts + - name: pre + script: ./playbook.yaml + script_type: ansible + - name: switch + script: bundle_switch + script_type: internal + - name: post + script: ./playbook.yaml + script_type: ansible + + - name: upgrade_via_action_complex + versions: *correct_versions + states: *always_available + scripts: *upgrade_scripts + config: + - name: simple + type: string + required: false + - name: grouped + type: group + subs: + - name: simple + type: integer + default: 4 + - name: second + type: float + default: 4.3 + - name: after + type: list + 
default: + - "1" + - "woohoo" + ui_options: + disclaimer: "Cool upgrade" + + - name: unfit_version versions: - min: '1.0' - max: '2.0' + min_strict: '0' + max: '0.3' + states: *always_available + + - name: unfit_state + versions: *correct_versions states: - available: any + available: + - doesnotexist actions: provider_action: diff --git a/python/api_v2/tests/test_actions.py b/python/api_v2/tests/test_actions.py new file mode 100644 index 0000000000..493e3e3b09 --- /dev/null +++ b/python/api_v2/tests/test_actions.py @@ -0,0 +1,271 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from functools import partial +from typing import TypeAlias + +from api_v2.tests.base import BaseAPITestCase +from cm.models import ( + Action, + Cluster, + ClusterObject, + Host, + HostComponent, + HostProvider, + MaintenanceMode, + ServiceComponent, +) +from django.urls import reverse +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_409_CONFLICT + +ObjectWithActions: TypeAlias = Cluster | ClusterObject | ServiceComponent | HostProvider | Host + + +class TestActionsFiltering(BaseAPITestCase): # pylint: disable=too-many-instance-attributes + def setUp(self) -> None: + super().setUp() + + cluster_bundle = self.add_bundle(self.test_bundles_dir / "cluster_actions") + self.cluster = self.add_cluster(cluster_bundle, "Cluster with Actions") + self.service_1 = self.add_service_to_cluster("service_1", self.cluster) + self.component_1: ServiceComponent = ServiceComponent.objects.get( + service=self.service_1, prototype__name="component_1" + ) + self.add_service_to_cluster("service_2", self.cluster) + + provider_bundle = self.add_bundle(self.test_bundles_dir / "provider_actions") + self.hostprovider = self.add_provider(provider_bundle, "Provider with Actions") + self.host_1 = self.add_host(provider_bundle, self.hostprovider, "host-1") + self.host_2 = self.add_host(provider_bundle, self.hostprovider, "host-2") + + self.available_at_any = ["state_any"] + common_at_created = [*self.available_at_any, "state_created", "state_created_masking"] + self.available_at_created_no_multi = [ + *common_at_created, + "multi_flag_unavailable", + "state_created_available_multi_bag_unavailable", + ] + self.available_at_created_flag = [ + *common_at_created, + "multi_flag_masking", + "state_created_available_multi_bag_unavailable", + ] + self.available_at_created_bag = [ + *common_at_created, + "multi_flag_unavailable", + "state_created_available_multi_bag_available", + ] + + common_at_installed = [ + *self.available_at_any, + "state_installed", + "state_installed_masking", + "state_created_unavailable", + ] + self.available_at_installed_no_multi = [ + *common_at_installed, + "multi_flag_unavailable", + "state_created_unavailable_multi_bag_unavailable", + ] + self.available_at_installed_flag = [ + *common_at_installed, + "multi_flag_masking", + "state_created_unavailable_multi_bag_unavailable", + ] + self.available_at_installed_bag = [ + *common_at_installed, + "multi_flag_unavailable", + 
"state_created_unavailable_multi_bag_available", + ] + + self.installed_state = "installed" + self.flag_multi_state = "flag" + self.bag_multi_state = "bag" + + def test_filter_object_own_actions_success(self) -> None: + for object_ in (self.cluster, self.service_1, self.component_1, self.hostprovider, self.host_1): + viewname, object_kwargs = self.get_viewname_and_kwargs_for_object(object_=object_) + with self.subTest(msg=f"{object_.__class__.__name__} at different states"): + self.check_object_action_list( + viewname=viewname, object_kwargs=object_kwargs, expected_actions=self.available_at_created_no_multi + ) + + object_.set_multi_state(self.flag_multi_state) + + self.check_object_action_list( + viewname=viewname, object_kwargs=object_kwargs, expected_actions=self.available_at_created_flag + ) + + object_.unset_multi_state(self.flag_multi_state) + object_.set_multi_state(self.bag_multi_state) + + self.check_object_action_list( + viewname=viewname, object_kwargs=object_kwargs, expected_actions=self.available_at_created_bag + ) + + object_.unset_multi_state(self.bag_multi_state) + object_.set_state(self.installed_state) + + self.check_object_action_list( + viewname=viewname, + object_kwargs=object_kwargs, + expected_actions=self.available_at_installed_no_multi, + ) + + object_.set_multi_state(self.flag_multi_state) + + self.check_object_action_list( + viewname=viewname, object_kwargs=object_kwargs, expected_actions=self.available_at_installed_flag + ) + + object_.unset_multi_state(self.flag_multi_state) + object_.set_multi_state(self.bag_multi_state) + + self.check_object_action_list( + viewname=viewname, object_kwargs=object_kwargs, expected_actions=self.available_at_installed_bag + ) + + def test_filter_host_actions_success(self) -> None: + check_host_1_actions = partial( + self.check_object_action_list, *self.get_viewname_and_kwargs_for_object(object_=self.host_1) + ) + check_host_2_actions = partial( + self.check_object_action_list, *self.get_viewname_and_kwargs_for_object(object_=self.host_2) + ) + any_cluster = "from cluster any" + any_all = (any_cluster, "from service any", "from component any") + cluster_host_actions = ["cluster_host_action_allowed", "cluster_host_action_disallowed"] + + self.add_host_to_cluster(self.cluster, self.host_1) + check_host_1_actions(expected_actions=[*self.available_at_created_no_multi, any_cluster, *cluster_host_actions]) + check_host_2_actions(expected_actions=self.available_at_created_no_multi) + + HostComponent.objects.create( + cluster=self.cluster, host=self.host_1, service=self.service_1, component=self.component_1 + ) + check_host_1_actions( + expected_actions=[*self.available_at_created_no_multi, *any_all, *cluster_host_actions * 3] + ) + check_host_2_actions(expected_actions=self.available_at_created_no_multi) + + self.add_host_to_cluster(self.cluster, self.host_2) + check_host_2_actions(expected_actions=[*self.available_at_created_no_multi, any_cluster, *cluster_host_actions]) + + self.service_1.set_state(self.installed_state) + check_host_1_actions( + expected_actions=[ + *self.available_at_created_no_multi, + *any_all, + *cluster_host_actions * 3, + "from service installed", + ] + ) + check_host_2_actions(expected_actions=[*self.available_at_created_no_multi, any_cluster, *cluster_host_actions]) + + self.component_1.set_state(self.installed_state) + self.component_1.set_multi_state(self.flag_multi_state) + check_host_1_actions( + expected_actions=[ + *self.available_at_created_no_multi, + *any_all, + "from service installed", + "from 
component installed", + "from component multi flag", + *cluster_host_actions * 3, + ] + ) + check_host_2_actions(expected_actions=[*self.available_at_created_no_multi, any_cluster, *cluster_host_actions]) + + self.cluster.set_state("woohoo") + self.cluster.set_multi_state("flag") + check_host_1_actions( + expected_actions=[ + *self.available_at_created_no_multi, + *any_all, + "from cluster multi flag", + "from service installed", + "from component installed", + "from component multi flag", + *cluster_host_actions * 3, + ] + ) + check_host_2_actions( + expected_actions=[ + *self.available_at_created_no_multi, + any_cluster, + "from cluster multi flag", + *cluster_host_actions, + ] + ) + + def test_adcm_4516_disallowed_host_action_not_executable_success(self) -> None: + self.add_host_to_cluster(self.cluster, self.host_1) + allowed_action = Action.objects.filter(display_name="cluster_host_action_allowed").first() + disallowed_action = Action.objects.filter(display_name="cluster_host_action_disallowed").first() + check_host_1_actions = partial( + self.check_object_action_list, *self.get_viewname_and_kwargs_for_object(object_=self.host_1) + ) + check_host_1_actions( + expected_actions=[ + *self.available_at_created_no_multi, + "from cluster any", + "cluster_host_action_allowed", + "cluster_host_action_disallowed", + ] + ) + + self.host_1.maintenance_mode = MaintenanceMode.ON + + response = self.client.post( + path=reverse( + viewname="v2:cluster-action-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": allowed_action.pk}, + ), + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + response = self.client.post( + path=reverse( + viewname="v2:cluster-action-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": disallowed_action.pk}, + ), + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, + ) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + + @staticmethod + def get_viewname_and_kwargs_for_object(object_: ObjectWithActions) -> tuple[str, dict[str, int]]: + if isinstance(object_, ClusterObject): + return "v2:service-action-list", {"service_pk": object_.pk, "cluster_pk": object_.cluster.pk} + + if isinstance(object_, ServiceComponent): + return "v2:component-action-list", { + "component_pk": object_.pk, + "service_pk": object_.service.pk, + "cluster_pk": object_.cluster.pk, + } + + classname: str = object_.__class__.__name__.lower() + # change hostp->p is for hostprovider->provider mutation for viewname + return f"v2:{classname.replace('hostp', 'p')}-action-list", {f"{classname}_pk": object_.pk} + + def check_object_action_list(self, viewname: str, object_kwargs: dict, expected_actions: list[str]) -> None: + response: Response = self.client.get(path=reverse(viewname=viewname, kwargs=object_kwargs)) + + self.assertEqual(response.status_code, HTTP_200_OK) + + data = response.json() + self.assertTrue(isinstance(data, list)) + self.assertTrue(all("displayName" in entry for entry in data)) + actual_actions = sorted(entry["displayName"] for entry in data) + self.assertListEqual(actual_actions, sorted(expected_actions)) diff --git a/python/api_v2/tests/test_audit.py b/python/api_v2/tests/test_audit.py index e9f7766cb3..e6fc9b93d8 100644 --- a/python/api_v2/tests/test_audit.py +++ b/python/api_v2/tests/test_audit.py @@ -9,7 +9,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=too-many-lines from datetime import timedelta @@ -25,7 +24,7 @@ class TestAudit(BaseAPITestCase): def setUp(self) -> None: super().setUp() self.username = self.password = "user" - self.user = User.objects.create_user(self.username, "user@example.com", self.password) + self.user = User.objects.create_superuser(self.username, "user@example.com", self.password) self.login_for_audit(username=self.username, password=self.password) last_login = AuditSession.objects.last() self.last_login_id = last_login.id @@ -44,31 +43,31 @@ def test_logins_success(self): response = self.client.get( path=reverse(viewname="v2:audit:auditsession-list"), ) - self.assertEqual(response.json()["results"][0]["login_details"], {"username": self.username}) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["results"][0]["user"], {"name": self.username}) def test_logins_time_filtering_success(self): response = self.client.get( path=reverse(viewname="v2:audit:auditsession-list"), data={"time_to": self.time_to, "time_from": self.time_from}, ) - self.assertEqual(response.json()["results"][0]["login_details"], {"username": self.username}) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["results"][0]["user"], {"name": self.username}) def test_logins_time_filtering_empty_list_success(self): response = self.client.get( path=reverse(viewname="v2:audit:auditsession-list"), - data={"time_to": self.time_from, "time_from": self.time_to}, + data={"timeTo": self.time_from, "timeFrom": self.time_to}, ) - self.assertEqual(len(response.json()["results"]), 0) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["results"]), 0) def test_logins_retrieve_success(self): response = self.client.get( path=reverse(viewname="v2:audit:auditsession-detail", kwargs={"pk": self.last_login_id}) ) - self.assertEqual(response.json()["login_details"]["username"], self.username) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["user"]["name"], self.username) def test_logins_retrieve_not_found_fail(self): response = self.client.get( diff --git a/python/api_v2/tests/test_bundle.py b/python/api_v2/tests/test_bundle.py index 72d51e93ca..432c6d416a 100644 --- a/python/api_v2/tests/test_bundle.py +++ b/python/api_v2/tests/test_bundle.py @@ -28,11 +28,11 @@ class TestBundle(BaseAPITestCase): def setUp(self) -> None: self.client.login(username="admin", password="admin") - cluster_bundle_1_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_one" + cluster_bundle_1_path = self.test_bundles_dir / "cluster_one" self.bundle_1 = self.add_bundle(source_dir=cluster_bundle_1_path) - cluster_new_bundle_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_two" + cluster_new_bundle_path = self.test_bundles_dir / "cluster_two" self.new_bundle_file = self.prepare_bundle_file(source_dir=cluster_new_bundle_path) def test_list_success(self): diff --git a/python/api_v2/tests/test_cluster.py b/python/api_v2/tests/test_cluster.py index 86f58d35a6..df8af7d823 100644 --- a/python/api_v2/tests/test_cluster.py +++ b/python/api_v2/tests/test_cluster.py @@ -14,14 +14,24 @@ from unittest.mock import patch from api_v2.tests.base import BaseAPITestCase -from cm.models import Action, ADCMEntityStatus, Cluster +from cm.models import ( + Action, + ADCMEntityStatus, + 
Cluster, + ClusterObject, + Prototype, + TaskLog, +) +from django.contrib.contenttypes.models import ContentType from django.urls import reverse -from rest_framework.response import Response +from django.utils import timezone from rest_framework.status import ( HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, ) @@ -36,13 +46,13 @@ def inner(cluster: Cluster) -> int: return inner def test_list_success(self): - response: Response = self.client.get(path=reverse(viewname="v2:cluster-list")) + response = self.client.get(path=reverse(viewname="v2:cluster-list")) self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 2) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), ) @@ -50,7 +60,7 @@ def test_retrieve_success(self): self.assertEqual(response.json()["id"], self.cluster_1.pk) def test_filter_by_name_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), data={"name": self.cluster_1.name}, ) @@ -59,7 +69,7 @@ def test_filter_by_name_success(self): self.assertEqual(response.json()["count"], 1) def test_filter_by_wrong_name_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), data={"name": "wrong"}, ) @@ -69,7 +79,7 @@ def test_filter_by_wrong_name_success(self): def test_filter_by_status_up_success(self): with patch("api_v2.cluster.filters.get_cluster_status", new_callable=self.get_cluster_status_mock): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), data={"status": ADCMEntityStatus.UP}, ) @@ -80,7 +90,7 @@ def test_filter_by_status_up_success(self): def test_filter_by_status_down_success(self): with patch("api_v2.cluster.filters.get_cluster_status", new_callable=self.get_cluster_status_mock): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), data={"status": ADCMEntityStatus.DOWN}, ) @@ -90,9 +100,9 @@ def test_filter_by_status_down_success(self): self.assertEqual(response.json()["results"][0]["id"], self.cluster_2.pk) def test_filter_by_prototype_name_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), - data={"prototype_name": self.cluster_1.prototype.name}, + data={"prototypeDisplayName": self.cluster_1.prototype.name}, ) self.assertEqual(response.status_code, HTTP_200_OK) @@ -100,19 +110,19 @@ def test_filter_by_prototype_name_success(self): self.assertEqual(response.json()["results"][0]["id"], self.cluster_1.pk) def test_filter_by_wrong_prototype_name_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-list"), - data={"prototype_name": "wrong"}, + data={"prototypeDisplayName": "wrong"}, ) self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 0) def test_create_success(self): - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v2:cluster-list"), data={ - "prototype": self.cluster_1.prototype.pk, + "prototype_id": self.cluster_1.prototype.pk, "name": "new_test_cluster", "description": "Test cluster description", }, @@ -120,9 +130,41 @@ def test_create_success(self): 
self.assertEqual(response.status_code, HTTP_201_CREATED) + def test_create_same_name_fail(self): + response = self.client.post( + path=reverse(viewname="v2:cluster-list"), + data={ + "prototype_id": self.cluster_1.prototype.pk, + "name": self.cluster_1.name, + "description": "Test cluster description", + }, + ) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + + def test_update_failed(self): + wrong_cluster_name = "__new_test_cluster_name" + correct_cluster_name = "new_test_cluster_name" + + response = self.client.patch( + path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), + data={"name": wrong_cluster_name}, + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + + self.cluster_1.state = "not_created" + self.cluster_1.save(update_fields=["state"]) + + response = self.client.patch( + path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), + data={"name": correct_cluster_name}, + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + def test_update_success(self): new_test_cluster_name = "new_test_cluster_name" - response: Response = self.client.patch( + response = self.client.patch( path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), data={"name": new_test_cluster_name}, ) @@ -134,7 +176,7 @@ def test_update_success(self): self.assertEqual(self.cluster_1.name, new_test_cluster_name) def test_delete_success(self): - response: Response = self.client.delete( + response = self.client.delete( path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), ) @@ -142,30 +184,42 @@ def test_delete_success(self): self.assertFalse(Cluster.objects.filter(pk=self.cluster_1.pk).exists()) def test_service_prototypes_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-service-prototypes", kwargs={"pk": self.cluster_1.pk}), ) self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(len(response.json()), 3) + def test_service_create_success(self): + service_prototype = Prototype.objects.filter(type="service").first() + response = self.client.post( + path=reverse(viewname="v2:service-list", kwargs={"cluster_pk": self.cluster_1.pk}), + data=[{"prototype_id": service_prototype.pk}], + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(response.json()[0]["name"], service_prototype.name) + self.assertEqual(ClusterObject.objects.get(cluster_id=self.cluster_1.pk).name, "service_1") + class TestClusterActions(BaseAPITestCase): def setUp(self) -> None: super().setUp() self.cluster_action = Action.objects.get(prototype=self.cluster_1.prototype, name="action") + self.cluster_action_with_config = Action.objects.get(prototype=self.cluster_1.prototype, name="with_config") + self.cluster_action_with_hc = Action.objects.get(prototype=self.cluster_1.prototype, name="with_hc") def test_list_cluster_actions_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-action-list", kwargs={"cluster_pk": self.cluster_1.pk}), ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) + self.assertEqual(len(response.json()), 3) def test_list_cluster_actions_no_actions_cluster_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-action-list", kwargs={"cluster_pk": self.cluster_2.pk}), ) @@ -173,7 +227,7 @@ def 
test_list_cluster_actions_no_actions_cluster_success(self): self.assertListEqual(response.json(), []) def test_list_cluster_actions_wrong_cluster_fail(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-action-list", kwargs={"cluster_pk": self.get_non_existent_pk(model=Cluster)} ), @@ -182,7 +236,7 @@ def test_list_cluster_actions_wrong_cluster_fail(self): self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_retrieve_cluster_action_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-action-detail", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_action.pk}, @@ -192,13 +246,76 @@ def test_retrieve_cluster_action_success(self): self.assertEqual(response.status_code, HTTP_200_OK) def test_run_cluster_action_success(self): - with patch("api_v2.action.views.start_task"): - response: Response = self.client.post( + tasklog = TaskLog.objects.create( + object_id=self.cluster_1.pk, + object_type=ContentType.objects.get(app_label="cm", model="cluster"), + start_date=timezone.now(), + finish_date=timezone.now(), + action=self.cluster_action, + ) + + with patch("api_v2.action.views.start_task", return_value=tasklog): + response = self.client.post( path=reverse( viewname="v2:cluster-action-run", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_action.pk}, ), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_retrieve_action_with_config_success(self): + response = self.client.get( + path=reverse( + viewname="v2:cluster-action-detail", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_action_with_config.pk}, + ) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + + attributes = response.json()["configSchema"]["fields"] + self.assertEqual(len(attributes), 3) + self.assertEqual([attr["name"] for attr in attributes], ["simple", "grouped", "after"]) + self.assertEqual([attr["name"] for attr in attributes[1]["children"]], ["simple", "second"]) + self.assertEqual(attributes[0]["default"], None) + self.assertEqual(attributes[1]["children"][0]["default"], 4) + + def test_run_action_with_config_success(self): + tasklog = TaskLog.objects.create( + object_id=self.cluster_1.pk, + object_type=ContentType.objects.get(app_label="cm", model="cluster"), + start_date=timezone.now(), + finish_date=timezone.now(), + action=self.cluster_action_with_config, + ) + + config = {"simple": "kuku", "grouped": {"simple": 5, "second": 4.3}, "after": ["something"]} + + with patch("cm.job.start_task", return_value=tasklog): + response = self.client.post( + path=reverse( + viewname="v2:cluster-action-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_action_with_config.pk}, + ), + data={"host_component_map": [], "config": config, "attr": {}, "is_verbose": False}, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_retrieve_action_with_hc_success(self): + response = self.client.get( + path=reverse( + viewname="v2:cluster-action-detail", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_action_with_hc.pk}, ) + ) self.assertEqual(response.status_code, HTTP_200_OK) + + hc_map = response.json()["hostComponentMapRules"] + self.assertEqual(len(hc_map), 2) + add, remove = sorted(hc_map, key=lambda rec: rec["action"]) + 
self.assertDictEqual(add, {"action": "add", "component": "component_1", "service": "service_1"}) + self.assertDictEqual(remove, {"action": "remove", "component": "component_2", "service": "service_1"}) diff --git a/python/api_v2/tests/test_component.py b/python/api_v2/tests/test_component.py index c4e0d09c44..186ef99c9d 100644 --- a/python/api_v2/tests/test_component.py +++ b/python/api_v2/tests/test_component.py @@ -14,7 +14,7 @@ from cm.models import Action, MaintenanceMode, ServiceComponent from django.urls import reverse from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT +from rest_framework.status import HTTP_200_OK, HTTP_405_METHOD_NOT_ALLOWED class TestComponentAPI(BaseAPITestCase): @@ -68,8 +68,7 @@ def test_delete_success(self): ), ) - self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - self.assertFalse(ServiceComponent.objects.filter(pk=self.component_1.pk).exists()) + self.assertEqual(response.status_code, HTTP_405_METHOD_NOT_ALLOWED) def test_action_list_success(self): response: Response = self.client.get( @@ -113,7 +112,7 @@ def test_action_run_success(self): "pk": self.action_1.pk, }, ), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, ) self.assertEqual(response.status_code, HTTP_200_OK) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py new file mode 100644 index 0000000000..b08c660025 --- /dev/null +++ b/python/api_v2/tests/test_concerns.py @@ -0,0 +1,198 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
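+# Editor's note (annotation added in review, not part of the upstream change):
+# the tests below pin the exact `reason` payload of each concern raised by the
+# fixture bundles registered in setUp(). The message text is resolved from
+# MessageTemplate via the matching KnownNames entry, and the placeholders
+# reference the source object (plus a target prototype, where one exists) by
+# primary key. As an illustrative sketch only, a cluster-level concern reason
+# is expected to look roughly like:
+#
+#     {
+#         "message": "<template text from MessageTemplate>",
+#         "placeholder": {
+#             "source": {"type": "cluster", "name": cluster.name, "params": {"clusterId": cluster.pk}},
+#         },
+#     }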
+ +from api_v2.tests.base import BaseAPITestCase +from cm.models import ( + KnownNames, + MessageTemplate, + ObjectType, + Prototype, + PrototypeImport, +) +from django.urls import reverse +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED + + +class TestConcernsResponse(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + bundle_dir = self.test_bundles_dir / "cluster_with_required_service" + self.required_service_bundle = self.add_bundle(source_dir=bundle_dir) + + bundle_dir = self.test_bundles_dir / "cluster_with_required_config_field" + self.required_config_bundle = self.add_bundle(source_dir=bundle_dir) + + bundle_dir = self.test_bundles_dir / "cluster_with_required_import" + self.required_import_bundle = self.add_bundle(source_dir=bundle_dir) + + bundle_dir = self.test_bundles_dir / "cluster_with_required_hc" + self.required_hc_bundle = self.add_bundle(source_dir=bundle_dir) + + bundle_dir = self.test_bundles_dir / "cluster_with_allowed_flags" + self.config_flag_bundle = self.add_bundle(source_dir=bundle_dir) + + bundle_dir = self.test_bundles_dir / "cluster_with_service_requirements" + self.service_requirements_bundle = self.add_bundle(source_dir=bundle_dir) + + def test_required_service_concern(self): + cluster = self.add_cluster(bundle=self.required_service_bundle, name="required_service_cluster") + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.REQUIRED_SERVICE_ISSUE.value).template["message"], + "placeholder": { + "source": {"type": "cluster", "name": cluster.name, "params": {"clusterId": cluster.pk}}, + "target": { + "params": { + "prototypeId": Prototype.objects.get( + type=ObjectType.SERVICE, name="service_required", required=True + ).pk + }, + "type": "prototype", + "name": "service_required", + }, + }, + } + + response: Response = self.client.get(path=reverse(viewname="v2:cluster-detail", kwargs={"pk": cluster.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], expected_concern_reason) + + def test_required_config_concern(self): + cluster = self.add_cluster(bundle=self.required_config_bundle, name="required_config_cluster") + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.CONFIG_ISSUE.value).template["message"], + "placeholder": {"source": {"name": cluster.name, "params": {"clusterId": cluster.pk}, "type": "cluster"}}, + } + + response: Response = self.client.get(path=reverse(viewname="v2:cluster-detail", kwargs={"pk": cluster.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], expected_concern_reason) + + def test_required_import_concern(self): + cluster = self.add_cluster(bundle=self.required_import_bundle, name="required_import_cluster") + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.REQUIRED_IMPORT_ISSUE.value).template["message"], + "placeholder": {"source": {"name": cluster.name, "params": {"clusterId": cluster.pk}, "type": "cluster"}}, + } + + response: Response = self.client.get(path=reverse(viewname="v2:cluster-detail", kwargs={"pk": cluster.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], 
expected_concern_reason) + + def test_required_hc_concern(self): + cluster = self.add_cluster(bundle=self.required_hc_bundle, name="required_hc_cluster") + self.add_service_to_cluster(service_name="service_1", cluster=cluster) + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.HOST_COMPONENT_ISSUE.value).template["message"], + "placeholder": {"source": {"name": cluster.name, "params": {"clusterId": cluster.pk}, "type": "cluster"}}, + } + + response: Response = self.client.get(path=reverse(viewname="v2:cluster-detail", kwargs={"pk": cluster.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], expected_concern_reason) + + def test_outdated_config_flag(self): + cluster = self.add_cluster(bundle=self.config_flag_bundle, name="config_flag_cluster") + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.CONFIG_FLAG.value).template["message"], + "placeholder": {"source": {"name": cluster.name, "params": {"clusterId": cluster.pk}, "type": "cluster"}}, + } + + response: Response = self.client.post( + path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": cluster.pk}), + data={"config": {"string": "new_string"}, "adcmMeta": {}, "description": ""}, + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + + response: Response = self.client.get(path=reverse(viewname="v2:cluster-detail", kwargs={"pk": cluster.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], expected_concern_reason) + + def test_service_requirements(self): + cluster = self.add_cluster(bundle=self.service_requirements_bundle, name="service_requirements_cluster") + service = self.add_service_to_cluster(service_name="service_1", cluster=cluster) + expected_concern_reason = { + "message": MessageTemplate.objects.get(name=KnownNames.UNSATISFIED_REQUIREMENT_ISSUE.value).template[ + "message" + ], + "placeholder": { + "source": { + "name": service.name, + "params": {"clusterId": cluster.pk, "serviceId": service.pk}, + "type": "service", + }, + "target": { + "name": "some_other_service", + "params": { + "prototypeId": Prototype.objects.get(type=ObjectType.SERVICE, name="some_other_service").pk + }, + "type": "prototype", + }, + }, + } + + response: Response = self.client.get( + path=reverse(viewname="v2:service-detail", kwargs={"cluster_pk": cluster.pk, "pk": service.pk}) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertDictEqual(response.json()["concerns"][0]["reason"], expected_concern_reason) + + +class TestConcernsLogic(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + bundle_dir = self.test_bundles_dir / "cluster_with_required_import" + self.required_import_bundle = self.add_bundle(source_dir=bundle_dir) + + def test_import_concern_resolved_after_saving_import(self): + import_cluster = self.add_cluster(bundle=self.required_import_bundle, name="required_import_cluster") + export_cluster = self.cluster_1 + + response: Response = self.client.get( + path=reverse(viewname="v2:cluster-detail", kwargs={"pk": import_cluster.pk}) + ) + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertEqual(import_cluster.concerns.count(), 1) + + self.client.post( + 
path=reverse(viewname="v2:cluster-import-list", kwargs={"cluster_pk": import_cluster.pk}), + data=[{"source": {"id": export_cluster.pk, "type": ObjectType.CLUSTER}}], + ) + + response: Response = self.client.get( + path=reverse(viewname="v2:cluster-detail", kwargs={"pk": import_cluster.pk}) + ) + self.assertEqual(len(response.json()["concerns"]), 0) + self.assertEqual(import_cluster.concerns.count(), 0) + + def test_non_required_import_do_not_raises_concern(self): + self.assertGreater(PrototypeImport.objects.filter(prototype=self.cluster_2.prototype).count(), 0) + + response: Response = self.client.get( + path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_2.pk}) + ) + self.assertEqual(len(response.json()["concerns"]), 0) + self.assertEqual(self.cluster_2.concerns.count(), 0) diff --git a/python/api_v2/tests/test_config.py b/python/api_v2/tests/test_config.py index 265c8086e2..264c58833b 100644 --- a/python/api_v2/tests/test_config.py +++ b/python/api_v2/tests/test_config.py @@ -10,12 +10,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +from api_v2.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta from api_v2.tests.base import BaseAPITestCase -from cm.models import ConfigLog, GroupConfig, Host, HostProvider, ServiceComponent +from cm.inventory import get_obj_config +from cm.models import ADCM, ConfigLog, GroupConfig, Host, HostProvider, ServiceComponent from django.contrib.contenttypes.models import ContentType from rest_framework.response import Response from rest_framework.reverse import reverse -from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_404_NOT_FOUND +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, +) class TestClusterConfig(BaseAPITestCase): @@ -25,22 +32,19 @@ def setUp(self) -> None: self.cluster_1_config = ConfigLog.objects.get(id=self.cluster_1.config.current) def test_list_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": self.cluster_1.pk}) ) - data = { - "creation_time": self.cluster_1_config.date.isoformat().replace("+00:00", "Z"), - "description": self.cluster_1_config.description, - "id": self.cluster_1_config.pk, - "is_current": True, - } self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - self.assertDictEqual(response.json()["results"][0], data) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + sorted(["id", "isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-config-detail", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_1_config.pk}, @@ -49,35 +53,104 @@ def test_retrieve_success(self): self.assertEqual(response.status_code, HTTP_200_OK) data = { - "attr": self.cluster_1_config.attr, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "config": self.cluster_1_config.config, - "creation_time": self.cluster_1_config.date.isoformat().replace("+00:00", "Z"), + "creationTime": self.cluster_1_config.date.isoformat().replace("+00:00", "Z"), "description": self.cluster_1_config.description, "id": self.cluster_1_config.pk, - "is_current": True, + "isCurrent": True, } self.assertDictEqual(response.json(), data) def test_create_success(self): data = { - 
"config": {"string": "new string", "group": {"string": "new string"}}, - "attr": {}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": False}}, "description": "new config", } - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": self.cluster_1.pk}), data=data ) self.assertEqual(response.status_code, HTTP_201_CREATED) - response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) + self.assertEqual(response_data["isCurrent"], True) + + def test_create_bad_attr_fail(self): + data = { + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"bad_key": "bad_value"}, + "description": "new config", + } + response = self.client.post( + path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": self.cluster_1.pk}), data=data + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + { + "code": "ATTRIBUTE_ERROR", + "desc": 'there isn\'t `bad_key` group in the config (cluster "cluster_one" 1.0)', + "level": "error", + }, + ) + + def test_create_bad_and_good_attr_fail(self): + data = { + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": False}, "/bad_key": {"isActive": False}}, + "description": "new config", + } + response = self.client.post( + path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": self.cluster_1.pk}), data=data + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + { + "code": "ATTRIBUTE_ERROR", + "desc": 'there isn\'t `bad_key` group in the config (cluster "cluster_one" 1.0)', + "level": "error", + }, + ) def test_schema_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-config-schema", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_1_config.pk}, @@ -97,6 +170,39 @@ def test_schema_success(self): "options": [], "children": [], }, + { + "children": [], + "default": None, + "displayName": "map_not_required", + "isActive": False, + "isReadOnly": False, + "name": "map_not_required", + "options": [], + "type": "map", + "validation": {"isRequired": False, "maxValue": None, 
"minValue": None}, + }, + { + "name": "list", + "displayName": "list", + "type": "list", + "default": ["value1", "value2", "value3"], + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + { + "name": "boolean", + "displayName": "boolean", + "type": "boolean", + "default": True, + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, { "name": "group", "displayName": "group", @@ -108,10 +214,77 @@ def test_schema_success(self): "options": [], "children": [ { - "name": "string", - "displayName": "string", - "type": "string", - "default": "string", + "name": "float", + "displayName": "float", + "type": "float", + "default": 0.1, + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + { + "name": "map", + "displayName": "map", + "type": "map", + "default": {"integerKey": "10", "stringKey": "string"}, + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + { + "name": "text", + "displayName": "text", + "type": "text", + "default": "text", + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + ], + }, + { + "name": "activatable_group", + "displayName": "activatable_group", + "type": "group", + "default": None, + "isReadOnly": False, + "isActive": True, + "validation": {"isRequired": True, "minValue": None, "maxValue": None}, + "options": [], + "children": [ + { + "name": "integer", + "displayName": "integer", + "type": "integer", + "default": 10, + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + { + "name": "json", + "displayName": "json", + "type": "json", + "default": {"key": "value"}, + "isReadOnly": False, + "isActive": False, + "validation": {"isRequired": False, "minValue": None, "maxValue": None}, + "options": [], + "children": [], + }, + { + "name": "structure", + "displayName": "structure", + "type": "structure", + "default": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], "isReadOnly": False, "isActive": False, "validation": {"isRequired": False, "minValue": None, "maxValue": None}, @@ -124,6 +297,24 @@ def test_schema_success(self): self.assertListEqual(response.json(), data) +class TestMapTypeConfig(BaseAPITestCase): + def test_absent_not_required_map_config_processing_success(self): + new_config = {"string": "new string value"} + response: Response = self.client.post( + path=reverse(viewname="v2:cluster-config-list", kwargs={"cluster_pk": self.cluster_1.pk}), + data={"config": new_config, "attr": {}, "adcmMeta": {"/activatable_group": {"isActive": False}}}, + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + + self.cluster_1.refresh_from_db() + processed_config = get_obj_config(obj=self.cluster_1) + self.assertDictEqual(processed_config, {"activatable_group": None, **new_config}) + + def test_not_required_no_default_map_config_processing_success(self): + processed_config = get_obj_config(obj=self.cluster_1) + self.assertDictEqual(processed_config["map_not_required"], {}) + + class 
TestClusterGroupConfig(BaseAPITestCase): def setUp(self) -> None: super().setUp() @@ -136,7 +327,7 @@ def setUp(self) -> None: self.cluster_1_group_config_config = ConfigLog.objects.get(pk=self.cluster_1_group_config.config.current) def test_list_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-config-group-config-list", kwargs={"cluster_pk": self.cluster_1.pk, "config_group_pk": self.cluster_1_group_config.pk}, @@ -144,9 +335,13 @@ def test_list_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + sorted(["id", "isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:cluster-config-group-config-detail", kwargs={ @@ -157,23 +352,58 @@ def test_retrieve_success(self): ) ) self.assertEqual(response.status_code, HTTP_200_OK) - attr = { - "custom_group_keys": {"group": {"fields": {"string": True}, "value": True}, "string": True}, - "group_keys": {"group": {"fields": {"string": False}, "value": None}, "string": False}, + data = { + "id": self.cluster_1_group_config_config.pk, + "isCurrent": True, + "creationTime": self.cluster_1_group_config_config.date.isoformat().replace("+00:00", "Z"), + "config": self.cluster_1_group_config_config.config, + "adcmMeta": { + "/string": {"isSynchronized": False}, + "/map_not_required": {"isSynchronized": False}, + "/list": {"isSynchronized": False}, + "/boolean": {"isSynchronized": False}, + "/group/float": {"isSynchronized": False}, + "/group/map": {"isSynchronized": False}, + "/group/text": {"isSynchronized": False}, + "/activatable_group": {"isSynchronized": False, "isActive": True}, + "/activatable_group/integer": {"isSynchronized": False}, + "/activatable_group/json": {"isSynchronized": False}, + "/activatable_group/structure": {"isSynchronized": False}, + }, + "description": self.cluster_1_group_config_config.description, } - self.assertDictEqual(response.json()["attr"], attr) + self.assertDictEqual(response.json(), data) def test_create_success(self): data = { - "config": {"string": "new string", "group": {"string": "new string"}}, - "attr": { - "custom_group_keys": {"group": {"fields": {"string": True}, "value": True}, "string": True}, - "group_keys": {"group": {"fields": {"string": True}, "value": None}, "string": True}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": { + "/string": {"isSynchronized": False}, + "/map_not_required": {"isSynchronized": False}, + "/list": {"isSynchronized": False}, + "/boolean": {"isSynchronized": False}, + "/group/float": {"isSynchronized": False}, + "/group/map": {"isSynchronized": False}, + "/group/text": {"isSynchronized": False}, + "/activatable_group": {"isSynchronized": False, "isActive": True}, + "/activatable_group/integer": {"isSynchronized": False}, + "/activatable_group/json": {"isSynchronized": False}, + "/activatable_group/structure": {"isSynchronized": False}, }, "description": "new config", } - response: Response = self.client.post( + response = 
self.client.post( path=reverse( viewname="v2:cluster-config-group-config-list", kwargs={"cluster_pk": self.cluster_1.pk, "config_group_pk": self.cluster_1_group_config.pk}, @@ -185,9 +415,89 @@ def test_create_success(self): response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) + self.assertEqual(response_data["isCurrent"], True) + + def test_create_bad_attr_fail(self): + data = { + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": { + "bad_key": "bad_value", + }, + "description": "new config", + } + + response = self.client.post( + path=reverse( + viewname="v2:cluster-config-group-config-list", + kwargs={"cluster_pk": self.cluster_1.pk, "config_group_pk": self.cluster_1_group_config.pk}, + ), + data=data, + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + { + "code": "ATTRIBUTE_ERROR", + "desc": 'there isn\'t `bad_key` group in the config (cluster "cluster_one" 1.0)', + "level": "error", + }, + ) + + def test_create_bad_and_good_fail(self): + data = { + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": { + "/string": {"isSynchronized": False}, + "/list": {"isSynchronized": False}, + "/boolean": {"isSynchronized": False}, + "/group/float": {"isSynchronized": False}, + "/group/map": {"isSynchronized": False}, + "/group/text": {"isSynchronized": False}, + "/activatable_group": {"isSynchronized": False, "isActive": True}, + "/activatable_group/integer": {"isSynchronized": False}, + "/activatable_group/json": {"isSynchronized": False}, + "/activatable_group/structure": {"isSynchronized": False}, + "/stringBAD": {"isSynchronized": False}, + }, + "description": "new config", + } + + response = self.client.post( + path=reverse( + viewname="v2:cluster-config-group-config-list", + kwargs={"cluster_pk": self.cluster_1.pk, "config_group_pk": self.cluster_1_group_config.pk}, + ), + data=data, + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + {"code": "ATTRIBUTE_ERROR", "desc": "invalid `stringBAD/` field in `group_keys`", "level": "error"}, + ) class TestServiceConfig(BaseAPITestCase): @@ -198,25 +508,22 @@ def setUp(self) -> None: self.service_1_initial_config = ConfigLog.objects.get(pk=self.service_1.config.current) def test_list_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:service-config-list", kwargs={"cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk}, ) ) - data = { - "creation_time": self.service_1_initial_config.date.isoformat().replace("+00:00", "Z"), - 
"description": self.service_1_initial_config.description, - "id": self.service_1_initial_config.pk, - "is_current": True, - } self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - self.assertDictEqual(response.json()["results"][0], data) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + sorted(["id", "isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:service-config-detail", kwargs={ @@ -229,22 +536,32 @@ def test_retrieve_success(self): self.assertEqual(response.status_code, HTTP_200_OK) data = { - "attr": self.service_1_initial_config.attr, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "config": self.service_1_initial_config.config, - "creation_time": self.service_1_initial_config.date.isoformat().replace("+00:00", "Z"), + "creationTime": self.service_1_initial_config.date.isoformat().replace("+00:00", "Z"), "description": self.service_1_initial_config.description, "id": self.service_1_initial_config.pk, - "is_current": True, + "isCurrent": True, } self.assertDictEqual(response.json(), data) def test_create_success(self): data = { - "config": {"string": "new string", "group": {"string": "new string"}}, - "attr": {}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "description": "new config", } - response: Response = self.client.post( + response = self.client.post( path=reverse( viewname="v2:service-config-list", kwargs={"cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk}, @@ -255,17 +572,9 @@ def test_create_success(self): response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) - - response: Response = self.client.get( - path=reverse( - viewname="v2:service-config-list", - kwargs={"cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk}, - ) - ) - self.assertEqual(response.json()["count"], 2) + self.assertEqual(response_data["isCurrent"], True) class TestComponentConfig(BaseAPITestCase): @@ -279,7 +588,7 @@ def setUp(self) -> None: self.component_1_initial_config = ConfigLog.objects.get(pk=self.component_1.config.current) def test_list_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:component-config-list", kwargs={ @@ -290,18 +599,15 @@ def test_list_success(self): ) ) - data = { - "creation_time": self.component_1_initial_config.date.isoformat().replace("+00:00", "Z"), - "description": self.component_1_initial_config.description, - "id": self.component_1_initial_config.pk, - "is_current": True, - } self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - self.assertDictEqual(response.json()["results"][0], data) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + 
sorted(["id", "isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:component-config-detail", kwargs={ @@ -315,22 +621,32 @@ def test_retrieve_success(self): self.assertEqual(response.status_code, HTTP_200_OK) data = { - "attr": self.component_1_initial_config.attr, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "config": self.component_1_initial_config.config, - "creation_time": self.component_1_initial_config.date.isoformat().replace("+00:00", "Z"), + "creationTime": self.component_1_initial_config.date.isoformat().replace("+00:00", "Z"), "description": self.component_1_initial_config.description, "id": self.component_1_initial_config.pk, - "is_current": True, + "isCurrent": True, } self.assertDictEqual(response.json(), data) def test_create_success(self): data = { - "config": {"string": "new string", "group": {"string": "new string"}}, - "attr": {}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "description": "new config", } - response: Response = self.client.post( + response = self.client.post( path=reverse( viewname="v2:component-config-list", kwargs={ @@ -345,11 +661,11 @@ def test_create_success(self): response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) + self.assertEqual(response_data["isCurrent"], True) - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:component-config-list", kwargs={ @@ -369,31 +685,28 @@ def setUp(self) -> None: self.provider_initial_config = ConfigLog.objects.get(pk=self.provider.config.current) def test_list_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:provider-config-list", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, }, ) ) - data = { - "creation_time": self.provider_initial_config.date.isoformat().replace("+00:00", "Z"), - "description": self.provider_initial_config.description, - "id": self.provider_initial_config.pk, - "is_current": True, - } self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - self.assertDictEqual(response.json()["results"][0], data) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + sorted(["id", "isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:provider-config-detail", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, "pk": self.provider_initial_config.pk, }, ) @@ -401,21 +714,21 @@ def test_retrieve_success(self): self.assertEqual(response.status_code, HTTP_200_OK) data = { - "attr": self.provider_initial_config.attr, + "adcmMeta": {"/activatable_group": 
{"isActive": True}}, "config": self.provider_initial_config.config, - "creation_time": self.provider_initial_config.date.isoformat().replace("+00:00", "Z"), + "creationTime": self.provider_initial_config.date.isoformat().replace("+00:00", "Z"), "description": self.provider_initial_config.description, "id": self.provider_initial_config.pk, - "is_current": True, + "isCurrent": True, } self.assertDictEqual(response.json(), data) def test_retrieve_wrong_pk_fail(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:provider-config-detail", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, "pk": self.get_non_existent_pk(model=ConfigLog), }, ) @@ -423,11 +736,11 @@ def test_retrieve_wrong_pk_fail(self): self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_retrieve_wrong_provider_pk_fail(self): - response: Response = self.client.get( + response = self.client.get( path=reverse( viewname="v2:provider-config-detail", kwargs={ - "provider_pk": self.get_non_existent_pk(model=HostProvider), + "hostprovider_pk": self.get_non_existent_pk(model=HostProvider), "pk": self.provider_initial_config.pk, }, ) @@ -436,15 +749,25 @@ def test_retrieve_wrong_provider_pk_fail(self): def test_create_success(self): data = { - "config": {"string": "new string"}, - "attr": {}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "description": "new config", } - response: Response = self.client.post( + response = self.client.post( path=reverse( viewname="v2:provider-config-list", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, }, ), data=data, @@ -453,19 +776,9 @@ def test_create_success(self): response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) - - response: Response = self.client.get( - path=reverse( - viewname="v2:provider-config-list", - kwargs={ - "provider_pk": self.provider.pk, - }, - ) - ) - self.assertEqual(response.json()["count"], 2) + self.assertEqual(response_data["isCurrent"], True) class TestHostConfig(BaseAPITestCase): @@ -477,43 +790,48 @@ def setUp(self) -> None: self.host_config = ConfigLog.objects.get(pk=self.host.config.current) def test_list_success(self): - response: Response = self.client.get( - path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.host.pk}) - ) + response = self.client.get(path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.host.pk})) - data = { - "creation_time": self.host_config.date.isoformat().replace("+00:00", "Z"), - "description": self.host_config.description, - "id": self.host_config.pk, - "is_current": True, - } self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - self.assertDictEqual(response.json()["results"][0], data) + self.assertListEqual( + sorted(list(response.json()["results"][0].keys())), + sorted(["id", 
"isCurrent", "creationTime", "description"]), + ) def test_retrieve_success(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:host-config-detail", kwargs={"host_pk": self.host.pk, "pk": self.host_config.pk}) ) self.assertEqual(response.status_code, HTTP_200_OK) data = { - "attr": self.host_config.attr, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "config": self.host_config.config, - "creation_time": self.host_config.date.isoformat().replace("+00:00", "Z"), + "creationTime": self.host_config.date.isoformat().replace("+00:00", "Z"), "description": self.host_config.description, "id": self.host_config.pk, - "is_current": True, + "isCurrent": True, } self.assertDictEqual(response.json(), data) def test_create_success(self): data = { - "config": {"string": "new string"}, - "attr": {}, + "config": { + "group": {"float": 0.1, "map": {"integer_key": "10", "string_key": "string"}, "text": "text"}, + "activatable_group": { + "integer": 10, + "json": {"key": "value"}, + "structure": [{"integer": 1, "string": "string1"}, {"integer": 2, "string": "string2"}], + }, + "string": "string", + "list": ["value1", "value2", "value3"], + "boolean": True, + }, + "adcmMeta": {"/activatable_group": {"isActive": True}}, "description": "new config", } - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.host.pk}), data=data, ) @@ -521,18 +839,157 @@ def test_create_success(self): response_data = response.json() self.assertDictEqual(response_data["config"], data["config"]) - self.assertDictEqual(response_data["attr"], data["attr"]) + self.assertDictEqual(response_data["adcmMeta"], data["adcmMeta"]) self.assertEqual(response_data["description"], data["description"]) - self.assertEqual(response_data["is_current"], True) + self.assertEqual(response_data["isCurrent"], True) - response: Response = self.client.get( - path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.host.pk}) - ) + response = self.client.get(path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.host.pk})) self.assertEqual(response.json()["count"], 2) def test_list_wrong_pk_fail(self): - response: Response = self.client.get( + response = self.client.get( path=reverse(viewname="v2:host-config-list", kwargs={"host_pk": self.get_non_existent_pk(Host)}) ) self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + +class TestADCMConfig(BaseAPITestCase): + def setUp(self) -> None: + self.client.login(username="admin", password="admin") + self.adcm = ADCM.objects.first() + self.adcm_current_config = ConfigLog.objects.get(id=self.adcm.config.current) + + def test_list_success(self): + response = self.client.get(path=reverse(viewname="v2:adcm:adcm-config-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertEqual(data["count"], 1) + self.assertListEqual( + sorted(list(data["results"][0].keys())), sorted(["id", "isCurrent", "creationTime", "description"]) + ) + self.assertTrue(data["results"][0]["isCurrent"]) + + def test_retrieve_success(self): + response = self.client.get( + path=reverse(viewname="v2:adcm:adcm-config-detail", kwargs={"pk": self.adcm_current_config.pk}) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(data["isCurrent"]) + self.assertDictEqual( + data["adcmMeta"], + { + "/logrotate": {"isActive": False}, + "/ldap_integration": {"isActive": False}, + 
"/statistics_collection": {"isActive": True}, + }, + ) + + def test_create_success(self): + data = { + "config": { + "global": {"adcm_url": "http://127.0.0.1:8000", "verification_public_key": "\n"}, + "google_oauth": {"client_id": None, "secret": None}, + "yandex_oauth": {"client_id": None, "secret": None}, + "ansible_settings": {"forks": 5}, + "logrotate": {"size": "10M", "max_history": 10, "compress": False}, + "audit_data_retention": { + "log_rotation_on_fs": 365, + "log_rotation_in_db": 365, + "config_rotation_in_db": 0, + "retention_period": 1825, + "data_archiving": False, + }, + "ldap_integration": { + "ldap_uri": None, + "ldap_user": None, + "ldap_password": None, + "user_search_base": None, + "user_search_filter": None, + "user_object_class": "user", + "user_name_attribute": "sAMAccountName", + "group_search_base": None, + "group_search_filter": None, + "group_object_class": "group", + "group_name_attribute": "cn", + "group_member_attribute_name": "member", + "sync_interval": 60, + "tls_ca_cert_file": None, + }, + "statistics_collection": {"url": "statistics_url"}, + "auth_policy": { + "min_password_length": 12, + "max_password_length": 20, + "login_attempt_limit": 5, + "block_time": 5, + }, + }, + "adcmMeta": { + "/logrotate": {"isActive": False}, + "/ldap_integration": {"isActive": False}, + "/statistics_collection": {"isActive": False}, + }, + "description": "new ADCM config", + } + + response = self.client.post(path=reverse(viewname="v2:adcm:adcm-config-list"), data=data) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(ConfigLog.objects.filter(obj_ref=self.adcm.config).count(), 2) + self.assertTrue(response.json()["isCurrent"]) + self.assertEqual(response.json()["description"], "new ADCM config") + + +class TestAttrTransformation(BaseAPITestCase): + def test_transformation_success(self): + attr = { + "activatable_group": {"active": True}, + "group_keys": { + "group": {"value": None, "fields": {"string": False}}, + "activatable_group": { + "value": True, + "fields": {"string": True}, + }, + "string": True, + }, + } + adcm_meta = convert_attr_to_adcm_meta(attr=attr) + expected_adcm_meta = { + "/activatable_group": {"isActive": True, "isSynchronized": True}, + "/activatable_group/string": {"isSynchronized": True}, + "/group/string": {"isSynchronized": False}, + "/string": {"isSynchronized": True}, + } + + self.assertDictEqual(adcm_meta, expected_adcm_meta) + new_attr = convert_adcm_meta_to_attr(adcm_meta=adcm_meta) + self.assertDictEqual(new_attr, attr) + + def test_incorrect_attr_to_adcm_meta_fail(self): + attr = { + "activatable_group": {"active": True}, + "group_keys": { + "group": {"value": None, "fields": {"string": False}}, + "activatable_group": { + "bad_value": True, + "fields": {"string": True}, + }, + "string": True, + }, + } + with self.assertRaises(KeyError): + convert_attr_to_adcm_meta(attr=attr) + + def test_adcm_meta_to_attr_returns_unchanged_on_fail(self): + adcm_meta = { + "/activatable_group": {"isActive": True, "isSynchronized": True}, + "/activatable_group/string": {"isSynchronized": True}, + "/group/string": {"isSynchronized": False}, + "/string": {}, + } + + new_attr = convert_adcm_meta_to_attr(adcm_meta=adcm_meta) + self.assertDictEqual(new_attr, adcm_meta) diff --git a/python/api_v2/tests/test_group.py b/python/api_v2/tests/test_group.py new file mode 100644 index 0000000000..28aa3172a3 --- /dev/null +++ b/python/api_v2/tests/test_group.py @@ -0,0 +1,157 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_v2.tests.base import BaseAPITestCase +from django.urls import reverse +from rbac.models import Group, OriginType +from rest_framework.response import Response +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, +) + + +class TestGroupAPI(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + self.group_local = Group.objects.create(name="test_local_group") + self.group_ldap = Group.objects.create(name="test_ldap_group", type=OriginType.LDAP) + + def test_list_success(self): + response: Response = self.client.get(path=reverse(viewname="v2:rbac:group-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 2) + + def test_list_no_permissions_success(self): + user_credentials = {"username": "test_user", "password": "test_user_password"} + user_create_data = { + "email": "testuser@mail.ru", + "first_name": "test_user_first_name", + "last_name": "test_user_last_name", + "profile": "", + **user_credentials, + } + + self.create_user(user_data=user_create_data) + self.client.login(**user_credentials) + + response: Response = self.client.get(path=reverse(viewname="v2:rbac:group-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 0) + + def test_create_required_fields_success(self): + response: Response = self.client.post( + path=reverse(viewname="v2:rbac:group-list"), + data={"display_name": "new group name"}, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(Group.objects.count(), 3) + + group = Group.objects.order_by("pk").last() + self.assertEqual(group.display_name, "new group name") + self.assertEqual(group.description, "") + self.assertListEqual(list(group.user_set.all()), []) + + def test_create_with_user_success(self): + new_user = self.create_user() + create_data = {"display_name": "new group name", "description": "new group description", "users": [new_user.pk]} + + response: Response = self.client.post( + path=reverse(viewname="v2:rbac:group-list"), + data=create_data, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(Group.objects.count(), 3) + self.assertIn(new_user.pk, Group.objects.get(pk=response.json()["id"]).user_set.values_list("id", flat=True)) + + def test_update_success(self): + new_user = self.create_user() + update_data = { + "display_name": "new display name", + "description": "new description", + "users": [new_user.pk], + } + + response: Response = self.client.patch( + path=reverse(viewname="v2:rbac:group-detail", kwargs={"pk": self.group_local.pk}), + data=update_data, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.group_local.refresh_from_db() + self.assertEqual(self.group_local.display_name, update_data["display_name"]) + self.assertEqual(self.group_local.description, update_data["description"]) + self.assertListEqual(list(self.group_local.user_set.values_list("id", flat=True)), update_data["users"]) + + def 
test_delete_success(self): + group_ldap_pk = self.group_ldap.pk + response: Response = self.client.delete( + path=reverse(viewname="v2:rbac:group-detail", kwargs={"pk": group_ldap_pk}) + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + with self.assertRaises(Group.DoesNotExist): + Group.objects.get(pk=group_ldap_pk) + + def test_ordering_by_name_success(self): + response: Response = self.client.get( + path=reverse(viewname="v2:rbac:group-list"), data={"ordering": "displayName"} + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertListEqual( + [group["displayName"] for group in response.json()["results"]], + [group.display_name for group in Group.objects.order_by("name")], + ) + + response: Response = self.client.get( + path=reverse(viewname="v2:rbac:group-list"), data={"ordering": "-displayName"} + ) + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertListEqual( + [group["displayName"] for group in response.json()["results"]], + [group.display_name for group in Group.objects.order_by("-name")], + ) + + def test_filtering_by_display_name_success(self): + response: Response = self.client.get( + path=reverse(viewname="v2:rbac:group-list"), data={"displayName": "nonexistentname"} + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 0) + + response: Response = self.client.get( + path=reverse(viewname="v2:rbac:group-list"), data={"displayName": "_lDaP_"} + ) + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 1) + + def test_filtering_by_type_success(self): + response: Response = self.client.get(path=reverse(viewname="v2:rbac:group-list"), data={"type": "local"}) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 1) + + def test_filtering_by_wrong_type_fail(self): + response: Response = self.client.get( + path=reverse(viewname="v2:rbac:group-list"), data={"type": "wrong-group-type"} + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) diff --git a/python/api_v2/tests/test_group_config.py b/python/api_v2/tests/test_group_config.py index fdf09ac2da..97e1544ad0 100644 --- a/python/api_v2/tests/test_group_config.py +++ b/python/api_v2/tests/test_group_config.py @@ -11,7 +11,7 @@ # limitations under the License. 
from api_v2.tests.base import BaseAPITestCase -from cm.models import GroupConfig, ServiceComponent +from cm.models import GroupConfig, Host, ServiceComponent from django.contrib.contenttypes.models import ContentType from rest_framework.response import Response from rest_framework.reverse import reverse @@ -40,7 +40,7 @@ def setUp(self) -> None: self.add_host_to_cluster(cluster=self.cluster_1, host=self.new_host) -class BaseServiceGroupConfigTestCase(BaseClusterGroupConfigTestCase): +class BaseServiceGroupConfigTestCase(BaseClusterGroupConfigTestCase): # pylint: disable=too-many-ancestors def setUp(self) -> None: super().setUp() @@ -71,7 +71,7 @@ def setUp(self) -> None: ) -class TestClusterGroupConfig(BaseClusterGroupConfigTestCase): +class TestClusterGroupConfig(BaseClusterGroupConfigTestCase): # pylint: disable=too-many-ancestors def test_list_success(self): response: Response = self.client.get( path=reverse(viewname="v2:cluster-config-group-list", kwargs={"cluster_pk": self.cluster_1.pk}) @@ -111,24 +111,24 @@ def test_delete_success(self): self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - def test_list_hosts(self): + def test_list_hosts_success(self): response: Response = self.client.get( path=reverse( - viewname="v2:cluster-config-group-hosts", - kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_1_group_config.pk}, + viewname="v2:cluster-config-group-hosts-list", + kwargs={"cluster_pk": self.cluster_1.pk, "group_config_pk": self.cluster_1_group_config.pk}, ) ) self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) - def test_add_hosts(self): + def test_add_hosts_success(self): response: Response = self.client.post( path=reverse( - "v2:cluster-config-group-hosts", - kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_1_group_config.pk}, + viewname="v2:cluster-config-group-hosts-list", + kwargs={"cluster_pk": self.cluster_1.pk, "group_config_pk": self.cluster_1_group_config.pk}, ), - data=[{"id": self.new_host.pk}], + data=[self.new_host.pk], ) self.assertEqual(response.status_code, HTTP_201_CREATED) @@ -147,6 +147,22 @@ def test_host_candidates(self): self.assertEqual(response.json()["count"], 1) self.assertEqual(response.json()["results"][0]["name"], self.new_host.name) + def test_delete_host_success(self): + response: Response = self.client.delete( + path=reverse( + "v2:cluster-config-group-hosts-detail", + kwargs={ + "cluster_pk": self.cluster_1.pk, + "group_config_pk": self.cluster_1_group_config.pk, + "pk": self.host.pk, + }, + ), + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(self.host, Host.objects.get(id=self.host.pk)) + self.assertNotIn(self.host, self.cluster_1_group_config.hosts.all()) + class TestServiceGroupConfig(BaseServiceGroupConfigTestCase): # pylint: disable=too-many-ancestors def test_list_success(self): @@ -213,14 +229,14 @@ def test_delete_success(self): ) self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - def test_list_hosts(self): + def test_list_hosts_success(self): response: Response = self.client.get( path=reverse( - viewname="v2:service-config-group-hosts", + viewname="v2:service-config-group-hosts-list", kwargs={ "cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk, - "pk": self.service_1_group_config.pk, + "group_config_pk": self.service_1_group_config.pk, }, ) ) @@ -229,24 +245,42 @@ def test_list_hosts(self): self.assertEqual(response.json()["count"], 1) self.assertEqual(response.json()["results"][0]["name"], 
self.host.name) - def test_add_hosts(self): + def test_add_hosts_success(self): response: Response = self.client.post( path=reverse( - viewname="v2:service-config-group-hosts", + viewname="v2:service-config-group-hosts-list", kwargs={ "cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk, - "pk": self.service_1_group_config.pk, + "group_config_pk": self.service_1_group_config.pk, }, ), - data=[{"id": self.host_for_service.pk}], + data=[self.host_for_service.pk], ) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(len(response.json()), 1) self.assertEqual(response.json()[0]["name"], self.host_for_service.name) - def test_host_candidates(self): + def test_delete_host_success(self): + response: Response = self.client.delete( + path=reverse( + "v2:service-config-group-hosts-detail", + kwargs={ + "cluster_pk": self.cluster_1.pk, + "service_pk": self.service_1.pk, + "group_config_pk": self.service_1_group_config.pk, + "pk": self.host.pk, + }, + ), + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(self.host, Host.objects.get(id=self.host.pk)) + self.assertIn(self.host, self.cluster_1_group_config.hosts.all()) + self.assertNotIn(self.host, self.service_1_group_config.hosts.all()) + + def test_host_candidates_success(self): response: Response = self.client.get( path=reverse( viewname="v2:service-config-group-host-candidates", @@ -367,12 +401,12 @@ def test_delete_success(self): def test_list_hosts(self): response: Response = self.client.get( path=reverse( - viewname="v2:component-config-group-hosts", + viewname="v2:component-config-group-hosts-list", kwargs={ "cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk, "component_pk": self.component_1.pk, - "pk": self.component_1_group_config.pk, + "group_config_pk": self.component_1_group_config.pk, }, ) ) @@ -381,25 +415,41 @@ def test_list_hosts(self): self.assertEqual(response.json()["count"], 1) self.assertEqual(response.json()["results"][0]["name"], self.host.name) - def test_add_hosts(self): + def test_add_hosts_group_config_not_found_fail(self): response: Response = self.client.post( path=reverse( - viewname="v2:component-config-group-hosts", + viewname="v2:component-config-group-hosts-list", kwargs={ "cluster_pk": self.cluster_1.pk, "service_pk": self.service_1.pk, "component_pk": self.component_1.pk, - "pk": self.component_1_group_config.pk, + "group_config_pk": self.component_1_group_config.pk + 1000, + }, + ), + data=[self.host_for_component.pk], + ) + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_add_hosts_success(self): + response: Response = self.client.post( + path=reverse( + viewname="v2:component-config-group-hosts-list", + kwargs={ + "cluster_pk": self.cluster_1.pk, + "service_pk": self.service_1.pk, + "component_pk": self.component_1.pk, + "group_config_pk": self.component_1_group_config.pk, }, ), - data=[{"id": self.host_for_component.pk}], + data=[self.host_for_component.pk], ) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(len(response.json()), 1) self.assertEqual(response.json()[0]["name"], self.host_for_component.name) - def test_host_candidates(self): + def test_list_host_candidates_success(self): response: Response = self.client.get( path=reverse( viewname="v2:component-config-group-host-candidates", @@ -415,3 +465,23 @@ def test_host_candidates(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.json()["count"], 1) 
self.assertEqual(response.json()["results"][0]["name"], self.host_for_component.name) + + def test_delete_host_success(self): + response: Response = self.client.delete( + path=reverse( + "v2:component-config-group-hosts-detail", + kwargs={ + "cluster_pk": self.cluster_1.pk, + "service_pk": self.service_1.pk, + "component_pk": self.component_1.pk, + "group_config_pk": self.component_1_group_config.pk, + "pk": self.host.pk, + }, + ), + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(self.host, Host.objects.get(id=self.host.pk)) + self.assertIn(self.host, self.cluster_1_group_config.hosts.all()) + self.assertIn(self.host, self.service_1_group_config.hosts.all()) + self.assertNotIn(self.host, self.component_1_group_config.hosts.all()) diff --git a/python/api_v2/tests/test_host.py b/python/api_v2/tests/test_host.py index aa33475ce4..16086c3b09 100644 --- a/python/api_v2/tests/test_host.py +++ b/python/api_v2/tests/test_host.py @@ -11,7 +11,14 @@ # limitations under the License. from api_v2.tests.base import BaseAPITestCase -from cm.models import Action, Cluster, Host, MaintenanceMode +from cm.models import ( + Action, + Cluster, + Host, + HostComponent, + HostProvider, + ServiceComponent, +) from django.urls import reverse from rest_framework.response import Response from rest_framework.status import ( @@ -19,6 +26,7 @@ HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, HTTP_409_CONFLICT, ) @@ -42,25 +50,30 @@ def test_retrieve_success(self): path=reverse(viewname="v2:host-detail", kwargs={"pk": self.host.pk}), ) data = { - "id": 1, - "fqdn": "test_host", + "id": self.host.pk, + "name": "test_host", "state": "created", "status": 32, - "provider": {"id": 1, "name": "provider"}, + "hostprovider": {"id": 1, "name": "provider", "display_name": "provider"}, "concerns": [], "is_maintenance_mode_available": False, - "maintenance_mode": "OFF", + "maintenance_mode": "off", } self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.json()["id"], self.host.pk) - self.assertEqual(response.data, data) + self.assertEqual(response.data["id"], data["id"]) + self.assertEqual(response.data["name"], data["name"]) + self.assertEqual(response.data["state"], data["state"]) + self.assertDictEqual(response.data["hostprovider"], data["hostprovider"]) + self.assertEqual(response.data["concerns"], data["concerns"]) + self.assertEqual(response.data["is_maintenance_mode_available"], data["is_maintenance_mode_available"]) + self.assertEqual(response.data["maintenance_mode"], data["maintenance_mode"]) def test_create_without_cluster_success(self): response: Response = self.client.post( path=reverse(viewname="v2:host-list"), data={ - "provider": self.provider.pk, - "fqdn": "new-test-host", + "hostprovider_id": self.provider.pk, + "name": "new-test-host", }, ) @@ -71,23 +84,36 @@ def test_create_without_cluster_success(self): data = { "id": 2, - "fqdn": "new-test-host", + "name": "new-test-host", "state": "created", "status": 32, - "provider": {"id": 1, "name": "provider"}, + "hostprovider": {"id": 1, "name": "provider", "display_name": "provider"}, "concerns": [], "is_maintenance_mode_available": False, - "maintenance_mode": "OFF", + "maintenance_mode": "off", } self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.data, data) + self.assertEqual(response.data["id"], data["id"]) + self.assertEqual(response.data["name"], data["name"]) + self.assertEqual(response.data["state"], data["state"]) + 
self.assertDictEqual(response.data["hostprovider"], data["hostprovider"]) + self.assertEqual(response.data["concerns"], data["concerns"]) + self.assertEqual(response.data["is_maintenance_mode_available"], data["is_maintenance_mode_available"]) + self.assertEqual(response.data["maintenance_mode"], data["maintenance_mode"]) + + def test_create_failed_wrong_provider(self): + response: Response = self.client.post( + path=reverse(viewname="v2:host-list"), + data={"hostprovider_id": self.get_non_existent_pk(model=HostProvider), "name": "woohoo"}, + ) + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_create_with_cluster_success(self): response: Response = self.client.post( path=reverse(viewname="v2:host-list"), - data={"provider": self.provider.pk, "fqdn": "new-test-host", "cluster": self.cluster_1.pk}, + data={"hostprovider_id": self.provider.pk, "name": "new-test-host", "cluster_id": self.cluster_1.pk}, ) - self.assertEqual(response.status_code, HTTP_201_CREATED) host_2 = Host.objects.get(fqdn="new-test-host") @@ -97,8 +123,8 @@ def test_fqdn_validation_create_failed(self): response: Response = self.client.post( path=reverse(viewname="v2:host-list"), data={ - "provider": self.provider.pk, - "fqdn": "new_test_host", + "hostprovider_id": self.provider.pk, + "name": "new_test_host", }, ) @@ -109,7 +135,7 @@ def test_update_success(self): new_test_host_fqdn = "new-fqdn" response: Response = self.client.patch( path=reverse(viewname="v2:host-detail", kwargs={"pk": self.host.pk}), - data={"fqdn": new_test_host_fqdn}, + data={"name": new_test_host_fqdn}, ) self.assertEqual(response.status_code, HTTP_200_OK) @@ -141,7 +167,7 @@ def test_delete_success(self): def test_maintenance_mode(self): response: Response = self.client.post( path=reverse(viewname="v2:host-maintenance-mode", kwargs={"pk": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "on"}, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -150,9 +176,10 @@ def test_maintenance_mode(self): self.add_host_to_cluster(cluster=self.cluster_1, host=self.host) response: Response = self.client.post( path=reverse(viewname="v2:host-maintenance-mode", kwargs={"pk": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "on"}, ) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.data["maintenance_mode"], "on") class TestClusterHost(BaseAPITestCase): @@ -185,7 +212,7 @@ def test_create_success(self): host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_second") response: Response = self.client.post( path=reverse(viewname="v2:host-cluster-list", kwargs={"cluster_pk": self.cluster_1.pk}), - data={"hosts": [self.host.pk, host_2.pk]}, + data=[{"host_id": self.host.pk}, {"host_id": host_2.pk}], ) self.assertEqual(response.status_code, HTTP_201_CREATED) @@ -202,10 +229,11 @@ def test_maintenance_mode(self): viewname="v2:host-cluster-maintenance-mode", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.host.pk}, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "on"}, ) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.data["maintenance_mode"], "on") class TestHostActions(BaseAPITestCase): @@ -216,6 +244,9 @@ def setUp(self) -> None: self.add_host_to_cluster(cluster=self.cluster_1, host=self.host) self.action = Action.objects.get(name="host_action", prototype=self.host.prototype) + self.service_1 = 
self.add_service_to_cluster(service_name="service_1", cluster=self.cluster_1) + self.component_1 = ServiceComponent.objects.get(prototype__name="component_1", service=self.service_1) + def test_host_cluster_list_success(self): response: Response = self.client.get( path=reverse( @@ -225,7 +256,7 @@ def test_host_cluster_list_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) + self.assertEqual(len(response.json()), 2) def test_host_cluster_retrieve_success(self): response: Response = self.client.get( @@ -252,7 +283,7 @@ def test_host_cluster_run_success(self): "pk": self.action.pk, }, ), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, ) self.assertEqual(response.status_code, HTTP_200_OK) @@ -263,7 +294,7 @@ def test_host_list_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) + self.assertEqual(len(response.json()), 2) def test_host_retrieve_success(self): response: Response = self.client.get( @@ -276,7 +307,35 @@ def test_host_retrieve_success(self): def test_host_run_success(self): response: Response = self.client.post( path=reverse("v2:host-action-run", kwargs={"host_pk": self.host.pk, "pk": self.action.pk}), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_host_mapped_list_success(self) -> None: + HostComponent.objects.create( + cluster=self.cluster_1, service=self.service_1, component=self.component_1, host=self.host + ) + response: Response = self.client.get( + path=reverse( + "v2:host-action-list", + kwargs={"host_pk": self.host.pk}, + ), + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()), 4) + + def test_host_mapped_retrieve_success(self) -> None: + HostComponent.objects.create( + cluster=self.cluster_1, service=self.service_1, component=self.component_1, host=self.host + ) + action = Action.objects.filter(prototype=self.service_1.prototype, host_action=True).first() + response: Response = self.client.get( + path=reverse( + "v2:host-action-detail", + kwargs={"host_pk": self.host.pk, "pk": action.pk}, + ), ) self.assertEqual(response.status_code, HTTP_200_OK) diff --git a/python/api_v2/tests/test_host_provider.py b/python/api_v2/tests/test_host_provider.py index 085d4448d9..e26924cb7d 100644 --- a/python/api_v2/tests/test_host_provider.py +++ b/python/api_v2/tests/test_host_provider.py @@ -12,7 +12,6 @@ from api_v2.tests.base import BaseAPITestCase from cm.models import Action, HostProvider -from django.conf import settings from rest_framework.response import Response from rest_framework.reverse import reverse from rest_framework.status import ( @@ -28,7 +27,7 @@ class TestHostProvider(BaseAPITestCase): def setUp(self) -> None: self.client.login(username="admin", password="admin") - host_provider_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "provider" + host_provider_path = self.test_bundles_dir / "provider" self.host_provider_bundle = self.add_bundle(source_dir=host_provider_path) self.host_provider = self.add_provider(self.host_provider_bundle, "test host provider") @@ -54,11 +53,11 @@ def test_retrieve_not_found_fail(self): self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - def 
test_host_provider_create_success(self): + def test_create_success(self): response = self.client.post( path=reverse(viewname="v2:hostprovider-list"), data={ - "prototype": self.host_provider_bundle.pk, + "prototype_id": self.host_provider_bundle.pk, "name": self.host_provider.name + " new", "description": "newly created host provider", }, @@ -66,6 +65,17 @@ def test_host_provider_create_success(self): self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(response.json()["name"], self.host_provider.name + " new") + def test_create_no_description_success(self): + response = self.client.post( + path=reverse(viewname="v2:hostprovider-list"), + data={ + "prototype_id": self.host_provider_bundle.pk, + "name": self.host_provider.name + " new", + }, + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(response.json()["name"], self.host_provider.name + " new") + def test_host_provider_duplicate_fail(self): response = self.client.post( path=reverse(viewname="v2:hostprovider-list"), @@ -103,7 +113,7 @@ def test_action_list_success(self): response: Response = self.client.get( path=reverse( viewname="v2:provider-action-list", - kwargs={"provider_pk": self.provider.pk}, + kwargs={"hostprovider_pk": self.provider.pk}, ), ) @@ -115,7 +125,7 @@ def test_action_retrieve_success(self): path=reverse( viewname="v2:provider-action-detail", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, "pk": self.action.pk, }, ), @@ -129,11 +139,11 @@ def test_action_run_success(self): path=reverse( viewname="v2:provider-action-run", kwargs={ - "provider_pk": self.provider.pk, + "hostprovider_pk": self.provider.pk, "pk": self.action.pk, }, ), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, ) self.assertEqual(response.status_code, HTTP_200_OK) diff --git a/python/api_v2/tests/test_jobs.py b/python/api_v2/tests/test_jobs.py index e5283c730a..f92d3d25d6 100644 --- a/python/api_v2/tests/test_jobs.py +++ b/python/api_v2/tests/test_jobs.py @@ -11,8 +11,18 @@ # limitations under the License. 
from datetime import timedelta +from unittest.mock import patch -from cm.models import ADCM, Action, ActionType, JobLog, TaskLog +from cm.models import ( + ADCM, + Action, + ActionType, + JobLog, + JobStatus, + LogStorage, + SubAction, + TaskLog, +) from django.contrib.contenttypes.models import ContentType from django.urls import reverse from django.utils import timezone @@ -41,17 +51,27 @@ def setUp(self) -> None: action=self.action, ) self.job_1 = JobLog.objects.create( - status="created", + status=JobStatus.CREATED, start_date=timezone.now(), finish_date=timezone.now() + timedelta(days=1), ) self.job_2 = JobLog.objects.create( - status="failed", + status=JobStatus.RUNNING, start_date=timezone.now() + timedelta(days=1), finish_date=timezone.now() + timedelta(days=2), action=self.action, task=self.task, - pid=self.job_1.pid + 1, + pid=9999, + sub_action=SubAction.objects.create( + action=self.action, + allow_to_terminate=True, + ), + ) + self.log_1 = LogStorage.objects.create( + job=self.job_1, + name="ansible", + type="stderr", + format="txt", ) def test_job_list_success(self): @@ -75,3 +95,24 @@ def test_job_retrieve_not_found_fail(self): def test_job_log_list_success(self): response: Response = self.client.get(path=reverse(viewname="v2:log-list", kwargs={"job_pk": self.job_1.pk})) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()), 1) + + def test_job_log_download_success(self): + response: Response = self.client.get( + path=reverse(viewname="v2:log-download", kwargs={"job_pk": self.job_1.pk, "log_pk": self.log_1.pk}) + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_job_log_not_found_download_fail(self): + response: Response = self.client.get( + path=reverse(viewname="v2:log-download", kwargs={"job_pk": self.job_1.pk, "log_pk": self.log_1.pk + 10}) + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_job_terminate_success(self): + with patch("cm.models.os.kill") as kill_mock: + response: Response = self.client.post( + path=reverse(viewname="v2:joblog-terminate", kwargs={"pk": self.job_2.pk}), data={} + ) + kill_mock.assert_called() + self.assertEqual(response.status_code, HTTP_200_OK) diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index e469643ad5..79fd709293 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -21,15 +21,23 @@ class TestMapping(BaseAPITestCase): def setUp(self) -> None: super().setUp() - self.host = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host") - self.add_host_to_cluster(cluster=self.cluster_1, host=self.host) + self.host_1 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_1") + self.add_host_to_cluster(cluster=self.cluster_1, host=self.host_1) + + self.host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_2") + self.add_host_to_cluster(cluster=self.cluster_1, host=self.host_2) + self.service_1 = self.add_service_to_cluster(service_name="service_1", cluster=self.cluster_1) self.component_1 = ServiceComponent.objects.get( cluster=self.cluster_1, service=self.service_1, prototype__name="component_1" ) - self.hostcomponent_map = self.add_hostcomponent_map( + self.component_2 = ServiceComponent.objects.get( + cluster=self.cluster_1, service=self.service_1, prototype__name="component_2" + ) + + self.add_hostcomponent_map( cluster=self.cluster_1, - hc_map=[{"host_id": self.host.pk, "service_id": 
self.service_1.pk, "component_id": self.component_1.pk}], + hc_map=[{"host_id": self.host_1.pk, "service_id": self.service_1.pk, "component_id": self.component_1.pk}], ) def test_list_mapping_success(self): @@ -38,31 +46,40 @@ def test_list_mapping_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(response.json()["count"], 1) + self.assertEqual(len(response.json()), 1) def test_create_mapping_success(self): - host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_2") - self.add_host_to_cluster(cluster=self.cluster_1, host=host_2) + host_3 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_3") + self.add_host_to_cluster(cluster=self.cluster_1, host=host_3) component_2 = ServiceComponent.objects.get( cluster=self.cluster_1, service=self.service_1, prototype__name="component_2" ) response: Response = self.client.post( path=reverse(viewname="v2:mapping-list", kwargs={"cluster_pk": self.cluster_1.pk}), - data={"service": self.service_1.pk, "host": host_2.pk, "component": component_2.pk}, + data=[ + {"host_id": host_3.pk, "component_id": component_2.pk}, + {"host_id": self.host_1.pk, "component_id": self.component_1.pk}, + ], ) - self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), 2) + response: Response = self.client.post( + path=reverse(viewname="v2:mapping-list", kwargs={"cluster_pk": self.cluster_1.pk}), + data=[], + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(HostComponent.objects.count(), 0) + def test_mapping_hosts_success(self): response: Response = self.client.get( path=reverse(viewname="v2:mapping-hosts", kwargs={"cluster_pk": self.cluster_1.pk}), ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) - self.assertEqual(response.json()[0]["id"], self.host.pk) + self.assertEqual(len(response.json()), 2) + self.assertEqual({host["id"] for host in response.json()}, {self.host_1.pk, self.host_2.pk}) def test_mapping_components_success(self): response: Response = self.client.get( @@ -70,5 +87,5 @@ def test_mapping_components_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) - self.assertEqual(response.json()[0]["id"], self.component_1.pk) + self.assertEqual(len(response.json()), 2) + self.assertEqual({component["id"] for component in response.json()}, {self.component_1.pk, self.component_2.pk}) diff --git a/python/api_v2/tests/test_policy.py b/python/api_v2/tests/test_policy.py new file mode 100644 index 0000000000..2b87633777 --- /dev/null +++ b/python/api_v2/tests/test_policy.py @@ -0,0 +1,174 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.tests.base import BaseAPITestCase +from django.urls import reverse +from rbac.models import Group, Policy, Role +from rbac.services.policy import policy_create +from rbac.services.role import role_create +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, +) + + +class TestPolicy(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + self.remove_hostprovider_role = role_create( + name="Remove Host-Provider", + display_name="Remove Host-Provider", + child=[Role.objects.get(name="Remove provider", built_in=True)], + ) + self.create_user_role = role_create( + name="Create Users", + display_name="Create Users", + child=[Role.objects.get(name="Create user", built_in=True)], + ) + + self.group_1 = Group.objects.create(name="test_local_group_1") + self.group_2 = Group.objects.create(name="test_local_group_2") + + self.remove_hostprovider_policy = policy_create( + name="Awesome Policy", role=self.remove_hostprovider_role, group=[self.group_1], object=[self.provider] + ) + self.create_user_policy = policy_create( + name="Create User Policy", role=self.create_user_role, group=[self.group_1, self.group_2], object=[] + ) + + def test_list_policy_success(self) -> None: + response = self.client.get(path=reverse(viewname="v2:rbac:policy-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertIn("results", data) + policies = data["results"] + self.assertEqual(len(policies), 2) + self.assertTrue(all(set(policy).issuperset({"id", "name", "objects", "groups"}) for policy in policies)) + + def test_retrieve_policy_success(self) -> None: + response = self.client.get( + path=reverse(viewname="v2:rbac:policy-detail", kwargs={"pk": self.create_user_policy.pk}) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(set(data).issuperset({"id", "objects", "groups", "role"})) + self.assertListEqual(data["objects"], []) + self.assertEqual( + data["role"], + { + "id": self.create_user_role.pk, + "name": self.create_user_role.name, + "displayName": self.create_user_role.display_name, + }, + ) + self.assertListEqual( + sorted(data["groups"], key=lambda item: item["id"]), + sorted( + ( + {"id": group.pk, "name": group.name, "displayName": group.display_name} + for group in (self.group_1, self.group_2) + ), + key=lambda item: item["id"], + ), + ) + + def test_create_parametrized_policy_only_required_fields_success(self) -> None: + response = self.client.post( + path=reverse(viewname="v2:rbac:policy-list"), + data={ + "name": "New Policy", + "role": {"id": self.remove_hostprovider_role.pk}, + "objects": [{"id": self.provider.pk, "type": "provider"}], + "groups": [self.group_1.pk], + }, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + data = response.json() + self.assertTrue(set(data).issuperset({"id", "objects", "groups"})) + self.assertTrue(Policy.objects.filter(pk=data["id"]).exists()) + self.assertEqual( + data["objects"], + [ + { + "id": self.provider.pk, + "type": "provider", + "name": self.provider.name, + "displayName": self.provider.display_name, + } + ], + ) + self.assertEqual( + data["groups"], + [{"id": self.group_1.pk, "name": self.group_1.name, "displayName": self.group_1.display_name}], + ) + + def test_update_policy_every_field_success(self) -> None: + new_data = { + "name": "Updated name", + "role": {"id": self.create_user_role.pk}, + "objects": [], + "groups": [self.group_2.pk], + } + response = 
self.client.patch( + path=reverse(viewname="v2:rbac:policy-detail", kwargs={"pk": self.remove_hostprovider_policy.pk}), + data=new_data, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(set(data).issuperset({"id", "objects", "groups"})) + self.assertEqual(data["id"], self.remove_hostprovider_policy.pk) + self.assertListEqual(data["objects"], []) + self.assertEqual( + data["groups"], + [{"id": self.group_2.pk, "name": self.group_2.name, "displayName": self.group_2.display_name}], + ) + + def test_delete_policy_success(self) -> None: + response = self.client.delete( + path=reverse(viewname="v2:rbac:policy-detail", kwargs={"pk": self.create_user_policy.pk}) + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertFalse(Policy.objects.filter(pk=self.create_user_policy.pk).exists()) + + def test_create_policy_no_group_fail(self): + response = self.client.post( + path=reverse(viewname="v2:rbac:policy-list"), + data={ + "name": "test_policy_new", + "description": "description", + "role": self.create_user_role.pk, + "objects": [{"type": "cluster", "id": self.cluster_1.pk}], + }, + ) + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + + def test_update_policy_no_operation_success(self): + response = self.client.patch( + path=reverse(viewname="v2:rbac:policy-detail", kwargs={"pk": self.create_user_policy.pk}), + data={}, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_update_policy_wrong_object_fail(self): + response = self.client.patch( + path=reverse(viewname="v2:rbac:policy-detail", kwargs={"pk": self.create_user_policy.pk}), + data={"objects": [{"type": "role", "id": self.create_user_role.pk}]}, + ) + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) diff --git a/python/api_v2/tests/test_prototype.py b/python/api_v2/tests/test_prototype.py index cb85991905..0594f359b5 100644 --- a/python/api_v2/tests/test_prototype.py +++ b/python/api_v2/tests/test_prototype.py @@ -9,11 +9,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-# pylint: disable=too-many-lines from api_v2.tests.base import BaseAPITestCase from cm.models import ObjectType, Prototype -from django.conf import settings from rest_framework.reverse import reverse from rest_framework.status import HTTP_200_OK, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT @@ -22,8 +20,8 @@ class TestPrototype(BaseAPITestCase): def setUp(self) -> None: self.client.login(username="admin", password="admin") - cluster_bundle_1_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_one" - cluster_bundle_2_path = settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_one_upgrade" + cluster_bundle_1_path = self.test_bundles_dir / "cluster_one" + cluster_bundle_2_path = self.test_bundles_dir / "cluster_one_upgrade" self.bundle_1 = self.add_bundle(source_dir=cluster_bundle_1_path) self.bundle_2 = self.add_bundle(source_dir=cluster_bundle_2_path) @@ -78,3 +76,11 @@ def test_accept_non_existing_license_fail(self): ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) + + def test_filter_by_bundle_id_and_type_cluster(self): + response = self.client.get( + path=reverse(viewname="v2:prototype-list"), data={"bundleId": self.bundle_1.id, "type": "cluster"} + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.data["count"], 1) diff --git a/python/api_v2/tests/test_role.py b/python/api_v2/tests/test_role.py new file mode 100644 index 0000000000..c64413c172 --- /dev/null +++ b/python/api_v2/tests/test_role.py @@ -0,0 +1,208 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_v2.tests.base import BaseAPITestCase +from django.urls import reverse +from rbac.models import Role +from rbac.services.role import role_create +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) + + +class TestRole(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + self.view_cluster_config_role = Role.objects.get(name="View cluster configurations", built_in=True) + self.edit_cluster_config_role = Role.objects.get(name="Edit cluster configurations", built_in=True) + + self.cluster_config_role = role_create( + name="Change cluster config", + display_name="Change cluster config", + child=[self.view_cluster_config_role], + ) + + def test_retrieve_not_found_fail(self): + response = self.client.get( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk + 10}) + ) + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_retrieve_success(self): + response = self.client.get( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk}) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["id"], self.cluster_config_role.pk) + + def test_list_success(self): + response = self.client.get(path=reverse(viewname="v2:rbac:role-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertGreater(len(response.json()["results"]), 1) + + def test_create_success(self): + response = self.client.post( + path=reverse(viewname="v2:rbac:role-list"), + data={"display_name": "Edit cluster configuration", "children": [self.edit_cluster_config_role.pk]}, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertTrue(Role.objects.filter(id=response.json()["id"]).exists()) + + def test_create_required_field_failed(self): + response = self.client.post(path=reverse(viewname="v2:rbac:role-list"), data={"display_name": "test"}) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), {"code": "BAD_REQUEST", "desc": "children - This field is required.;", "level": "error"} + ) + + def test_create_already_exists_failed(self): + response = self.client.post( + path=reverse(viewname="v2:rbac:role-list"), + data={ + "display_name": "Change cluster config", + "children": [self.view_cluster_config_role.pk], + }, + ) + + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + { + "code": "ROLE_CREATE_ERROR", + "desc": "A role with this name already exists", + "level": "error", + }, + ) + + def test_update_required_filed_success(self): + response = self.client.put( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk}), + data={ + "display_name": "New change cluster config", + "children": [self.edit_cluster_config_role.pk], + }, + ) + + self.cluster_config_role.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual("New change cluster config", self.cluster_config_role.display_name) + self.assertEqual([self.edit_cluster_config_role], list(self.cluster_config_role.child.all())) + + def test_update_required_filed_failed(self): + response = self.client.put( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk}), + data={"display_name": "New change cluster config"}, + ) + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + 
self.assertDictEqual( + response.json(), {"code": "BAD_REQUEST", "desc": "children - This field is required.;", "level": "error"} + ) + + def test_partial_update_success(self): + response = self.client.patch( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk}), + data={"display_name": "New change cluster config"}, + ) + + self.cluster_config_role.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual("New change cluster config", self.cluster_config_role.display_name) + + def test_update_built_in_failed(self): + response = self.client.patch( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.view_cluster_config_role.pk}), + data={"built_in": False}, + ) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + { + "code": "ROLE_UPDATE_ERROR", + "desc": "Can't modify role View cluster configurations as it is auto created", + "level": "error", + }, + ) + + def test_delete_success(self): + response = self.client.delete( + path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": self.cluster_config_role.pk}) + ) + + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertFalse(Role.objects.filter(pk=self.cluster_config_role.pk).exists()) + + def test_delete_failed(self): + built_in_role = Role.objects.filter(built_in=True).first() + + response = self.client.delete(path=reverse(viewname="v2:rbac:role-detail", kwargs={"pk": built_in_role.pk})) + + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + {"code": "ROLE_DELETE_ERROR", "desc": "It is forbidden to remove the built-in role.", "level": "error"}, + ) + + def test_ordering_success(self): + limit = 10 + + response = self.client.get( + path=reverse( + viewname="v2:rbac:role-list", + ), + data={"ordering": "-displayName", "limit": limit}, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + + response_names = [role_data["displayName"] for role_data in response.json()["results"]] + db_names = [role.display_name for role in Role.objects.order_by("-display_name")[:limit]] + self.assertListEqual(response_names, db_names) + + def test_filtering_by_display_name_success(self): + filter_name = "cReAtE" + + response = self.client.get(path=reverse(viewname="v2:rbac:role-list"), data={"displayName": filter_name}) + + self.assertEqual(response.status_code, HTTP_200_OK) + + response_pks = [role_data["id"] for role_data in response.json()["results"]] + db_pks = [role.pk for role in Role.objects.filter(display_name__icontains=filter_name)] + self.assertListEqual(response_pks, db_pks) + + def test_filtering_by_categories_success(self): + response = self.client.get(path=reverse(viewname="v2:rbac:role-list"), data={"categories": "cluster_one"}) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 42) + + def test_list_object_candidates_success(self): + response = self.client.get( + path=reverse(viewname="v2:rbac:role-object-candidates", kwargs={"pk": self.cluster_config_role.pk}) + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["cluster"]), 2) + self.assertEqual(response.json()["cluster"][0]["name"], self.cluster_1.name) + self.assertEqual(response.json()["cluster"][1]["name"], self.cluster_2.name) diff --git a/python/api_v2/tests/test_service.py b/python/api_v2/tests/test_service.py index a7b6d89163..e0979b9f77 100644 --- 
a/python/api_v2/tests/test_service.py +++ b/python/api_v2/tests/test_service.py @@ -24,7 +24,12 @@ ) from django.urls import reverse from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_409_CONFLICT, +) class TestServiceAPI(BaseAPITestCase): @@ -72,14 +77,30 @@ def test_delete_success(self): self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) self.assertFalse(ClusterObject.objects.filter(pk=self.service_2.pk).exists()) + def test_delete_failed(self): + self.service_2.state = "non_created" + self.service_2.save(update_fields=["state"]) + + response: Response = self.client.delete( + path=reverse( + viewname="v2:service-detail", kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.service_2.pk} + ), + ) + + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertTrue(ClusterObject.objects.filter(pk=self.service_2.pk).exists()) + def test_create_success(self): + initial_service_count = ClusterObject.objects.count() manual_add_service_proto = Prototype.objects.get(type=ObjectType.SERVICE, name="service_3_manual_add") + response: Response = self.client.post( path=reverse(viewname="v2:service-list", kwargs={"cluster_pk": self.cluster_1.pk}), - data={"prototype": manual_add_service_proto.pk}, + data=[{"prototype_id": manual_add_service_proto.pk}], ) self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertEqual(ClusterObject.objects.count(), initial_service_count + 1) def test_filter_by_name_success(self): response: Response = self.client.get( @@ -157,7 +178,7 @@ def test_action_run_success(self): "pk": self.action.pk, }, ), - data={"host_component_map": {}, "config": {}, "attr": {}, "is_verbose": False}, + data={"host_component_map": [], "config": {}, "attr": {}, "is_verbose": False}, ) self.assertEqual(response.status_code, HTTP_200_OK) diff --git a/python/api_v2/tests/test_tasks.py b/python/api_v2/tests/test_tasks.py new file mode 100644 index 0000000000..90cc373d60 --- /dev/null +++ b/python/api_v2/tests/test_tasks.py @@ -0,0 +1,206 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
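The task tests below create TaskLog rows directly and assert that a task's "objects" field lists the selector entries as {"id", "name", "type"} dicts, compared after sorting by type. A small illustration of that assertion pattern, assuming the selector keeps one {"id", "name"} entry per object type (the exact selector layout is not shown in this diff):

from operator import itemgetter

selector = {"cluster": {"id": 1, "name": "cluster_1"}, "service": {"id": 7, "name": "service_1"}}
objects = sorted(
    ({"id": entry["id"], "name": entry["name"], "type": type_} for type_, entry in selector.items()),
    key=itemgetter("type"),
)
# -> [{"id": 1, "name": "cluster_1", "type": "cluster"}, {"id": 7, "name": "service_1", "type": "service"}]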
+ +from datetime import timedelta +from operator import itemgetter + +from api_v2.tests.base import BaseAPITestCase +from cm.job import create_task, get_selector +from cm.models import ( + ADCM, + Action, + ActionType, + Cluster, + ClusterObject, + Host, + HostComponent, + HostProvider, + JobLog, + ServiceComponent, + TaskLog, +) +from django.contrib.contenttypes.models import ContentType +from django.urls import reverse +from django.utils import timezone +from rest_framework.response import Response +from rest_framework.status import HTTP_200_OK, HTTP_404_NOT_FOUND + +from adcm.tests.base import BaseTestCase + + +class TestTask(BaseTestCase): + def setUp(self) -> None: + super().setUp() + + self.adcm = ADCM.objects.first() + self.action = Action.objects.create( + display_name="test_adcm_action", + prototype=self.adcm.prototype, + type=ActionType.JOB, + state_available="any", + ) + self.task_1 = TaskLog.objects.create( + object_id=self.adcm.pk, + object_type=ContentType.objects.get(app_label="cm", model="adcm"), + start_date=timezone.now(), + finish_date=timezone.now() + timedelta(days=1), + action=self.action, + ) + self.task_2 = TaskLog.objects.create( + object_id=self.adcm.pk, + object_type=ContentType.objects.get(app_label="cm", model="adcm"), + start_date=timezone.now(), + finish_date=timezone.now() + timedelta(days=1), + action=self.action, + selector=get_selector(self.adcm, self.action), + ) + self.job = JobLog.objects.create( + status="failed", + start_date=timezone.now() + timedelta(days=1), + finish_date=timezone.now() + timedelta(days=2), + action=self.action, + task=self.task_1, + ) + + def test_task_list_success(self): + response: Response = self.client.get(path=reverse(viewname="v2:tasklog-list")) + self.assertEqual(len(response.data["results"]), 2) + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_task_retrieve_success(self): + response: Response = self.client.get( + path=reverse(viewname="v2:tasklog-detail", kwargs={"pk": self.task_2.pk}), + ) + task_object = {"type": self.adcm.content_type.name, "id": self.adcm.pk, "name": self.adcm.name} + self.assertEqual(response.data["id"], self.task_2.pk) + self.assertEqual(response.data["objects"], [task_object]) + self.assertEqual(response.status_code, HTTP_200_OK) + + def test_task_retrieve_not_found_fail(self): + response: Response = self.client.get( + path=reverse(viewname="v2:tasklog-detail", kwargs={"pk": self.task_2.pk + 10}), + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_task_log_download_success(self): + response: Response = self.client.get( + path=reverse(viewname="v2:log-download", kwargs={"task_pk": self.task_1.pk}) + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + +class TestTaskObjects(BaseAPITestCase): # pylint: disable=too-many-instance-attributes + def setUp(self) -> None: + super().setUp() + + self.service_1 = self.add_service_to_cluster("service_1", self.cluster_1) + self.service_2 = self.add_service_to_cluster("service_2", self.cluster_1) + + self.component_1 = ServiceComponent.objects.get(service=self.service_1, prototype__name="component_1") + + self.host = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="just-host") + + self.add_host_to_cluster(self.cluster_1, self.host) + HostComponent.objects.create( + cluster=self.cluster_1, host=self.host, service=self.service_1, component=self.component_1 + ) + + self.cluster_object = {"id": self.cluster_1.pk, "name": self.cluster_1.display_name, "type": "cluster"} + self.service_object = 
{ + "id": self.service_1.pk, + "name": self.service_1.display_name, + "type": "service", + } + self.component_object = { + "id": self.component_1.pk, + "name": self.component_1.display_name, + "type": "component", + } + + self.provider_object = {"id": self.provider.pk, "name": self.provider.name, "type": "provider"} + self.host_object = {"id": self.host.pk, "name": self.host.fqdn, "type": "host"} + + def test_cluster_task_objects_success(self) -> None: + task = self.create_task(object_=self.cluster_1, action_name="action") + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object]) + + def test_service_task_objects_success(self) -> None: + task = self.create_task(object_=self.service_1, action_name="action") + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object, self.service_object]) + + def test_component_task_objects_success(self) -> None: + task = self.create_task(object_=self.component_1, action_name="action_1_comp_1") + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object, self.component_object, self.service_object]) + + def test_provider_task_objects_success(self) -> None: + task = self.create_task(object_=self.provider, action_name="provider_action") + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.provider_object]) + + def test_host_task_objects_success(self) -> None: + task = self.create_task(object_=self.host, action_name="host_action") + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.host_object, self.provider_object]) + + def test_host_task_of_cluster_action_objects_success(self) -> None: + task = self.create_task(object_=self.cluster_1, action_name="cluster_on_host", host=self.host) + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object, self.host_object]) + + def test_host_task_of_service_action_objects_success(self) -> None: + task = self.create_task(object_=self.service_1, action_name="service_on_host", host=self.host) + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object, self.host_object, self.service_object]) + + def test_host_task_of_component_action_objects_success(self) -> None: + task = self.create_task(object_=self.component_1, action_name="component_on_host", 
host=self.host) + response = self.client.get(path=reverse("v2:tasklog-detail", kwargs={"pk": task.pk})) + self.assertEqual(response.status_code, HTTP_200_OK) + objects = sorted(response.json()["objects"], key=itemgetter("type")) + self.assertEqual(objects, [self.cluster_object, self.component_object, self.host_object, self.service_object]) + + @staticmethod + def create_task( + object_: Cluster | ClusterObject | ServiceComponent | HostProvider | Host | ADCM, + action_name: str, + *, + host: Host | None = None + ): + action = Action.objects.get(name=action_name, prototype=object_.prototype) + hosts = [] if not host else [host.pk] + return create_task( + action=action, + obj=host or object_, + conf={}, + attr={}, + hostcomponent=[], + hosts=hosts, + verbose=False, + post_upgrade_hc=[], + ) diff --git a/python/api_v2/tests/test_upgrade.py b/python/api_v2/tests/test_upgrade.py index 6e7187fd97..0e041393de 100644 --- a/python/api_v2/tests/test_upgrade.py +++ b/python/api_v2/tests/test_upgrade.py @@ -10,25 +10,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +from pathlib import Path +from unittest.mock import patch from api_v2.tests.base import BaseAPITestCase -from cm.models import Upgrade -from django.conf import settings +from cm.models import ADCM, ConfigLog, HostComponent, ServiceComponent, TaskLog, Upgrade +from django.contrib.contenttypes.models import ContentType from django.urls import reverse +from django.utils import timezone +from init_db import init +from rbac.upgrade.role import init_roles from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT +from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND +from rest_framework.test import APIClient, APITestCase -class TestUpgrade(BaseAPITestCase): +class TestUpgrade(BaseAPITestCase): # pylint:disable=too-many-public-methods def setUp(self) -> None: super().setUp() - cluster_bundle_1_upgrade_path = ( - settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "cluster_one_upgrade" - ) - provider_bundle_upgrade_path = ( - settings.BASE_DIR / "python" / "api_v2" / "tests" / "bundles" / "provider_upgrade" - ) + cluster_bundle_1_upgrade_path = self.test_bundles_dir / "cluster_one_upgrade" + provider_bundle_upgrade_path = self.test_bundles_dir / "provider_upgrade" cluster_bundle_upgrade = self.add_bundle(source_dir=cluster_bundle_1_upgrade_path) provider_bundle_upgrade = self.add_bundle(source_dir=provider_bundle_upgrade_path) @@ -40,8 +42,22 @@ def setUp(self) -> None: name="upgrade", bundle=provider_bundle_upgrade, ) - self.upgrade_cluster_via_action = Upgrade.objects.get(name="upgrade_via_action", bundle=cluster_bundle_upgrade) - self.upgrade_host_via_action = Upgrade.objects.get(name="upgrade_via_action", bundle=provider_bundle_upgrade) + self.upgrade_cluster_via_action_simple = Upgrade.objects.get( + name="upgrade_via_action_simple", bundle=cluster_bundle_upgrade + ) + self.upgrade_host_via_action_simple = Upgrade.objects.get( + name="upgrade_via_action_simple", bundle=provider_bundle_upgrade + ) + self.upgrade_cluster_via_action_complex = Upgrade.objects.get( + name="upgrade_via_action_complex", bundle=cluster_bundle_upgrade + ) + self.upgrade_host_via_action_complex = Upgrade.objects.get( + name="upgrade_via_action_complex", bundle=provider_bundle_upgrade + ) + + self.create_user() + self.unauthorized_client = APIClient() + 
self.unauthorized_client.login(username="test_user_username", password="test_user_password") def test_cluster_list_upgrades_success(self): response: Response = self.client.get( @@ -49,7 +65,7 @@ def test_cluster_list_upgrades_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 2) + self.assertEqual(len(response.json()), 3) def test_cluster_upgrade_retrieve_success(self): response: Response = self.client.get( @@ -59,21 +75,114 @@ def test_cluster_upgrade_retrieve_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - def test_cluster_upgrade_run_success(self): - response: Response = self.client.post( + upgrade_data = response.json() + self.assertTrue( + set(upgrade_data.keys()).issuperset( + {"id", "hostComponentMapRules", "configSchema", "isAllowToTerminate", "disclaimer"} + ) + ) + + self.assertEqual(upgrade_data["id"], self.cluster_upgrade.pk) + self.assertEqual(len(upgrade_data["hostComponentMapRules"]), 0) + self.assertEqual(upgrade_data["configSchema"], None) + self.assertEqual(upgrade_data["disclaimer"], "") + self.assertFalse(upgrade_data["isAllowToTerminate"]) + + def test_cluster_upgrade_retrieve_complex_success(self): + response: Response = self.client.get( path=reverse( - viewname="v2:upgrade-run", - kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.upgrade_cluster_via_action.pk}, + viewname="v2:upgrade-detail", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.upgrade_cluster_via_action_complex.pk}, ), - data={ - "host_component_map": [{}], - "config": {}, - "attr": {}, - "is_verbose": True, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + upgrade_data = response.json() + self.assertTrue( + set(upgrade_data.keys()).issuperset( + {"id", "hostComponentMapRules", "configSchema", "isAllowToTerminate", "disclaimer"} + ) + ) + + self.assertEqual(upgrade_data["id"], self.upgrade_cluster_via_action_complex.pk) + self.assertEqual(upgrade_data["disclaimer"], "Cool upgrade") + self.assertFalse(upgrade_data["isAllowToTerminate"]) + + self.assertSetEqual( + { + (entry["action"], entry["service"], entry["component"]) + for entry in upgrade_data["hostComponentMapRules"] }, + {("add", "service_1", "component_1"), ("remove", "service_1", "component_2")}, + ) + + attributes = upgrade_data["configSchema"]["fields"] + self.assertEqual(len(attributes), 3) + self.assertEqual([attr["name"] for attr in attributes], ["simple", "grouped", "after"]) + self.assertEqual([attr["name"] for attr in attributes[1]["children"]], ["simple", "second"]) + self.assertEqual(attributes[0]["default"], None) + self.assertEqual(attributes[1]["children"][0]["default"], 4) + + def test_cluster_upgrade_run_success(self): + tasklog = TaskLog.objects.create( + object_id=self.cluster_1.pk, + object_type=ContentType.objects.get(app_label="cm", model="cluster"), + start_date=timezone.now(), + finish_date=timezone.now(), + action=self.upgrade_cluster_via_action_simple.action, ) + with patch("cm.upgrade.start_task", return_value=tasklog): + response: Response = self.client.post( + path=reverse( + viewname="v2:upgrade-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.upgrade_cluster_via_action_simple.pk}, + ), + data={ + "host_component_map": [], + "config": {}, + "is_verbose": True, + }, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(set(data.keys()).issuperset({"id", "childJobs", "startTime"})) + self.assertEqual(data["id"], tasklog.id) + + def 
test_cluster_upgrade_run_complex_success(self): + tasklog = TaskLog.objects.create( + object_id=self.cluster_1.pk, + object_type=ContentType.objects.get(app_label="cm", model="cluster"), + start_date=timezone.now(), + finish_date=timezone.now(), + action=self.upgrade_cluster_via_action_simple.action, + ) + + host = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="one_host") + self.add_host_to_cluster(cluster=self.cluster_1, host=host) + service_1 = self.add_service_to_cluster(service_name="service_1", cluster=self.cluster_1) + component_1 = ServiceComponent.objects.get(service=service_1, prototype__name="component_1") + component_2 = ServiceComponent.objects.get(service=service_1, prototype__name="component_2") + HostComponent.objects.create(cluster=self.cluster_1, service=service_1, component=component_2, host=host) + + with patch("cm.upgrade.start_task", return_value=tasklog): + response: Response = self.client.post( + path=reverse( + viewname="v2:upgrade-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.upgrade_cluster_via_action_complex.pk}, + ), + data={ + "host_component_map": [{"hostId": host.pk, "componentId": component_1.pk}], + "config": {"simple": "val", "grouped": {"simple": 5, "second": 4.3}, "after": ["x", "y"]}, + "is_verbose": True, + }, + ) + + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(set(data.keys()).issuperset({"id", "childJobs", "startTime"})) + self.assertEqual(data["id"], tasklog.id) def test_provider_list_upgrades_success(self): response: Response = self.client.get( @@ -81,7 +190,7 @@ def test_provider_list_upgrades_success(self): ) self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 2) + self.assertEqual(len(response.json()), 3) def test_provider_upgrade_retrieve_success(self): response: Response = self.client.get( @@ -91,22 +200,73 @@ def test_provider_upgrade_retrieve_success(self): ), ) self.assertEqual(response.status_code, HTTP_200_OK) + upgrade_data = response.json() + self.assertTrue( + set(upgrade_data.keys()).issuperset( + {"id", "hostComponentMapRules", "configSchema", "isAllowToTerminate", "disclaimer"} + ) + ) + self.assertEqual(upgrade_data["id"], self.provider_upgrade.pk) + self.assertEqual(len(upgrade_data["hostComponentMapRules"]), 0) + self.assertEqual(upgrade_data["configSchema"], None) + self.assertEqual(upgrade_data["disclaimer"], "") + self.assertFalse(upgrade_data["isAllowToTerminate"]) - def test_provider_upgrade_run_success(self): - response: Response = self.client.post( + def test_provider_upgrade_retrieve_complex_success(self): + response: Response = self.client.get( path=reverse( - viewname="v2:upgrade-run", - kwargs={"hostprovider_pk": self.provider.pk, "pk": self.upgrade_host_via_action.pk}, + viewname="v2:upgrade-detail", + kwargs={"hostprovider_pk": self.provider.pk, "pk": self.upgrade_host_via_action_complex.pk}, ), - data={ - "host_component_map": [{}], - "config": {}, - "attr": {}, - "is_verbose": True, - }, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + upgrade_data = response.json() + self.assertTrue( + set(upgrade_data.keys()).issuperset( + {"id", "hostComponentMapRules", "configSchema", "isAllowToTerminate", "disclaimer"} + ) + ) + + self.assertEqual(upgrade_data["id"], self.upgrade_host_via_action_complex.pk) + self.assertEqual(upgrade_data["disclaimer"], "Cool upgrade") + self.assertFalse(upgrade_data["isAllowToTerminate"]) + + 
self.assertEqual(len(upgrade_data["hostComponentMapRules"]), 0) + + attributes = upgrade_data["configSchema"]["fields"] + self.assertEqual(len(attributes), 3) + self.assertEqual([attr["name"] for attr in attributes], ["simple", "grouped", "after"]) + self.assertEqual([attr["name"] for attr in attributes[1]["children"]], ["simple", "second"]) + self.assertEqual(attributes[0]["default"], None) + self.assertEqual(attributes[1]["children"][0]["default"], 4) + + def test_provider_upgrade_run_success(self): + tasklog = TaskLog.objects.create( + object_id=self.provider.pk, + object_type=ContentType.objects.get(app_label="cm", model="hostprovider"), + start_date=timezone.now(), + finish_date=timezone.now(), + action=self.upgrade_host_via_action_simple.action, ) + with patch("cm.upgrade.start_task", return_value=tasklog): + response: Response = self.client.post( + path=reverse( + viewname="v2:upgrade-run", + kwargs={"hostprovider_pk": self.provider.pk, "pk": self.upgrade_host_via_action_simple.pk}, + ), + data={ + "host_component_map": [], + "config": {}, + "is_verbose": True, + }, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + data = response.json() + self.assertTrue(set(data.keys()).issuperset({"id", "childJobs", "startTime"})) + self.assertEqual(data["id"], tasklog.id) def test_provider_upgrade_run_violate_constraint_fail(self): response: Response = self.client.post( @@ -115,14 +275,13 @@ def test_provider_upgrade_run_violate_constraint_fail(self): kwargs={"hostprovider_pk": self.provider.pk, "pk": self.cluster_upgrade.pk}, ), data={ - "host_component_map": [{}], + "host_component_map": [], "config": {}, - "attr": {}, "is_verbose": True, }, ) - self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_cluster_upgrade_run_violate_constraint_fail(self): response: Response = self.client.post( @@ -131,14 +290,13 @@ def test_cluster_upgrade_run_violate_constraint_fail(self): kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.provider_upgrade.pk}, ), data={ - "host_component_map": [{}], + "host_component_map": [], "config": {}, - "attr": {}, "is_verbose": True, }, ) - self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_provider_upgrade_run_not_found_fail(self): response: Response = self.client.post( @@ -147,9 +305,8 @@ def test_provider_upgrade_run_not_found_fail(self): kwargs={"hostprovider_pk": self.provider.pk, "pk": self.provider_upgrade.pk + 10}, ), data={ - "host_component_map": [{}], + "host_component_map": [], "config": {}, - "attr": {}, "is_verbose": True, }, ) @@ -163,9 +320,8 @@ def test_cluster_upgrade_run_not_found_fail(self): kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_upgrade.pk + 10}, ), data={ - "host_component_map": [{}], + "host_component_map": [], "config": {}, - "attr": {}, "is_verbose": True, }, ) @@ -189,3 +345,80 @@ def test_hostprovider_upgrade_retrieve_not_found_fail(self): ), ) self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_cluster_upgrade_hostcomponent_validation_fail(self): + for hc_data in ([{"hostId": 1}], [{"componentId": 4}], [{}]): + with self.subTest(f"Pass host_component_map as {hc_data}"): + response: Response = self.client.post( + path=reverse( + viewname="v2:upgrade-run", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.upgrade_cluster_via_action_complex.pk}, + ), + data={ + "host_component_map": hc_data, + "config": {"simple": "val", "grouped": {"simple": 5, 
"second": 4.3}, "after": ["x", "y"]}, + "is_verbose": True, + }, + ) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + + def test_cluster_list_unauthorized_fail(self) -> None: + response: Response = self.unauthorized_client.get( + path=reverse(viewname="v2:upgrade-list", kwargs={"cluster_pk": self.cluster_1.pk}), + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_cluster_retrieve_unauthorized_fail(self): + response: Response = self.unauthorized_client.get( + path=reverse( + viewname="v2:upgrade-detail", + kwargs={"cluster_pk": self.cluster_1.pk, "pk": self.cluster_upgrade.pk}, + ), + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_hostprovider_list_unauthorized_fail(self) -> None: + response: Response = self.unauthorized_client.get( + path=reverse(viewname="v2:upgrade-list", kwargs={"hostprovider_pk": self.provider.pk}), + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_hostprovider_retrieve_unauthorized_fail(self): + response: Response = self.unauthorized_client.get( + path=reverse( + viewname="v2:upgrade-detail", + kwargs={"hostprovider_pk": self.cluster_1.pk, "pk": self.provider_upgrade.pk}, + ), + ) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + +class TestAdcmUpgrade(APITestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + init_roles() + init(adcm_conf_file=Path(__file__).parent / "bundles" / "adcm_configs" / "config.yaml") + + def setUp(self) -> None: + super().setUp() + self.original_adcm = ADCM.objects.first() + config_log = ConfigLog.objects.get(pk=self.original_adcm.config.current) + config_log.config["job_log"]["log_rotation_on_fs"] = 120 + config_log.config["job_log"]["log_rotation_in_db"] = 50 + config_log.config["config_rotation"]["config_rotation_in_db"] = 10 + config_log.save(update_fields=["config"]) + + def test_adcm_2_6_upgrade_success(self): + init() + new_adcm = ADCM.objects.first() + old_adcm_version = float(self.original_adcm.prototype.version) + new_adcm_version = float(new_adcm.prototype.version) + config_log = ConfigLog.objects.get(obj_ref=new_adcm.config, id=new_adcm.config.current) + self.assertNotIn("job_log", config_log.config) + self.assertNotIn("config_rotation", config_log.config) + self.assertEqual(config_log.config["audit_data_retention"]["log_rotation_in_db"], 50) + self.assertEqual(config_log.config["audit_data_retention"]["log_rotation_on_fs"], 120) + self.assertEqual(config_log.config["audit_data_retention"]["config_rotation_in_db"], 10) + self.assertGreater(new_adcm_version, old_adcm_version) diff --git a/python/api_v2/tests/test_user.py b/python/api_v2/tests/test_user.py new file mode 100644 index 0000000000..74fa51eaaa --- /dev/null +++ b/python/api_v2/tests/test_user.py @@ -0,0 +1,462 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from api_v2.rbac.user.constants import UserTypeChoices +from api_v2.tests.base import BaseAPITestCase +from django.contrib.auth.models import Permission +from django.contrib.contenttypes.models import ContentType +from django.urls import reverse +from django.utils.timezone import now +from rbac.models import Group, OriginType, User +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) + + +class TestUserAPI(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + self.group = Group.objects.create(name="test_group") + + def _grant_permissions(self, user: User) -> None: + view_user_permission, _ = Permission.objects.get_or_create( + content_type=ContentType.objects.get_for_model(model=User), + codename=f"view_{User.__name__.lower()}", + ) + change_user_permission, _ = Permission.objects.get_or_create( + content_type=ContentType.objects.get_for_model(model=User), + codename=f"change_{User.__name__.lower()}", + ) + + user.user_permissions.add(*(view_user_permission, change_user_permission)) + + def test_list_success(self): + response = self.client.get(path=reverse(viewname="v2:rbac:user-list")) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.json()["count"], 3) + self.assertListEqual( + sorted(response.json()["results"][0].keys()), + sorted( + [ + "id", + "username", + "firstName", + "lastName", + "status", + "email", + "type", + "isBuiltIn", + "isSuperUser", + "groups", + ] + ), + ) + + def test_create_success(self): + response = self.client.post( + path=reverse(viewname="v2:rbac:user-list"), + data={ + "username": "test_user_username", + "password": "test_user_password", + "firstName": "test_user_first_name", + "lastName": "test_user_last_name", + "groups": [self.group.pk], + "email": "testuser@mail.ru", + "isSuperuser": False, + }, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + user = User.objects.filter(username="test_user_username").first() + self.assertIsNotNone(user) + self.assertEqual(response.json()["firstName"], "test_user_first_name") + self.assertEqual(response.json()["lastName"], "test_user_last_name") + self.assertFalse(response.json()["isSuperUser"]) + self.assertEqual(user.groups.count(), 1) + + def test_create_required_fields_success(self): + response = self.client.post( + path=reverse(viewname="v2:rbac:user-list"), + data={"username": "test_user_username_1", "password": "test_user_password_1"}, + ) + + self.assertEqual(response.status_code, HTTP_201_CREATED) + self.assertTrue(User.objects.filter(username="test_user_username_1").exists()) + + def test_create_required_fields_fail(self): + response = self.client.post(path=reverse(viewname="v2:rbac:user-list"), data={"username": "test_user_username"}) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), {"code": "BAD_REQUEST", "desc": "password - This field is required.;", "level": "error"} + ) + + def test_retrieve_success(self): + user = self.create_user() + + response = self.client.get(path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": user.pk})) + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertListEqual( + sorted(response.json().keys()), + sorted( + [ + "id", + "username", + "firstName", + "lastName", + "status", + "email", + "type", + "isBuiltIn", + "isSuperUser", + "groups", + ] + ), + ) + + self.assertEqual(response.json()["id"], user.pk) + + def 
test_retrieve_not_found_fail(self): + wrong_pk = self.get_non_existent_pk(model=User) + + response = self.client.get(path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": wrong_pk})) + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_update_by_superuser_success(self): + group = Group.objects.create(name="group") + user = self.create_user(user_data={"username": "test_user", "password": "test_user_password"}) + + response = self.client.patch( + path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": user.pk}), + data={ + "password": "newtestpassword", + "email": "test_user@mail.ru", + "firstName": "test_user_first_name", + "lastName": "test_user_last_name", + "isSuperUser": True, + "groups": [group.pk], + }, + ) + + user.refresh_from_db() + + data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertFalse(user.check_password(raw_password="test_user_password")) + self.assertTrue(user.check_password(raw_password="newtestpassword")) + self.assertEqual(data["email"], "test_user@mail.ru") + self.assertEqual(data["firstName"], "test_user_first_name") + self.assertEqual(data["lastName"], "test_user_last_name") + self.assertTrue(data["isSuperUser"]) + self.assertEqual(len(data["groups"]), 1) + self.assertDictEqual(data["groups"][0], {"id": group.pk, "name": group.name, "displayName": group.display_name}) + + def test_update_self_by_regular_user_success(self): + """ + According to business requirements, a user cannot make himself a super user and add himself to a group + """ + + group = Group.objects.create(name="group") + user = self.create_user(user_data={"username": "test_user", "password": "test_user_password"}) + self._grant_permissions(user=user) + self.client.login(username="test_user", password="test_user_password") + + response = self.client.patch( + path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": user.pk}), + data={ + "password": "newtestpassword", + "email": "test_user@mail.ru", + "firstName": "test_user_first_name", + "lastName": "test_user_last_name", + "isSuperUser": True, + "groups": [group.pk], + }, + ) + + user.refresh_from_db() + data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertFalse(user.check_password(raw_password="test_user_password")) + self.assertTrue(user.check_password(raw_password="newtestpassword")) + self.assertEqual(data["email"], "test_user@mail.ru") + self.assertEqual(data["firstName"], "test_user_first_name") + self.assertEqual(data["lastName"], "test_user_last_name") + self.assertFalse(data["isSuperUser"]) + self.assertEqual(len(data["groups"]), 0) + + def test_update_not_self_by_regular_user_fail(self): + """According to business requirements, a non-superuser cannot modify another user""" + + group = Group.objects.create(name="group") + first_user = self.create_user(user_data={"username": "test_user", "password": "test_user_password"}) + second_user = self.create_user( + user_data={ + "username": "test_user2", + "password": "test_user2_password", + "email": "test_user2@mail.ru", + "first_name": "test_user2_first_name", + "last_name": "test_user2_last_name", + } + ) + self._grant_permissions(user=first_user) + self.client.login(username="test_user", password="test_user_password") + + response = self.client.patch( + path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": second_user.pk}), + data={ + "password": "new_test_user2_password", + "email": "new_test_user2@mail.ru", + "firstName": "new_test_user2_first_name", + "lastName": 
"new_test_user2_last_name", + "isSuperUser": True, + "groups": [group.pk], + }, + ) + second_user.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), {"code": "USER_UPDATE_ERROR", "desc": "Can't update other user", "level": "error"} + ) + self.assertFalse(second_user.check_password(raw_password="new_test_user2_password")) + self.assertTrue(second_user.check_password(raw_password="test_user2_password")) + self.assertEqual(second_user.email, "test_user2@mail.ru") + self.assertEqual(second_user.first_name, "test_user2_first_name") + self.assertEqual(second_user.last_name, "test_user2_last_name") + self.assertFalse(second_user.is_superuser) + self.assertEqual(second_user.groups.count(), 0) + + def test_update_password_self_by_profile_fail(self): + user_data = { + "username": "test_user", + "password": "test_user_password", + "email": "test_user@mail.ru", + "first_name": "test_user_first_name", + "last_name": "test_user_last_name", + } + + user = self.create_user(user_data=user_data) + + self._grant_permissions(user=user) + + self.client.login(username="test_user", password="test_user_password") + + response = self.client.put(path=reverse(viewname="v2:adcm:profile"), data={"newPassword": "newtestpassword"}) + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + { + "code": "USER_PASSWORD_CURRENT_PASSWORD_REQUIRED_ERROR", + "desc": 'Field "current_password" should be filled and match user current password', + "level": "error", + }, + ) + + def test_delete_success(self): + user = self.create_user() + + response = self.client.delete( + path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": user.pk}), + ) + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertIsNone(response.data) + + with self.assertRaises(User.DoesNotExist): + User.objects.get(pk=user.pk) + + def test_delete_built_in_fail(self): + user = self.create_user() + user.built_in = True + user.save(update_fields=["built_in"]) + + response = self.client.delete( + path=reverse(viewname="v2:rbac:user-detail", kwargs={"pk": user.pk}), + ) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + {"code": "USER_DELETE_ERROR", "desc": "Built-in user could not be deleted", "level": "error"}, + ) + + def test_unblock_success(self): + user = self.create_user() + user.blocked_at = now() + user.failed_login_attempts = 5 + user.save(update_fields=["blocked_at", "failed_login_attempts"]) + + response = self.client.post( + path=reverse(viewname="v2:rbac:user-unblock", kwargs={"pk": user.pk}), + ) + + user.refresh_from_db() + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertIsNone(response.data) + self.assertIsNone(user.blocked_at) + self.assertEqual(user.failed_login_attempts, 0) + + def test_unblock_built_in_fail(self): + user = self.create_user() + user.built_in = True + user.save(update_fields=["built_in"]) + + response = self.client.post( + path=reverse(viewname="v2:rbac:user-unblock", kwargs={"pk": user.pk}), + ) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + {"code": "USER_BLOCK_ERROR", "desc": "Built-in user could not be blocked", "level": "error"}, + ) + + def test_ordering_success(self): + user_data = [ + { + "username": "username1", + "password": "username1password", + "email": "username1@mail.ru", + "first_name": "username1_first_name", + "last_name": "username1_last_name", 
+ }, + { + "username": "username2", + "password": "username2password", + "email": "username2@mail.ru", + "first_name": "username2_first_name", + "last_name": "username2_last_name", + }, + { + "username": "username3", + "password": "username3password", + "email": "username3@mail.ru", + "first_name": "username3_first_name", + "last_name": "username3_last_name", + }, + ] + for data in user_data: + self.create_user(user_data=data) + + response = self.client.get(path=reverse(viewname="v2:rbac:user-list"), data={"ordering": "-username"}) + self.assertEqual(response.status_code, HTTP_200_OK) + + response_usernames = [user["username"] for user in response.json()["results"]] + db_usernames = list(User.objects.order_by("-username").values_list("username", flat=True)) + self.assertListEqual(response_usernames, db_usernames) + + def test_ordering_wrong_params_fail(self): + response = self.client.get(path=reverse(viewname="v2:rbac:user-list"), data={"ordering": "param"}) + + self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) + self.assertDictEqual( + response.json(), + { + "code": "BAD_REQUEST", + "desc": "ordering - Select a valid choice. param is not one of the available choices.;", + "level": "error", + }, + ) + + def test_filtering_by_username_success(self): + user_data = [ + { + "username": "username1", + "password": "username1password", + "email": "username1@mail.ru", + "first_name": "username1_first_name", + "last_name": "username1_last_name", + }, + { + "username": "username2", + "password": "username2password", + "email": "username2@mail.ru", + "first_name": "username2_first_name", + "last_name": "username2_last_name", + }, + ] + for data in user_data: + self.create_user(user_data=data) + + response = self.client.get(path=reverse(viewname="v2:rbac:user-list"), data={"username": "username1"}) + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["results"]), 1) + self.assertEqual(response.json()["results"][0]["username"], "username1") + + def test_filtering_by_status_success(self): + user_data = [ + { + "username": "username1", + "password": "username1password", + "email": "username1@mail.ru", + "first_name": "username1_first_name", + "last_name": "username1_last_name", + }, + { + "username": "username2", + "password": "username2password", + "email": "username2@mail.ru", + "first_name": "username2_first_name", + "last_name": "username2_last_name", + }, + ] + for data in user_data: + self.create_user(user_data=data) + + target_user = User.objects.get(username="username2") + target_user.blocked_at = now() + target_user.save(update_fields=["blocked_at"]) + + response = self.client.get(path=reverse(viewname="v2:rbac:user-list"), data={"status": "blocked"}) + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["results"]), 1) + self.assertEqual(response.json()["results"][0]["username"], target_user.username) + + def test_filtering_by_type_success(self): + user_data = [ + { + "username": "username1", + "password": "username1password", + "email": "username1@mail.ru", + "first_name": "username1_first_name", + "last_name": "username1_last_name", + }, + { + "username": "username2", + "password": "username2password", + "email": "username2@mail.ru", + "first_name": "username2_first_name", + "last_name": "username2_last_name", + }, + ] + for data in user_data: + self.create_user(user_data=data) + + target_user = User.objects.get(username="username2") + target_user.type = OriginType.LDAP + 
target_user.save(update_fields=["type"]) + + response = self.client.get( + path=reverse(viewname="v2:rbac:user-list"), data={"type": UserTypeChoices.LDAP.value} + ) + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.json()["results"]), 1) + self.assertEqual(response.json()["results"][0]["username"], target_user.username) diff --git a/python/api_v2/upgrade/serializers.py b/python/api_v2/upgrade/serializers.py index 24417b571c..62b37c3ffa 100644 --- a/python/api_v2/upgrade/serializers.py +++ b/python/api_v2/upgrade/serializers.py @@ -12,49 +12,43 @@ from typing import Any -from api_v2.config.serializers import ConfigSerializerUI -from cm.adcm_config.config import get_action_variant, get_prototype_config -from cm.models import Cluster, HostProvider, PrototypeConfig, Upgrade -from rest_framework.serializers import ( - BooleanField, - JSONField, - ModelSerializer, - SerializerMethodField, -) - -from adcm.serializers import EmptySerializer +from api_v2.bundle.serializers import BundleRelatedSerializer +from cm.models import Upgrade +from rest_framework.serializers import ModelSerializer, SerializerMethodField class UpgradeListSerializer(ModelSerializer): - prototype_type = None - is_license_accepted = SerializerMethodField() + license_status = SerializerMethodField() class Meta: model = Upgrade - fields = ["id", "name", "display_name", "is_license_accepted"] + fields = ["id", "name", "display_name", "license_status"] @classmethod - def get_is_license_accepted(cls, upgrade: Upgrade) -> bool: - return upgrade.bundle.prototype_set.filter(type=cls.prototype_type).first().is_license_accepted - - -class ClusterUpgradeListSerializer(UpgradeListSerializer): - prototype_type = "cluster" - - -class HostProviderUpgradeListSerializer(UpgradeListSerializer): - prototype_type = "provider" + def get_license_status(cls, upgrade: Upgrade) -> bool: + return upgrade.bundle.prototype_set.filter(type__in=("cluster", "provider")).first().license -class UpgradeRetrieveSerializer(ModelSerializer): +class UpgradeRetrieveSerializer(UpgradeListSerializer): is_allow_to_terminate = SerializerMethodField() host_component_map_rules = SerializerMethodField() disclaimer = SerializerMethodField() - config = SerializerMethodField() + config_schema = SerializerMethodField() + bundle = BundleRelatedSerializer() class Meta: model = Upgrade - fields = ["is_allow_to_terminate", "host_component_map_rules", "disclaimer", "config"] + fields = ( + "id", + "name", + "display_name", + "is_allow_to_terminate", + "license_status", + "host_component_map_rules", + "config_schema", + "disclaimer", + "bundle", + ) @staticmethod def get_disclaimer(instance: Upgrade) -> str: @@ -77,30 +71,5 @@ def get_host_component_map_rules(instance: Upgrade) -> Any: return [] - def get_config(self, instance): - if instance.action is None: - return {"attr": {}, "config": []} - - if "cluster_id" in self.context: - obj = Cluster.obj.get(pk=self.context["cluster_id"]) - elif "provider_id" in self.context: - obj = HostProvider.obj.get(pk=self.context["provider_id"]) - else: - obj = None - - action_conf = PrototypeConfig.objects.filter( - prototype=instance.action.prototype, - action=instance.action, - ).order_by("id") - *_, attr = get_prototype_config(instance.action.prototype, instance.action) - if obj: - get_action_variant(obj, action_conf) - conf = ConfigSerializerUI(action_conf, many=True, context=self.context, read_only=True) - return {"attr": attr, "config": conf.data} - - -class UpgradeRunSerializer(EmptySerializer): - 
host_component_map = JSONField() - config = JSONField() - attr = JSONField() - is_verbose = BooleanField() + def get_config_schema(self, _: Upgrade): + return self.context["config_schema"] diff --git a/python/api_v2/upgrade/views.py b/python/api_v2/upgrade/views.py index 3f7676a556..7f7f0fbac8 100644 --- a/python/api_v2/upgrade/views.py +++ b/python/api_v2/upgrade/views.py @@ -9,165 +9,114 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Tuple - -from api_v2.upgrade.serializers import ( - ClusterUpgradeListSerializer, - HostProviderUpgradeListSerializer, - UpgradeRetrieveSerializer, - UpgradeRunSerializer, -) -from cm.issue import update_hierarchy_issues -from cm.models import Cluster, HostProvider, Upgrade -from cm.upgrade import do_upgrade, get_upgrade -from guardian.shortcuts import get_objects_for_user +from api_v2.action.serializers import ActionRunSerializer +from api_v2.action.utils import insert_service_ids +from api_v2.config.utils import get_config_schema +from api_v2.task.serializers import TaskListSerializer +from api_v2.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer +from api_v2.views import CamelCaseGenericViewSet +from cm.errors import AdcmEx +from cm.models import Cluster, HostProvider, TaskLog, Upgrade +from cm.upgrade import check_upgrade, do_upgrade, get_upgrade +from rbac.models import User from rest_framework.decorators import action from rest_framework.exceptions import NotFound from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import ( - HTTP_403_FORBIDDEN, - HTTP_404_NOT_FOUND, - HTTP_409_CONFLICT, -) -from rest_framework.viewsets import GenericViewSet +from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT +from adcm.mixins import GetParentObjectMixin from adcm.permissions import ( VIEW_CLUSTER_PERM, VIEW_CLUSTER_UPGRADE_PERM, VIEW_PROVIDER_PERM, VIEW_PROVIDER_UPGRADE_PERM, DjangoModelPermissionsAudit, + get_object_for_user, ) -class UpgradeViewSet(GenericViewSet, ListModelMixin, RetrieveModelMixin): - queryset = Upgrade.objects.all().select_related("action") +class UpgradeViewSet( + ListModelMixin, GetParentObjectMixin, RetrieveModelMixin, CamelCaseGenericViewSet +): # pylint: disable=too-many-ancestors + queryset = Upgrade.objects.select_related("action", "bundle", "action__prototype").order_by("pk") permission_classes = [DjangoModelPermissionsAudit] + filter_backends = [] - base_for_upgrade = { - "cluster_pk": {"perms": VIEW_CLUSTER_PERM, "klass": Cluster, "list_serializer": ClusterUpgradeListSerializer}, - "hostprovider_pk": { - "perms": VIEW_PROVIDER_PERM, - "klass": HostProvider, - "list_serializer": HostProviderUpgradeListSerializer, - }, - } - - def get_serializer_class( - self, - ) -> type[ClusterUpgradeListSerializer] | type[UpgradeRunSerializer] | type[UpgradeRetrieveSerializer]: + def get_serializer_class(self) -> type[UpgradeListSerializer | ActionRunSerializer | UpgradeRetrieveSerializer]: if self.action == "retrieve": return UpgradeRetrieveSerializer if self.action == "run": - return UpgradeRunSerializer + return ActionRunSerializer - return self.base_for_upgrade[list(self.kwargs.keys()).pop()]["list_serializer"] + return UpgradeListSerializer - def _has_perm(self, request: Request, **kwargs) -> Cluster | HostProvider | None: - 
if "hostprovider_pk" in kwargs: - pk_name, pk_value = "hostprovider_pk", kwargs["hostprovider_pk"] - else: - pk_name, pk_value = "cluster_pk", kwargs["cluster_pk"] - perms, klass, _ = self.base_for_upgrade[pk_name].values() - object_queryset = get_objects_for_user(user=request.user, perms=perms, klass=klass) - object_to_upgrade = object_queryset.filter(pk=pk_value).first() - if not object_to_upgrade: - raise NotFound - object_premissions_for_uprgade = {HostProvider: VIEW_PROVIDER_UPGRADE_PERM, Cluster: VIEW_CLUSTER_UPGRADE_PERM} - - if not request.user.has_perm( - perm=object_premissions_for_uprgade[type(object_to_upgrade)], obj=object_to_upgrade - ): - return None - - return object_to_upgrade - - def _get_error_message_403(self, **kwargs): - if "hostprovider_pk" in kwargs: - pk_name, pk_value = "host provider", kwargs["hostprovider_pk"] - else: - pk_name, pk_value = "cluster", kwargs["cluster_pk"] - return ( - f"Current user has no permission to upgrade {pk_name} with pk '{pk_value}' " - f"by upgrade with pk '{kwargs['pk']}'", + def get_parent_object_for_user(self, user: User) -> Cluster | HostProvider: + parent: Cluster | HostProvider | None = self.get_parent_object() + if parent is None or not isinstance(parent, (Cluster, HostProvider)): + message = "Can't find upgrade's parent object" + raise NotFound(message) + + if isinstance(parent, Cluster): + return get_object_for_user( + user=user, perms=(VIEW_CLUSTER_PERM, VIEW_CLUSTER_UPGRADE_PERM), klass=Cluster, id=parent.pk + ) + + return get_object_for_user( + user=user, perms=(VIEW_PROVIDER_PERM, VIEW_PROVIDER_UPGRADE_PERM), klass=HostProvider, id=parent.pk ) - # pylint: disable=unused-argument - def get_upgrade_list( - self, request: Request, *args, **kwargs - ) -> Tuple[HostProvider | Cluster, list[Upgrade]] | None: - object_to_upgrade = self._has_perm(request=request, **kwargs) - if not object_to_upgrade: - return None - update_hierarchy_issues(obj=object_to_upgrade) - return object_to_upgrade, get_upgrade(obj=object_to_upgrade) + def get_upgrade(self, parent: Cluster | HostProvider): + upgrade = self.get_object() + if upgrade.bundle.name != parent.prototype.bundle.name: + raise AdcmEx(code="UPGRADE_NOT_FOUND") + + upgrade_is_allowed, error = check_upgrade(obj=parent, upgrade=upgrade) + if not upgrade_is_allowed: + raise AdcmEx(code="UPGRADE_NOT_FOUND", msg=error) + + return upgrade def list(self, request: Request, *args, **kwargs) -> Response: - object_to_upgrade, upgrade_list = self.get_upgrade_list(request, *args, **kwargs) - if not object_to_upgrade: - Response( - data=self._get_error_message_403(**kwargs), - status=HTTP_403_FORBIDDEN, - ) - serializer = self.get_serializer(instance=upgrade_list, many=True) + parent: Cluster | HostProvider = self.get_parent_object_for_user(user=request.user) + upgrades = get_upgrade(obj=parent) + serializer = self.get_serializer_class()(instance=upgrades, many=True) return Response(data=serializer.data) def retrieve(self, request: Request, *args, **kwargs) -> Response: - object_to_upgrade, upgrade_list = self.get_upgrade_list(request, *args, **kwargs) - if not object_to_upgrade: - Response( - data=self._get_error_message_403(**kwargs), - status=HTTP_403_FORBIDDEN, - ) - instance = self.get_object() - if instance not in upgrade_list: - return Response( - data=f"The upgrade " - f"{instance.name} with pk '{instance.pk}' " - f"has not allowable to instance with pk {kwargs['pk']}", - status=HTTP_404_NOT_FOUND, - ) - serializer = self.get_serializer(instance) + parent: Cluster | HostProvider = 
self.get_parent_object_for_user(user=request.user) + + upgrade = self.get_upgrade(parent=parent) + + if upgrade.action: + schema = {"fields": get_config_schema(parent_object=parent, action=upgrade.action)} + else: + schema = None + + serializer = self.get_serializer_class()(instance=upgrade, context={"parent": parent, "config_schema": schema}) return Response(serializer.data) @action(methods=["post"], detail=True) - def run(self, request: Request, *args, **kwargs) -> Response: # pylint: disable=unused-argument + def run(self, request: Request, *_, **__) -> Response: serializer = self.get_serializer_class()(data=request.data) serializer.is_valid(raise_exception=True) - object_to_upgrade = self._has_perm(request, **kwargs) - if not object_to_upgrade: - return Response( - data=self._get_error_message_403(**kwargs), - status=HTTP_403_FORBIDDEN, - ) + parent: Cluster | HostProvider = self.get_parent_object_for_user(user=request.user) - object_to_upgrade, allowable_upgrades = self.get_upgrade_list(request, *args, **kwargs) - if not object_to_upgrade: - Response( - data=self._get_error_message_403(**kwargs), - status=HTTP_403_FORBIDDEN, - ) - if not Upgrade.objects.filter(pk=kwargs["pk"]).first(): - return Response(data=f"Upgrade with pk '{kwargs['pk']}' not found", status=HTTP_404_NOT_FOUND) - - matching_upgrades = [u for u in allowable_upgrades if u.pk == int(kwargs["pk"])] - if not matching_upgrades: - return Response( - data=f"Upgrade with pk '{kwargs['pk']}' is not allowable for '{object_to_upgrade.pk}'", - status=HTTP_409_CONFLICT, - ) + upgrade = self.get_upgrade(parent=parent) - do_upgrade( - obj=object_to_upgrade, - upgrade=matching_upgrades.pop(), - config=serializer.validated_data.get("config", {}), + result = do_upgrade( + obj=parent, + upgrade=upgrade, + config=serializer.validated_data["config"], attr=serializer.validated_data.get("attr", {}), - hostcomponent=serializer.validated_data.get("host_component_map", []), + hostcomponent=insert_service_ids(hc_create_data=serializer.validated_data["host_component_map"]), ) - return Response() + if (task_id := result["task_id"]) is None: + return Response(status=HTTP_204_NO_CONTENT) + + return Response(status=HTTP_200_OK, data=TaskListSerializer(instance=TaskLog.objects.get(pk=task_id)).data) diff --git a/python/api_v2/urls.py b/python/api_v2/urls.py index 731b0380e6..0e9354fade 100644 --- a/python/api_v2/urls.py +++ b/python/api_v2/urls.py @@ -15,6 +15,7 @@ urlpatterns = [ path("", APIRoot.as_view(), name="api-root-v2"), + path("rbac/", include(("api_v2.rbac.urls", "rbac"))), path("clusters/", include("api_v2.cluster.urls")), path("bundles/", include("api_v2.bundle.urls")), path("prototypes/", include("api_v2.prototype.urls")), @@ -22,4 +23,6 @@ path("hostproviders/", include("api_v2.hostprovider.urls")), path("audit/", include(("api_v2.audit.urls", "audit"))), path("jobs/", include("api_v2.job.urls")), + path("tasks/", include("api_v2.task.urls")), + path("adcm/", include(("api_v2.adcm.urls", "adcm"))), ] diff --git a/python/api_v2/views.py b/python/api_v2/views.py index f1a92e7b71..b9a5eaf2c5 100644 --- a/python/api_v2/views.py +++ b/python/api_v2/views.py @@ -10,7 +10,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
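# Sketch of the request flow implemented by the new run() action above (illustrative only):
#   1. the parent Cluster/HostProvider is resolved via get_parent_object_for_user(), which
#      checks both the "view" and "upgrade" permissions for that object type;
#   2. get_upgrade() rejects upgrades whose bundle name does not match the parent's bundle,
#      or that fail check_upgrade();
#   3. do_upgrade() either starts a task, answered with 200 OK and the serialized TaskLog,
#      or starts none, answered with 204 No Content when result["task_id"] is None.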
+from djangorestframework_camel_case.parser import ( + CamelCaseFormParser, + CamelCaseJSONParser, + CamelCaseMultiPartParser, +) +from djangorestframework_camel_case.render import ( + CamelCaseBrowsableAPIRenderer, + CamelCaseJSONRenderer, +) +from rest_framework.mixins import ( + CreateModelMixin, + DestroyModelMixin, + ListModelMixin, + RetrieveModelMixin, + UpdateModelMixin, +) from rest_framework.routers import APIRootView +from rest_framework.viewsets import GenericViewSet class APIRoot(APIRootView): @@ -22,4 +39,23 @@ class APIRoot(APIRootView): "hostproviders": "hostprovider-list", "prototypes": "prototype-list", "jobs": "joblog-list", + "tasks": "tasklog-list", + "rbac": "rbac:root", } + + +class CamelCaseGenericViewSet(GenericViewSet): + parser_classes = [CamelCaseJSONParser, CamelCaseMultiPartParser, CamelCaseFormParser] + renderer_classes = [CamelCaseJSONRenderer, CamelCaseBrowsableAPIRenderer] + + +class CamelCaseModelViewSet( + CreateModelMixin, RetrieveModelMixin, UpdateModelMixin, DestroyModelMixin, ListModelMixin, CamelCaseGenericViewSet +): # pylint: disable=too-many-ancestors + pass + + +class CamelCaseReadOnlyModelViewSet( + RetrieveModelMixin, ListModelMixin, CamelCaseGenericViewSet +): # pylint: disable=too-many-ancestors + pass diff --git a/python/audit/cases/rbac.py b/python/audit/cases/rbac.py index 46c00a7795..56e9a72dce 100644 --- a/python/audit/cases/rbac.py +++ b/python/audit/cases/rbac.py @@ -46,7 +46,7 @@ def _rbac_case( operation_type = AuditLogOperationType.UPDATE try: obj = AUDIT_OBJECT_TYPE_TO_MODEL_MAP[obj_type].objects.get(pk=data.obj_pk) - except ObjectDoesNotExist: + except (ObjectDoesNotExist, ValueError): obj = None if obj: diff --git a/python/audit/middleware.py b/python/audit/middleware.py index dbf4e34f63..4764af8a2c 100644 --- a/python/audit/middleware.py +++ b/python/audit/middleware.py @@ -13,7 +13,7 @@ from json.decoder import JSONDecodeError from audit.cef_logger import cef_logger -from audit.models import AuditSession, AuditSessionLoginResult +from audit.models import AuditSession, AuditSessionLoginResult, AuditUser from cm.models import ADCM, ConfigLog from django.conf import settings from django.contrib.auth.models import AnonymousUser, User @@ -44,11 +44,16 @@ def _audit( result = AuditSessionLoginResult.ACCOUNT_DISABLED else: result = AuditSessionLoginResult.WRONG_PASSWORD + except User.DoesNotExist: result = AuditSessionLoginResult.USER_NOT_FOUND user = None - auditsession = AuditSession.objects.create(user=user, login_result=result, login_details=details) + audit_user = None + if user is not None: + audit_user = AuditUser.objects.filter(username=user.username).order_by("-pk").first() + + auditsession = AuditSession.objects.create(user=audit_user, login_result=result, login_details=details) cef_logger(audit_instance=auditsession, signature_id=resolve(request_path).route) return user, result diff --git a/python/audit/migrations/0005_audit_user.py b/python/audit/migrations/0005_audit_user.py new file mode 100644 index 0000000000..3e11f412db --- /dev/null +++ b/python/audit/migrations/0005_audit_user.py @@ -0,0 +1,124 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
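# Self-contained illustration (not part of the patch) of the key conversion the CamelCase*
# parsers/renderers above provide; it assumes the camelize/underscoreize helpers that ship
# with djangorestframework-camel-case.
from djangorestframework_camel_case.util import camelize, underscoreize

incoming = {"hostComponentMap": [], "isVerbose": True}   # payload as a JS client would send it
parsed = underscoreize(incoming)                         # what the view code then works with
assert "host_component_map" in parsed and "is_verbose" in parsed

outgoing = camelize({"host_component_map": [], "is_verbose": True})
assert "hostComponentMap" in outgoing and "isVerbose" in outgoing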
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by Django 3.2.19 on 2023-06-29 08:56 + +import django.db.models.deletion +from django.db import migrations, models +from django.utils import timezone + + +def create_and_link_audit_users(apps, schema_editor): + AuthUser = apps.get_model("auth", "User") + AuditUser = apps.get_model("audit", "AuditUser") + AuditLog = apps.get_model("audit", "AuditLog") + AuditSession = apps.get_model("audit", "AuditSession") + + for auth_user in AuthUser.objects.filter(is_active=True): + AuditUser.objects.get_or_create( + username=auth_user.username, created_at=auth_user.date_joined, auth_user_id=auth_user.pk + ) + + auditlog_usernames = [ + username + for username in AuditLog.objects.values_list("user__username", flat=True).distinct() + if username is not None + ] + auth_user_pks = [AuthUser.objects.get(username=username).pk for username in auditlog_usernames] + for log_username, auth_pk in zip(auditlog_usernames, auth_user_pks): + audit_user, created = AuditUser.objects.get_or_create(username=log_username, auth_user_id=auth_pk) + if created: + audit_user.created_at = AuthUser.objects.get(username=log_username).date_joined + audit_user.save(update_fields=["created_at"]) + + AuditLog.objects.filter(user__username=log_username).update(audit_user=audit_user) + + auditsession_usernames = [ + username + for username in AuditSession.objects.values_list("user__username", flat=True).distinct() + if username is not None + ] + auth_user_pks = [AuthUser.objects.get(username=username).pk for username in auditsession_usernames] + for session_username, auth_pk in zip(auditsession_usernames, auth_user_pks): + audit_user, created = AuditUser.objects.get_or_create(username=session_username, auth_user_id=auth_pk) + if created: + audit_user.created_at = AuthUser.objects.get(username=session_username).date_joined + audit_user.save(update_fields=["created_at"]) + + AuditSession.objects.filter(user__username=session_username).update(audit_user=audit_user) + + deleting_users = AuthUser.objects.filter(is_active=False).values_list("username", flat=True) + AuditUser.objects.filter(username__in=deleting_users).update(deleted_at=timezone.now()) + + +def create_and_link_audit_users_reverse(apps, schema_editor): + AuthUser = apps.get_model("auth", "User") + AuditLog = apps.get_model("audit", "AuditLog") + AuditSession = apps.get_model("audit", "AuditSession") + + for audit_session in AuditSession.objects.filter(audit_user__isnull=False): + auth_user = AuthUser.objects.get(username=audit_session.audit_user.username) + audit_session.user = auth_user + audit_session.save(update_fields=["user"]) + + for audit_log in AuditLog.objects.filter(audit_user__isnull=False): + auth_user = AuthUser.objects.get(username=audit_log.audit_user.username) + audit_log.user = auth_user + audit_log.save(update_fields=["user"]) + + +class Migration(migrations.Migration): + dependencies = [ + ("audit", "0004_auto_20230131_0944"), + ] + + operations = [ + migrations.CreateModel( + name="AuditUser", + fields=[ + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("username", 
models.CharField(max_length=150)), + ("auth_user_id", models.PositiveBigIntegerField()), + ("created_at", models.DateTimeField(null=True)), + ("deleted_at", models.DateTimeField(null=True)), + ], + ), + migrations.AddField( + model_name="auditlog", + name="audit_user", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="audit.audituser"), + ), + migrations.AddField( + model_name="auditsession", + name="audit_user", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="audit.audituser"), + ), + migrations.RunPython(code=create_and_link_audit_users, reverse_code=create_and_link_audit_users_reverse), + migrations.RemoveField( + model_name="auditlog", + name="user", + ), + migrations.RemoveField( + model_name="auditsession", + name="user", + ), + migrations.RenameField( + model_name="auditlog", + old_name="audit_user", + new_name="user", + ), + migrations.RenameField( + model_name="auditsession", + old_name="audit_user", + new_name="user", + ), + ] diff --git a/python/audit/models.py b/python/audit/models.py index 4914fb0e62..b59cf5317b 100644 --- a/python/audit/models.py +++ b/python/audit/models.py @@ -22,12 +22,22 @@ Prototype, ServiceComponent, ) -from django.contrib.auth.models import User as DjangoUser -from django.db import models +from django.db.models import ( + CASCADE, + BooleanField, + CharField, + DateTimeField, + ForeignKey, + JSONField, + Model, + PositiveBigIntegerField, + PositiveIntegerField, + TextChoices, +) from rbac.models import Group, Policy, Role, User -class AuditObjectType(models.TextChoices): +class AuditObjectType(TextChoices): PROTOTYPE = "prototype", "prototype" CLUSTER = "cluster", "cluster" SERVICE = "service", "service" @@ -42,47 +52,54 @@ class AuditObjectType(models.TextChoices): POLICY = "policy", "policy" -class AuditLogOperationType(models.TextChoices): +class AuditLogOperationType(TextChoices): CREATE = "create", "create" UPDATE = "update", "update" DELETE = "delete", "delete" -class AuditLogOperationResult(models.TextChoices): +class AuditLogOperationResult(TextChoices): SUCCESS = "success", "success" FAIL = "fail", "fail" DENIED = "denied", "denied" -class AuditSessionLoginResult(models.TextChoices): +class AuditSessionLoginResult(TextChoices): SUCCESS = "success", "success" WRONG_PASSWORD = "wrong password", "wrong password" ACCOUNT_DISABLED = "account disabled", "account disabled" USER_NOT_FOUND = "user not found", "user not found" -class AuditObject(models.Model): - object_id = models.PositiveIntegerField() - object_name = models.CharField(max_length=2000) - object_type = models.CharField(max_length=2000, choices=AuditObjectType.choices) - is_deleted = models.BooleanField(default=False) +class AuditObject(Model): + object_id = PositiveIntegerField() + object_name = CharField(max_length=2000) + object_type = CharField(max_length=2000, choices=AuditObjectType.choices) + is_deleted = BooleanField(default=False) + + +class AuditUser(Model): + username = CharField(max_length=150, null=False, blank=False) + auth_user_id = PositiveBigIntegerField() + created_at = DateTimeField(null=True) + deleted_at = DateTimeField(null=True) -class AuditLog(models.Model): - audit_object = models.ForeignKey(AuditObject, on_delete=models.CASCADE, null=True) - operation_name = models.CharField(max_length=2000) - operation_type = models.CharField(max_length=2000, choices=AuditLogOperationType.choices) - operation_result = models.CharField(max_length=2000, choices=AuditLogOperationResult.choices) - 
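# Descriptive note on the migration above (illustrative, not a hunk): the operations are a
# staged swap so existing audit rows survive auth user deletion:
#   1. CreateModel AuditUser;
#   2. AddField nullable audit_user FKs on AuditLog and AuditSession;
#   3. RunPython backfills AuditUser rows (active auth users plus every username already
#      referenced by audit records) and relinks the existing rows;
#   4. RemoveField drops the old FK to auth.User, and RenameField turns audit_user back
#      into user, keeping the public field name without the hard auth.User dependency.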
operation_time = models.DateTimeField(auto_now_add=True) - user = models.ForeignKey(DjangoUser, on_delete=models.CASCADE, null=True) - object_changes = models.JSONField(default=dict) +class AuditLog(Model): + audit_object = ForeignKey(AuditObject, on_delete=CASCADE, null=True) + operation_name = CharField(max_length=2000) + operation_type = CharField(max_length=2000, choices=AuditLogOperationType.choices) + operation_result = CharField(max_length=2000, choices=AuditLogOperationResult.choices) + operation_time = DateTimeField(auto_now_add=True) + user = ForeignKey(AuditUser, on_delete=CASCADE, null=True) + object_changes = JSONField(default=dict) -class AuditSession(models.Model): - user = models.ForeignKey(DjangoUser, on_delete=models.CASCADE, null=True) - login_result = models.CharField(max_length=2000, choices=AuditSessionLoginResult.choices) - login_time = models.DateTimeField(auto_now_add=True) - login_details = models.JSONField(default=dict, null=True) +class AuditSession(Model): + user = ForeignKey(AuditUser, on_delete=CASCADE, null=True) + login_result = CharField(max_length=2000, choices=AuditSessionLoginResult.choices) + login_time = DateTimeField(auto_now_add=True) + login_details = JSONField(default=dict, null=True) @dataclass diff --git a/python/audit/serializers.py b/python/audit/serializers.py index 95cd0ca0b7..a9f7dabea0 100644 --- a/python/audit/serializers.py +++ b/python/audit/serializers.py @@ -16,6 +16,7 @@ CharField, HyperlinkedModelSerializer, IntegerField, + SerializerMethodField, ) @@ -24,6 +25,7 @@ class AuditLogSerializer(HyperlinkedModelSerializer): object_type = CharField(read_only=True, source="audit_object.object_type", allow_null=True) object_name = CharField(read_only=True, source="audit_object.object_name", allow_null=True) username = CharField(read_only=True, source="user.username", allow_null=True) + user_id = SerializerMethodField() class Meta: model = AuditLog @@ -43,8 +45,17 @@ class Meta: ] extra_kwargs = {"url": {"view_name": "audit:auditlog-detail"}} + @staticmethod + def get_user_id(obj: AuditLog) -> str | None: + if obj.user: + return obj.user.auth_user_id + + return None + class AuditSessionSerializer(HyperlinkedModelSerializer): + user_id = SerializerMethodField() + class Meta: model = AuditSession fields = [ @@ -58,3 +69,10 @@ class Meta: extra_kwargs = { "url": {"view_name": "audit:auditsession-detail"}, } + + @staticmethod + def get_user_id(obj: AuditSession) -> str | None: + if obj.user: + return obj.user.auth_user_id + + return None diff --git a/python/audit/signals.py b/python/audit/signals.py index 1750edd2a9..5af357ab23 100644 --- a/python/audit/signals.py +++ b/python/audit/signals.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
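# Descriptive note (illustrative, not a hunk): audit rows now reference the append-only
# AuditUser snapshot instead of auth.User, so deleting and re-creating an account with the
# same username leaves earlier audit records intact. The newest AuditUser row per username
# is treated as the current account, which is the lookup the middleware and signals use:
#
#     AuditUser.objects.filter(username=username).order_by("-pk").first()
#
# while the API keeps exposing the original auth user id through the user_id
# SerializerMethodField added above (AuditUser.auth_user_id).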
-from audit.models import MODEL_TO_AUDIT_OBJECT_TYPE_MAP, AuditObject +from audit.models import MODEL_TO_AUDIT_OBJECT_TYPE_MAP, AuditObject, AuditUser from cm.models import ( ADCM, Bundle, @@ -21,8 +21,11 @@ Prototype, ServiceComponent, ) -from django.db.models.signals import post_delete +from django.contrib.auth.models import User as AuthUser +from django.db.models.signals import post_delete, post_save from django.dispatch import receiver +from django.utils.timezone import now +from rbac.models import User as RBACUser @receiver(signal=post_delete, sender=Cluster) @@ -42,3 +45,25 @@ def mark_deleted_audit_object_handler(sender, instance, **kwargs) -> None: # py audit_objs.append(audit_obj) AuditObject.objects.bulk_update(objs=audit_objs, fields=["is_deleted"]) + + +@receiver(signal=post_save, sender=AuthUser) +@receiver(signal=post_save, sender=RBACUser) +def create_audit_user(sender, instance, created, **kwargs): # pylint: disable=unused-argument + if kwargs["raw"]: + return + + if created: + AuditUser.objects.get_or_create( + username=instance.username, + created_at=instance.date_joined, + auth_user_id=AuthUser.objects.get(username=instance.username).pk, + ) + + +@receiver(signal=post_delete, sender=AuthUser) +@receiver(signal=post_delete, sender=RBACUser) +def set_deleted_at_audit_user(sender, instance, **kwargs): # pylint: disable=unused-argument + audit_user = AuditUser.objects.filter(username=instance.username).order_by("-pk").first() + audit_user.deleted_at = now() + audit_user.save(update_fields=["deleted_at"]) diff --git a/python/audit/tests/test_action.py b/python/audit/tests/test_action.py index f6ceb6eeda..316ac2c805 100644 --- a/python/audit/tests/test_action.py +++ b/python/audit/tests/test_action.py @@ -114,7 +114,7 @@ def check_obj_updated( self.assertIsInstance(log.operation_time, datetime) if log.user: - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) @@ -234,14 +234,14 @@ def test_host_denied(self): ) host_role = Role.objects.get(name="View host configurations") host_policy = Policy.objects.create(name="test_host_policy", role=host_role) - host_policy.user.add(self.no_rights_user) + host_policy.group.add(self.no_rights_user_group) host_policy.add_object(host) host_policy.apply() cluster, service, component = self.get_cluster_service_component() component_role = Role.objects.get(name="View component configurations") component_policy = Policy.objects.create(name="test_component_policy", role=component_role) - component_policy.user.add(self.no_rights_user) + component_policy.group.add(self.no_rights_user_group) component_policy.add_object(component) component_policy.apply() diff --git a/python/audit/tests/test_adcm.py b/python/audit/tests/test_adcm.py index 645b30b81c..634fdace77 100644 --- a/python/audit/tests/test_adcm.py +++ b/python/audit/tests/test_adcm.py @@ -67,7 +67,7 @@ def check_adcm_updated(self, log: AuditLog, operation_name: str, operation_resul self.assertIsInstance(log.operation_time, datetime) if log.user: - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) diff --git a/python/audit/tests/test_audit_object_rename.py b/python/audit/tests/test_audit_object_rename.py index a346e11a49..33d42e00e8 100644 --- a/python/audit/tests/test_audit_object_rename.py +++ b/python/audit/tests/test_audit_object_rename.py @@ -194,7 +194,7 @@ def test_policy_rename(self): }, ], "role": {"id": self.role.pk}, - 
"user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "description": "test_policy_description", }, content_type=APPLICATION_JSON, @@ -227,7 +227,7 @@ def test_policy_rename(self): }, ], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "name": new_test_policy_name, }, content_type=APPLICATION_JSON, diff --git a/python/audit/tests/test_authentication.py b/python/audit/tests/test_authentication.py index 6024988b86..b3d13a63e8 100644 --- a/python/audit/tests/test_authentication.py +++ b/python/audit/tests/test_authentication.py @@ -29,7 +29,10 @@ def setUp(self) -> None: def check_audit_session(self, user_id: int | None, login_result: AuditSessionLoginResult, username: str) -> None: log: AuditSession = AuditSession.objects.order_by("login_time").last() - self.assertEqual(log.user_id, user_id) + if log.user: + self.assertEqual(log.user.username, User.objects.get(pk=user_id).username) + else: + self.assertEqual(log.user, user_id) self.assertEqual(log.login_result, login_result) self.assertDictEqual(log.login_details, {"username": username}) diff --git a/python/audit/tests/test_bundle.py b/python/audit/tests/test_bundle.py index 2efb4e23ee..b685515484 100644 --- a/python/audit/tests/test_bundle.py +++ b/python/audit/tests/test_bundle.py @@ -52,7 +52,7 @@ def setUp(self) -> None: self.test_bundle_filename = "test_bundle.tar" self.test_bundle_path = Path( - settings.BASE_DIR, + self.base_dir, "python/audit/tests/files", self.test_bundle_filename, ) @@ -63,7 +63,7 @@ def check_log_upload(self, log: AuditLog, operation_result: AuditLogOperationRes self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_log_load_no_obj(self, log: AuditLog, operation_result: AuditLogOperationResult, user: User) -> None: @@ -72,7 +72,7 @@ def check_log_load_no_obj(self, log: AuditLog, operation_result: AuditLogOperati self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_log_denied(self, log: AuditLog, operation_name: str, operation_type: AuditLogOperationType) -> None: @@ -84,7 +84,7 @@ def check_log_denied(self, log: AuditLog, operation_name: str, operation_type: A self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def check_prototype_licence(self, log: AuditLog, operation_result: AuditLogOperationResult, user: User): @@ -96,7 +96,7 @@ def check_prototype_licence(self, log: AuditLog, operation_result: AuditLogOpera self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, 
{}) def check_log_deleted(self, log: AuditLog, operation_result: AuditLogOperationResult): @@ -108,7 +108,7 @@ def check_log_deleted(self, log: AuditLog, operation_result: AuditLogOperationRe self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def upload_bundle_and_check(self) -> Bundle: @@ -124,7 +124,7 @@ def upload_bundle_and_check(self) -> Bundle: self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) return bundle @@ -231,7 +231,7 @@ def test_update(self): self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_update_denied(self): @@ -258,7 +258,7 @@ def test_license_accepted(self): self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_license_accepted_denied(self): diff --git a/python/audit/tests/test_cluster.py b/python/audit/tests/test_cluster.py index 491ab43e52..dfe448b7bd 100644 --- a/python/audit/tests/test_cluster.py +++ b/python/audit/tests/test_cluster.py @@ -106,7 +106,7 @@ def check_log_no_obj(self, log: AuditLog, operation_result: AuditLogOperationRes self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_log( @@ -134,7 +134,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertDictEqual(log.object_changes, object_changes) def check_log_denied(self, log: AuditLog, operation_name: str, operation_type: AuditLogOperationType) -> None: @@ -146,7 +146,7 @@ def check_log_denied(self, log: AuditLog, operation_name: str, operation_type: A self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def check_cluster_update_config(self, log: AuditLog) -> None: @@ -165,7 +165,7 @@ def check_cluster_delete_failed_not_found(self, log: AuditLog): 
self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def check_action_log( @@ -236,7 +236,7 @@ def get_component(self) -> tuple[ServiceComponent, ConfigLog]: def add_no_rights_user_cluster_view_rights(self) -> None: role = Role.objects.get(name="View cluster configurations") policy = Policy.objects.create(name="test_policy", role=role) - policy.user.add(self.no_rights_user) + policy.group.add(self.no_rights_user_group) policy.add_object(self.cluster) policy.apply() @@ -282,7 +282,7 @@ def test_delete_two_clusters(self): provider_bundle_filename = "test_provider_bundle.tar" with open( - Path(settings.BASE_DIR, "python/audit/tests/files", cluster_bundle_filename), + Path(self.base_dir, "python/audit/tests/files", cluster_bundle_filename), encoding=settings.ENCODING_UTF_8, ) as f: self.client.post( @@ -296,7 +296,7 @@ def test_delete_two_clusters(self): ) with open( - Path(settings.BASE_DIR, "python/audit/tests/files", provider_bundle_filename), + Path(self.base_dir, "python/audit/tests/files", provider_bundle_filename), encoding=settings.ENCODING_UTF_8, ) as f: self.client.post( diff --git a/python/audit/tests/test_component.py b/python/audit/tests/test_component.py index 9ad1398edb..b16bc84c68 100644 --- a/python/audit/tests/test_component.py +++ b/python/audit/tests/test_component.py @@ -25,7 +25,6 @@ Cluster, ClusterObject, ConfigLog, - MaintenanceMode, ObjectConfig, Prototype, ServiceComponent, @@ -94,7 +93,7 @@ def check_log( self.assertEqual(log.operation_name, operation_name) self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertIsInstance(log.operation_time, datetime) self.assertEqual(log.object_changes, object_changes) @@ -287,7 +286,7 @@ def test_action_launch(self): def test_change_maintenance_mode(self): self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -295,7 +294,10 @@ def test_change_maintenance_mode(self): self.check_log( log=log, operation_name="Component updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_via_service(self): @@ -304,7 +306,7 @@ def test_change_maintenance_mode_via_service(self): viewname="v1:component-maintenance-mode", kwargs={"service_id": self.service.pk, "component_id": self.component.pk}, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -312,7 +314,10 @@ def test_change_maintenance_mode_via_service(self): self.check_log( log=log, operation_name="Component updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def 
test_change_maintenance_mode_via_cluster(self): @@ -325,7 +330,7 @@ def test_change_maintenance_mode_via_cluster(self): "component_id": self.component.pk, }, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -333,13 +338,16 @@ def test_change_maintenance_mode_via_cluster(self): self.check_log( log=log, operation_name="Component updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_failed(self): self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.CHANGING}, + data={"maintenance_mode": "CHANGING"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -354,7 +362,7 @@ def test_change_maintenance_mode_denied(self): with self.no_rights_user_logged_in: self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() diff --git a/python/audit/tests/test_config_log.py b/python/audit/tests/test_config_log.py index 072211ca99..76e89626f0 100644 --- a/python/audit/tests/test_config_log.py +++ b/python/audit/tests/test_config_log.py @@ -62,7 +62,7 @@ def check_log( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def test_create(self): diff --git a/python/audit/tests/test_group.py b/python/audit/tests/test_group.py index 3c3eefd3b8..dad74ec64e 100644 --- a/python/audit/tests/test_group.py +++ b/python/audit/tests/test_group.py @@ -58,7 +58,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) def test_create(self): @@ -78,7 +78,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertIsInstance(log.object_changes, dict) self.client.post( @@ -93,7 +93,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertIsInstance(log.object_changes, dict) def test_create_denied(self): @@ -111,7 +111,7 @@ def test_create_denied(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) 
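# Descriptive note on the recurring test change (illustrative, not a hunk): log.user now
# points at audit.AuditUser, whose primary key is unrelated to the auth/RBAC user's pk, so
# the assertions compare usernames instead of ids:
#
#     self.assertEqual(log.user.username, user.username)   # was: log.user.pk == user.pk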
self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertIsInstance(log.object_changes, dict) def test_delete(self): diff --git a/python/audit/tests/test_group_config.py b/python/audit/tests/test_group_config.py index 3cfe4e403e..8ce125d190 100644 --- a/python/audit/tests/test_group_config.py +++ b/python/audit/tests/test_group_config.py @@ -137,7 +137,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_log_no_obj( @@ -153,7 +153,7 @@ def check_log_no_obj( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_log_updated(self, log: AuditLog, operation_result: AuditLogOperationResult, user: User) -> None: @@ -562,13 +562,13 @@ def check_log(self, log: AuditLog, operation_result: AuditLogOperationResult, us self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def create_cluster_from_bundle(self): test_bundle_filename = "group-config.tar" test_bundle_path = Path( - settings.BASE_DIR, + self.base_dir, "python/audit/tests/files", test_bundle_filename, ) diff --git a/python/audit/tests/test_host.py b/python/audit/tests/test_host.py index 26f8151682..f25469b3ab 100644 --- a/python/audit/tests/test_host.py +++ b/python/audit/tests/test_host.py @@ -26,7 +26,6 @@ ConfigLog, Host, HostProvider, - MaintenanceMode, ObjectConfig, Prototype, ) @@ -85,7 +84,7 @@ def check_host_created_log(self, log: AuditLog, response: Response) -> None: self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def check_host_updated_log( @@ -111,7 +110,7 @@ def check_host_updated_log( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) def check_host_deleted_log( @@ -131,7 +130,7 @@ def check_host_deleted_log( self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_cluster_updated_log( @@ -157,7 +156,7 @@ def check_cluster_updated_log( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) 
self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_denied(self, log: AuditLog) -> None: @@ -166,7 +165,7 @@ def check_denied(self, log: AuditLog) -> None: self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def check_action_log(self, log: AuditLog) -> None: @@ -214,7 +213,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_create_denied(self): @@ -600,7 +599,7 @@ def test_change_maintenance_mode(self): self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -608,7 +607,10 @@ def test_change_maintenance_mode(self): self.check_host_updated_log( log=log, operation_name="Host updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_via_cluster(self): @@ -620,7 +622,7 @@ def test_change_maintenance_mode_via_cluster(self): viewname="v1:host-maintenance-mode", kwargs={"cluster_id": self.cluster.pk, "host_id": self.host.pk}, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -628,7 +630,10 @@ def test_change_maintenance_mode_via_cluster(self): self.check_host_updated_log( log=log, operation_name="Host updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_via_provider(self): @@ -640,7 +645,7 @@ def test_change_maintenance_mode_via_provider(self): viewname="v1:host-maintenance-mode", kwargs={"provider_id": self.provider.pk, "host_id": self.host.pk}, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -648,13 +653,16 @@ def test_change_maintenance_mode_via_provider(self): self.check_host_updated_log( log=log, operation_name="Host updated", - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_failed(self): self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.CHANGING}, + data={"maintenance_mode": "CHANGING"}, ) 
log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -672,7 +680,7 @@ def test_change_maintenance_mode_denied(self): with self.no_rights_user_logged_in: self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -689,7 +697,7 @@ def test_change_maintenance_mode_denied(self): path=reverse( viewname="v1:host-maintenance-mode", kwargs={"cluster_id": self.cluster.pk, "host_id": self.host.pk} ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() diff --git a/python/audit/tests/test_logrotate.py b/python/audit/tests/test_logrotate.py index fb01bebe0c..ada625a078 100644 --- a/python/audit/tests/test_logrotate.py +++ b/python/audit/tests/test_logrotate.py @@ -40,8 +40,7 @@ def setUp(self) -> None: attr = current_config_log.attr config.update( { - "job_log": {"log_rotation_on_fs": 1, "log_rotation_in_db": 1}, - "config_rotation": {"config_rotation_in_db": 1}, + "audit_data_retention": {"log_rotation_on_fs": 1, "log_rotation_in_db": 1, "config_rotation_in_db": 1}, "logrotate": {"size": "10M", "max_history": 10, "compress": False}, } ) @@ -75,7 +74,7 @@ def check_auditlog(self, log: AuditLog, name): self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.user.pk) + self.assertEqual(log.user.username, self.user.username) def test_logrotate( self, diff --git a/python/audit/tests/test_policy.py b/python/audit/tests/test_policy.py index f19c791334..3349a028e0 100644 --- a/python/audit/tests/test_policy.py +++ b/python/audit/tests/test_policy.py @@ -81,7 +81,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) def check_log_update( @@ -112,7 +112,7 @@ def test_create(self): "name": self.name, "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], }, content_type=APPLICATION_JSON, ) @@ -137,7 +137,7 @@ def test_create_denied(self): "name": self.name, "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], }, content_type=APPLICATION_JSON, ) @@ -199,7 +199,7 @@ def test_update_put(self): "name": self.policy.name, "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "description": "new_test_description", }, content_type=APPLICATION_JSON, @@ -223,13 +223,13 @@ def test_update_put(self): "type": "cluster", }, ], - "user": [self.test_user.username], + "group": [self.test_user_group.name], }, "previous": { "description": prev_description, "role": "", "object": [], - "user": [], + "group": [], }, }, ) @@ -242,7 +242,7 @@ def 
test_update_put_denied(self): "name": self.policy.name, "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "description": "new_test_description", }, content_type=APPLICATION_JSON, @@ -269,8 +269,8 @@ def test_update_patch(self): {"id": self.provider.pk, "name": self.provider.name, "type": "provider"}, ], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], - "description": "new_test_description", + "group": [{"id": self.test_user_group.pk}], + "description": new_test_description, }, content_type=APPLICATION_JSON, ) @@ -298,13 +298,13 @@ def test_update_patch(self): "type": "provider", }, ], - "user": [self.test_user.username], + "group": [self.test_user_group.name], }, "previous": { "description": prev_description, "role": "", "object": [], - "user": [], + "group": [], }, }, ) @@ -316,7 +316,7 @@ def test_update_patch_denied(self): data={ "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {"id": self.role.pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "description": "new_test_description", }, content_type=APPLICATION_JSON, @@ -339,7 +339,7 @@ def test_update_patch_failed(self): data={ "object": [{"id": self.cluster.pk, "name": self.cluster_name, "type": "cluster"}], "role": {}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "description": "new_test_description", }, content_type=APPLICATION_JSON, diff --git a/python/audit/tests/test_provider.py b/python/audit/tests/test_provider.py index dd5702a1f1..1f4c7321d8 100644 --- a/python/audit/tests/test_provider.py +++ b/python/audit/tests/test_provider.py @@ -66,7 +66,7 @@ def check_provider_updated( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_provider_deleted( @@ -84,7 +84,7 @@ def check_provider_deleted( self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def check_action_log(self, log: AuditLog, provider: HostProvider, operation_name: str) -> None: @@ -117,7 +117,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) self.client.post( @@ -135,7 +135,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_create_denied(self): @@ -156,7 +156,7 @@ def test_create_denied(self): 
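# Descriptive note (illustrative, not a hunk): these tests now grant policies through RBAC
# groups rather than directly to users, so the fixtures add the user's group, e.g.:
#
#     policy = Policy.objects.create(name="test_policy", role=role)
#     policy.group.add(self.no_rights_user_group)   # was: policy.user.add(self.no_rights_user)
#     policy.add_object(provider)
#     policy.apply()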
self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def test_delete(self): @@ -203,7 +203,7 @@ def test_delete_denied_view_permission(self): role = Role.objects.get(name="View provider configurations") policy = Policy.objects.create(name="test_policy", role=role) - policy.user.add(self.no_rights_user) + policy.group.add(self.no_rights_user_group) policy.add_object(provider) policy.apply() diff --git a/python/audit/tests/test_role.py b/python/audit/tests/test_role.py index 09916b4901..b78d4b2def 100644 --- a/python/audit/tests/test_role.py +++ b/python/audit/tests/test_role.py @@ -67,7 +67,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) def check_log_update( diff --git a/python/audit/tests/test_service.py b/python/audit/tests/test_service.py index f9efe60348..d9233534dc 100644 --- a/python/audit/tests/test_service.py +++ b/python/audit/tests/test_service.py @@ -27,7 +27,6 @@ ClusterBind, ClusterObject, ConfigLog, - MaintenanceMode, ObjectConfig, Prototype, PrototypeExport, @@ -35,7 +34,7 @@ ) from django.conf import settings from django.urls import reverse -from rbac.models import Policy, Role, User +from rbac.models import Group, Policy, Role, User from rest_framework.response import Response from rest_framework.status import ( HTTP_201_CREATED, @@ -102,7 +101,7 @@ def check_log( self.assertEqual(log.operation_type, operation_type) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) def check_action_log(self, log: AuditLog) -> None: @@ -304,7 +303,7 @@ def test_delete(self): self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) self.assertFalse(log.audit_object) @@ -312,7 +311,7 @@ def test_delete(self): def test_delete_denied(self): role = Role.objects.get(name="View service config") policy = Policy.objects.create(name="test_policy", role=role) - policy.user.add(self.no_rights_user) + policy.group.add(self.no_rights_user_group) policy.add_object(self.service) policy.apply() @@ -340,7 +339,7 @@ def test_delete_new(self): role = Role.objects.get(name="View service configurations") bundle_filename = "import.tar" with open( - Path(settings.BASE_DIR, "python/audit/tests/files", bundle_filename), + Path(self.base_dir, "python/audit/tests/files", bundle_filename), encoding=settings.ENCODING_UTF_8, ) as f: self.client.post( @@ -387,6 +386,8 @@ def test_delete_new(self): ) user = User.objects.get(pk=response.data["id"]) + group = Group.objects.create(name="group") + group.user_set.add(user) response: Response = self.client.post( 
path=reverse(viewname="v1:rbac:role-list"), data={ @@ -405,8 +406,7 @@ def test_delete_new(self): data={ "name": "policy_name", "role": {"id": created_role.pk}, - "user": [{"id": user.pk}], - "group": [], + "group": [{"id": group.pk}], "object": [{"name": service.name, "type": "service", "id": service.pk}], }, content_type=APPLICATION_JSON, @@ -641,7 +641,7 @@ def test_action_launch(self): def test_change_maintenance_mode(self): self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -655,7 +655,10 @@ def test_change_maintenance_mode(self): operation_type=AuditLogOperationType.UPDATE, operation_result=AuditLogOperationResult.SUCCESS, user=self.test_user, - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_via_cluster(self): @@ -664,7 +667,7 @@ def test_change_maintenance_mode_via_cluster(self): viewname="v1:service-maintenance-mode", kwargs={"cluster_id": self.cluster.pk, "service_id": self.service.pk}, ), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -678,13 +681,16 @@ def test_change_maintenance_mode_via_cluster(self): operation_type=AuditLogOperationType.UPDATE, operation_result=AuditLogOperationResult.SUCCESS, user=self.test_user, - object_changes={"current": {"maintenance_mode": "ON"}, "previous": {"maintenance_mode": "OFF"}}, + object_changes={ + "current": {"maintenance_mode": "ON"}, + "previous": {"maintenance_mode": "OFF"}, + }, ) def test_change_maintenance_mode_failed(self): self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.CHANGING}, + data={"maintenance_mode": "CHANGING"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() @@ -704,7 +710,7 @@ def test_change_maintenance_mode_denied(self): with self.no_rights_user_logged_in: self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, ) log: AuditLog = AuditLog.objects.order_by("operation_time").last() diff --git a/python/audit/tests/test_task.py b/python/audit/tests/test_task.py index 1701e2cea5..c3b5f0d38f 100644 --- a/python/audit/tests/test_task.py +++ b/python/audit/tests/test_task.py @@ -63,7 +63,7 @@ def check_log( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, {}) def test_cancel(self): diff --git a/python/audit/tests/test_user.py b/python/audit/tests/test_user.py index d178c0c2f9..f1da158bcb 100644 --- a/python/audit/tests/test_user.py +++ b/python/audit/tests/test_user.py @@ -11,13 +11,18 @@ # limitations under the License. 
from datetime import datetime +from pathlib import Path +from secrets import token_hex from audit.models import ( AuditLog, AuditLogOperationResult, AuditLogOperationType, AuditObjectType, + AuditSession, + AuditUser, ) +from cm.models import ObjectType, Prototype from django.urls import reverse from rbac.models import User from rest_framework.response import Response @@ -54,9 +59,41 @@ def check_log( self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, operation_result) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, user.pk) + self.assertEqual(log.user.username, user.username) self.assertEqual(log.object_changes, object_changes) + def _recreate_user(self, username: str) -> tuple[User, str]: + new_password = token_hex(nbytes=10) + User.objects.get(username=username).delete() + + return self.get_new_user(username=username, password=new_password), new_password + + def _make_audit_logs(self, username: str, password: str, bundle_pk: int) -> tuple[AuditLog, AuditSession]: + with self.another_user_logged_in(username=username, password=password): + self.client.post( + path=reverse(viewname="v1:rbac:token"), + data={ + "username": username, + "password": password, + }, + content_type=APPLICATION_JSON, + ) + audit_session = AuditSession.objects.order_by("-pk").first() + + self.client.post( + path=reverse(viewname="v1:cluster"), + data={ + "prototype_id": Prototype.objects.get(bundle_id=bundle_pk, type=ObjectType.CLUSTER).pk, + "name": "test_cluster_name", + "display_name": "test_cluster_display_name", + "bundle_id": bundle_pk, + }, + content_type=APPLICATION_JSON, + ) + audit_log = AuditLog.objects.order_by("-pk").first() + + return audit_log, audit_session + def test_create(self): response: Response = self.client.post( path=reverse(viewname=self.list_name), @@ -76,7 +113,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) self.client.post( @@ -94,7 +131,7 @@ def test_create(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_create_denied(self): @@ -115,7 +152,7 @@ def test_create_denied(self): self.assertEqual(log.operation_type, AuditLogOperationType.CREATE) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def test_delete(self): @@ -134,7 +171,7 @@ def test_delete(self): self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, AuditLogOperationResult.SUCCESS) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_delete_denied(self): @@ -155,7 +192,7 
@@ def test_delete_denied(self): self.assertEqual(log.operation_type, AuditLogOperationType.DELETE) self.assertEqual(log.operation_result, AuditLogOperationResult.DENIED) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.no_rights_user.pk) + self.assertEqual(log.user.username, self.no_rights_user.username) self.assertEqual(log.object_changes, {}) def test_update_put(self): @@ -270,7 +307,7 @@ def test_reset_failed_login_attempts_fail(self): self.assertEqual(log.operation_type, AuditLogOperationType.UPDATE) self.assertEqual(log.operation_result, AuditLogOperationResult.FAIL) self.assertIsInstance(log.operation_time, datetime) - self.assertEqual(log.user.pk, self.test_user.pk) + self.assertEqual(log.user.username, self.test_user.username) self.assertEqual(log.object_changes, {}) def test_reset_failed_login_attempts_denied(self): @@ -288,3 +325,35 @@ def test_reset_failed_login_attempts_denied(self): user=self.no_rights_user, operation_name="User login attempts reset", ) + + def test_recreate_user_same_username_different_audit_users_success(self): + initial_audit_users_count = AuditUser.objects.count() + with self.another_user_logged_in(username="admin", password="admin"): + bundle = self.upload_and_load_bundle( + path=Path(self.base_dir, "python/audit/tests/files/test_cluster_bundle.tar") + ) + + username, password = "test_user_recreate_username", token_hex(10) + with self.another_user_logged_in(username="admin", password="admin"): + user = self.get_new_user(username=username, password=password) + self.assertEqual(AuditUser.objects.count(), initial_audit_users_count + 1) + old_user_pk = user.pk + + audit_log_1, audit_session_1 = self._make_audit_logs(username=username, password=password, bundle_pk=bundle.pk) + audit_log_1_pk, audit_session_1_pk = audit_log_1.pk, audit_session_1.pk + self.assertEqual(audit_log_1.user.username, username) + self.assertEqual(audit_session_1.user.username, username) + + with self.another_user_logged_in(username="admin", password="admin"): + new_user, new_password = self._recreate_user(username=username) + self.assertEqual(AuditUser.objects.count(), initial_audit_users_count + 2) + self.assertEqual(AuditUser.objects.filter(username=username).count(), 2) + self.assertNotEqual(old_user_pk, new_user.pk) + self.assertEqual(AuditLog.objects.get(pk=audit_log_1_pk).user.username, username) + self.assertEqual(AuditSession.objects.get(pk=audit_session_1_pk).user.username, username) + + audit_log_2, audit_session_2 = self._make_audit_logs( + username=new_user.username, password=new_password, bundle_pk=bundle.pk + ) + self.assertEqual(audit_log_2.user.username, new_user.username) + self.assertEqual(audit_session_2.user.username, new_user.username) diff --git a/python/audit/tests/test_views.py b/python/audit/tests/test_views.py index 312087aa72..8e5f084040 100644 --- a/python/audit/tests/test_views.py +++ b/python/audit/tests/test_views.py @@ -20,6 +20,7 @@ AuditObjectType, AuditSession, AuditSessionLoginResult, + AuditUser, ) from django.urls import reverse from django.utils import timezone @@ -68,7 +69,7 @@ def setUp(self) -> None: operation_name=self.operation_name_first, operation_type=AuditLogOperationType.CREATE, operation_result=AuditLogOperationResult.SUCCESS, - user=self.test_user, + user=AuditUser.objects.filter(username=self.test_user.username).order_by("-pk").first(), object_changes=self.object_changes_first, ) AuditLog.objects.filter(pk=self.audit_log_first.pk).update( @@ -79,7 +80,7 @@ def setUp(self) -> None: 
operation_name=self.operation_name_second, operation_type=AuditLogOperationType.UPDATE, operation_result=AuditLogOperationResult.FAIL, - user=self.no_rights_user, + user=AuditUser.objects.filter(username=self.no_rights_user.username).order_by("-pk").first(), object_changes=self.object_changes_second, ) AuditLog.objects.filter(pk=self.audit_log_second.pk).update( @@ -90,12 +91,12 @@ def setUp(self) -> None: self.login_details_second = {"login": {"details": "second"}} self.audit_session_first = AuditSession.objects.create( - user=self.test_user, + user=AuditUser.objects.filter(username=self.test_user).order_by("-pk").first(), login_result=AuditSessionLoginResult.SUCCESS, login_details=self.login_details_first, ) self.audit_session_second = AuditSession.objects.create( - user=self.no_rights_user, + user=AuditUser.objects.filter(username=self.no_rights_user).order_by("-pk").first(), login_result=AuditSessionLoginResult.WRONG_PASSWORD, login_details=self.login_details_second, ) diff --git a/python/audit/utils.py b/python/audit/utils.py index 60174bf99c..79a8564f5a 100644 --- a/python/audit/utils.py +++ b/python/audit/utils.py @@ -9,7 +9,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=too-many-lines from functools import wraps @@ -25,6 +24,7 @@ AuditLogOperationType, AuditObject, AuditOperation, + AuditUser, ) from cm.errors import AdcmEx from cm.models import ( @@ -92,7 +92,10 @@ def _get_deleted_obj(view: GenericAPIView, request: Request, kwargs) -> Model | if view.queryset.count() == 1: deleted_obj = view.queryset.all()[0] elif "pk" in view.kwargs: - deleted_obj = view.queryset.get(pk=view.kwargs["pk"]) + try: + deleted_obj = view.queryset.get(pk=int(view.kwargs["pk"])) + except ValueError: + deleted_obj = None else: deleted_obj = None except TypeError: @@ -102,7 +105,7 @@ def _get_deleted_obj(view: GenericAPIView, request: Request, kwargs) -> Model | deleted_obj = None except (IndexError, ObjectDoesNotExist): deleted_obj = None - except KeyError: + except (KeyError, ValueError): deleted_obj = None except PermissionDenied: if "cluster_id" in kwargs: @@ -195,8 +198,12 @@ def _get_obj_changes_data(view: GenericAPIView | ModelViewSet) -> tuple[dict | N if serializer_class: model = view.get_queryset().model - current_obj = model.objects.filter(pk=pk).first() - prev_data = serializer_class(model.objects.filter(pk=pk).first()).data + try: + current_obj = model.objects.filter(pk=pk).first() + prev_data = serializer_class(model.objects.filter(pk=pk).first()).data + except ValueError: + current_obj = None + prev_data = None if current_obj: prev_data = serializer_class(current_obj).data @@ -329,16 +336,16 @@ def wrapped(*args, **kwargs): operation_result = AuditLogOperationResult.FAIL if isinstance(view.request.user, DjangoUser): - user = view.request.user + audit_user = AuditUser.objects.filter(username=view.request.user.username).order_by("-pk").first() else: - user = None + audit_user = None auditlog = AuditLog.objects.create( audit_object=audit_object, operation_name=operation_name, operation_type=audit_operation.operation_type, operation_result=operation_result, - user=user, + user=audit_user, object_changes=object_changes, ) cef_logger(audit_instance=auditlog, signature_id=resolve(request.path).route) @@ -366,14 +373,14 @@ def make_audit_log(operation_type, result, operation_status): "type": AuditLogOperationType.DELETE, "name": '"Audit log 
cleanup/archiving on schedule" job', }, + "statistics": {"type": "", "name": '"Statistics collection on schedule" job'}, } operation_name = operation_type_map[operation_type]["name"] + " " + operation_status - system_user = User.objects.get(username="system") audit_log = AuditLog.objects.create( audit_object=None, operation_name=operation_name, operation_type=operation_type_map[operation_type]["type"], operation_result=result, - user=system_user, + user=AuditUser.objects.get(username="system"), ) cef_logger(audit_instance=audit_log, signature_id="Background operation", empty_resource=True) diff --git a/python/cm/adcm_config/checks.py b/python/cm/adcm_config/checks.py index 3dd0ce0fa9..f9b1a54084 100644 --- a/python/cm/adcm_config/checks.py +++ b/python/cm/adcm_config/checks.py @@ -12,8 +12,8 @@ from typing import Any, Mapping -import yspec.checker from cm.adcm_config.utils import config_is_ro, group_keys_to_flat, proto_ref +from cm.checker import FormatError, SchemaError, process_rule from cm.errors import raise_adcm_ex from cm.logger import logger from cm.models import Action, ADCMEntity, GroupConfig, Prototype, StagePrototype @@ -155,7 +155,6 @@ def check_value_unselected_field( :param spec: Config specification :param obj: Parent object (Cluster, Service, Component Provider or Host) """ - # pylint: disable=too-many-boolean-expressions for group_key, group_value in group_keys.items(): if isinstance(group_value, Mapping): @@ -288,11 +287,11 @@ def check_config_type( # pylint: disable=too-many-branches,too-many-statements, if spec["type"] == "structure": schema = spec["limits"]["yspec"] try: - yspec.checker.process_rule(data=value, rules=schema, name="root") - except yspec.checker.FormatError as e: + process_rule(data=value, rules=schema, name="root") + except FormatError as e: msg = tmpl1.format(f"yspec error: {str(e)} at block {e.data}") raise_adcm_ex(code="CONFIG_VALUE_ERROR", msg=msg) - except yspec.checker.SchemaError as e: + except SchemaError as e: raise_adcm_ex(code="CONFIG_VALUE_ERROR", msg=f"yspec error: {str(e)}") if spec["type"] == "boolean" and not isinstance(value, bool): @@ -316,16 +315,9 @@ def check_config_type( # pylint: disable=too-many-branches,too-many-statements, if spec["type"] == "option": option = spec["limits"]["option"] - check = False - for _value in option.values(): - if _value == value: - check = True - - break - - if not check: - msg = f'not in option list: "{option}"' + if value not in option.values(): + msg = f'not in option list: "{option.values()}"' raise_adcm_ex(code="CONFIG_VALUE_ERROR", msg=tmpl2.format(msg)) if spec["type"] == "variant": diff --git a/python/cm/adcm_config/config.py b/python/cm/adcm_config/config.py index 34ed6934e8..2d9a0e4e84 100644 --- a/python/cm/adcm_config/config.py +++ b/python/cm/adcm_config/config.py @@ -10,10 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
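check_config_type() now goes through the bundled cm.checker module instead of the external yspec package. A small sketch of how a structure value can be validated with that API, collecting messages instead of raising ADCM errors; validate_structure is illustrative only:

from cm.checker import FormatError, SchemaError, process_rule


def validate_structure(value, schema: dict) -> list[str]:
    # Mirrors the new error handling: FormatError carries the offending block
    # in .data, SchemaError only carries a message.
    errors = []
    try:
        process_rule(data=value, rules=schema, name="root")
    except FormatError as e:
        errors.append(f"yspec error: {e} at block {e.data}")
    except SchemaError as e:
        errors.append(f"yspec error: {e}")

    return errors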
-# pylint: disable=too-many-lines - import copy import json +import re from collections import OrderedDict from pathlib import Path from typing import Any @@ -35,7 +34,7 @@ sub_key_is_required, to_flat_dict, ) -from cm.errors import raise_adcm_ex +from cm.errors import AdcmEx, raise_adcm_ex from cm.models import ( ADCM, Action, @@ -43,6 +42,7 @@ ConfigLog, GroupConfig, ObjectConfig, + ObjectType, Prototype, PrototypeConfig, StagePrototype, @@ -59,7 +59,9 @@ def read_bundle_file(proto: Prototype | StagePrototype, fname: str, bundle_hash: file_descriptor = None - if fname[0:2] == "./": + if proto.type == ObjectType.ADCM: + path = settings.BASE_DIR / "conf/adcm" / fname + elif fname.startswith("./"): path = Path(settings.BUNDLE_DIR, bundle_hash, proto.path, fname) else: path = Path(settings.BUNDLE_DIR, bundle_hash, fname) @@ -152,6 +154,7 @@ def switch_config( # pylint: disable=too-many-locals,too-many-branches,too-many return config_log = ConfigLog.objects.get(obj_ref=obj.config, id=obj.config.current) + _, old_spec, _, _ = get_prototype_config(prototype=old_prototype) new_unflat_spec, new_spec, _, _ = get_prototype_config(prototype=new_prototype) old_conf = to_flat_dict(config=config_log.config, spec=old_spec) @@ -320,66 +323,6 @@ def process_file_type(obj: Any, spec: dict, conf: dict): save_file_type(obj, key, subkey, value) -def process_secret_params(spec, conf): - for key in conf: # pylint: disable=too-many-nested-blocks - if "type" in spec[key]: - if spec[key]["type"] in {"password", "secrettext", "secretfile"} and conf[key]: - if conf[key].startswith(settings.ANSIBLE_VAULT_HEADER): - try: - ansible_decrypt(msg=conf[key]) - except AnsibleError: - raise_adcm_ex( - code="CONFIG_VALUE_ERROR", - msg=f"Secret value must not starts with {settings.ANSIBLE_VAULT_HEADER}", - ) - else: - conf[key] = ansible_encrypt_and_format(msg=conf[key]) - else: - for subkey in conf[key]: - if spec[key][subkey]["type"] in {"password", "secrettext", "secretfile"} and conf[key][subkey]: - if conf[key][subkey].startswith(settings.ANSIBLE_VAULT_HEADER): - try: - ansible_decrypt(msg=conf[key][subkey]) - except AnsibleError: - raise_adcm_ex( - code="CONFIG_VALUE_ERROR", - msg=f"Secret value must not starts with {settings.ANSIBLE_VAULT_HEADER}", - ) - else: - conf[key][subkey] = ansible_encrypt_and_format(msg=conf[key][subkey]) - - return conf - - -def process_secretmap(spec: dict, conf: dict) -> dict: - for key in conf: - if "type" not in spec[key]: - for _ in conf: - process_secretmap(spec[key], conf[key]) - - if spec[key].get("type") != "secretmap": - continue - - if conf[key] is None: - continue - - for conf_key, conf_value in conf[key].items(): - if conf_value.startswith(settings.ANSIBLE_VAULT_HEADER): - try: - ansible_decrypt(msg=conf_value) - except AnsibleError: - raise_adcm_ex( - code="CONFIG_VALUE_ERROR", - msg=f"Secret value must not starts with {settings.ANSIBLE_VAULT_HEADER}", - ) - - conf[key][conf_key] = conf_value - else: - conf[key][conf_key] = ansible_encrypt_and_format(msg=conf_value) - - return conf - - def process_config( # pylint: disable=too-many-branches obj: ADCMEntity, spec: dict, @@ -556,7 +499,7 @@ def check_config_spec( conf: dict, attr: dict = None, ) -> None: - # pylint: disable=too-many-branches,too-many-statements + # pylint: disable=too-many-branches ref = proto_ref(proto) if isinstance(conf, (float, int)): raise_adcm_ex(code="JSON_ERROR", msg="config should not be just one int or float") @@ -622,19 +565,108 @@ def check_config_spec( ) +def _process_secretfile(obj: ADCMEntity, key: 
str, subkey: str, value: Any) -> None: + if value is not None and value.startswith(settings.ANSIBLE_VAULT_HEADER): + try: + value = ansible_decrypt(msg=value) + except AnsibleError as e: + raise AdcmEx(code="CONFIG_VALUE_ERROR", msg="Can't decrypt value") from e + + save_file_type(obj=obj, key=key, subkey=subkey, value=value) + + +def _process_secret_param(conf: dict, key: str, subkey: str) -> None: + value = conf[key] + if subkey: + value = conf[key][subkey] + + if not value: + return + + if value.startswith(settings.ANSIBLE_VAULT_HEADER): + try: + ansible_decrypt(msg=value) + except AnsibleError as e: + raise AdcmEx(code="CONFIG_VALUE_ERROR", msg="Can't decrypt value") from e + + else: + value = ansible_encrypt_and_format(msg=value) + + if subkey: + conf[key][subkey] = value + else: + conf[key] = value + + +def _process_secretmap(conf: dict, key: str, subkey: str) -> None: + value = conf[key] + if subkey: + value = conf[key][subkey] + + if value is None: + return + + for secretmap_key, secretmap_value in value.items(): + if secretmap_value.startswith(settings.ANSIBLE_VAULT_HEADER): + try: + ansible_decrypt(msg=secretmap_value) + except AnsibleError as e: + raise AdcmEx(code="CONFIG_VALUE_ERROR", msg="Can't decrypt value") from e + + if subkey: + conf[key][subkey][secretmap_key] = secretmap_value + else: + conf[key][secretmap_key] = secretmap_value + + else: + if subkey: + conf[key][subkey][secretmap_key] = ansible_encrypt_and_format(msg=secretmap_value) + else: + conf[key][secretmap_key] = ansible_encrypt_and_format(msg=secretmap_value) + + def process_config_spec(obj: ADCMEntity, spec: dict, new_config: dict, current_config: dict = None) -> dict: if current_config: new_config = restore_read_only(obj=obj, spec=spec, conf=new_config, old_conf=current_config) - process_file_type(obj=obj, spec=spec, conf=new_config) - conf = process_secret_params(spec=spec, conf=new_config) - conf = process_secretmap(spec=spec, conf=conf) + for cfg_key, cfg_value in new_config.items(): + spec_type = spec[cfg_key].get("type") - return conf + if spec_type == "file": + save_file_type(obj=obj, key=cfg_key, subkey="", value=cfg_value) + + elif spec_type == "secretfile": + _process_secretfile(obj=obj, key=cfg_key, subkey="", value=cfg_value) + _process_secret_param(conf=new_config, key=cfg_key, subkey="") + + elif spec_type in {"password", "secrettext"}: + _process_secret_param(conf=new_config, key=cfg_key, subkey="") + + elif spec_type == "secretmap": + _process_secretmap(conf=new_config, key=cfg_key, subkey="") + + elif spec_type is None and bool(cfg_value): + for sub_cfg_key, sub_cfg_value in cfg_value.items(): + sub_spec_type = spec[cfg_key][sub_cfg_key]["type"] + + if sub_spec_type == "file": + save_file_type(obj=obj, key=cfg_key, subkey=sub_cfg_key, value=sub_cfg_value) + + elif sub_spec_type == "secretfile": + _process_secretfile(obj=obj, key=cfg_key, subkey=sub_cfg_key, value=sub_cfg_value) + _process_secret_param(conf=new_config, key=cfg_key, subkey=sub_cfg_key) + + elif sub_spec_type in {"password", "secrettext"}: + _process_secret_param(conf=new_config, key=cfg_key, subkey=sub_cfg_key) + + elif sub_spec_type == "secretmap": + _process_secretmap(conf=new_config, key=cfg_key, subkey=sub_cfg_key) + + return new_config def get_adcm_config(section=None): - adcm_object = ADCM.objects.last() + adcm_object = ADCM.objects.get() current_configlog = ConfigLog.objects.get(obj_ref=adcm_object.config, id=adcm_object.config.current) if not section: return current_configlog.attr, current_configlog.config @@ -642,6 
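The new _process_secret_param()/_process_secretmap() helpers all apply the same rule: a value that already starts with the Ansible Vault header is only checked to be decryptable, anything else is encrypted before it is stored. A condensed, dependency-free sketch of that rule; the helper name, the callables and the header constant are illustrative stand-ins for ansible_encrypt_and_format, ansible_decrypt and settings.ANSIBLE_VAULT_HEADER:

ANSIBLE_VAULT_HEADER = "$ANSIBLE_VAULT"  # illustrative; the real prefix comes from settings


def normalize_secret(value: str | None, encrypt, decrypt) -> str | None:
    if not value:
        return value

    if value.startswith(ANSIBLE_VAULT_HEADER):
        decrypt(value)       # already vaulted: only verify that it can be decrypted
        return value

    return encrypt(value)    # plain text coming from the API: store it vaulted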
+674,17 @@ def get_adcm_config(section=None): return current_configlog.attr.get(section, None), current_configlog.config.get(section, None) +def get_option_value(value: str, limits: dict) -> str | int | float: + if value in limits["option"].values(): + return value + elif re.match(r"^\d+$", value): + return int(value) + elif re.match(r"^\d+\.\d+$", value): + return float(value) + + return raise_adcm_ex("CONFIG_OPTION_ERROR") + + def get_default( # pylint: disable=too-many-branches conf: PrototypeConfig, prototype: Prototype | None = None, @@ -671,20 +714,7 @@ def get_default( # pylint: disable=too-many-branches else: value = bool(conf.default.lower() in {"true", "yes"}) elif conf.type == "option": - if conf.default in conf.limits["option"]: - value = conf.limits["option"][conf.default] - - for option in conf.limits["option"].values(): - if not isinstance(option, type(value)): - if isinstance(option, bool): - value = bool(value) - elif isinstance(option, int): - value = int(value) - elif isinstance(option, float): - value = float(value) - elif isinstance(option, str): - value = str(value) - + value = get_option_value(value=value, limits=conf.limits) elif conf.type == "file": if prototype: if conf.default: diff --git a/python/cm/adcm_schema.yaml b/python/cm/adcm_schema.yaml index 91367e414e..0f2ae0777b 100644 --- a/python/cm/adcm_schema.yaml +++ b/python/cm/adcm_schema.yaml @@ -34,6 +34,7 @@ common_object: &common_object config: config_obj actions: actions_dict venv: string + allow_flags: boolean required_items: - type - name @@ -116,6 +117,7 @@ component_dict: config: config_obj actions: actions_dict config_group_customization: boolean + allow_flags: boolean venv: string comp_req_list: @@ -370,7 +372,7 @@ config_dict_sub_option: items: <<: *config_dict_sub_items option: map_string_any - default: base_type + default: option_type required_items: - option @@ -500,7 +502,7 @@ config_list_option: items: <<: *config_list_items option: map_string_any - default: base_type + default: option_type required_items: - option @@ -735,6 +737,7 @@ common_action: config: config_obj venv: string allow_in_maintenance_mode: boolean + config_jinja: string ## Task action action_task_dict: @@ -772,7 +775,6 @@ action_job_dict: <<: *common_action_items script_type: action_script_type script: string - config_jinja: string required_items: - type - script_type @@ -876,7 +878,7 @@ map_string_string: map_string_any: match: dict - default_item: base_type + default_item: option_type boolean: match: bool @@ -929,3 +931,10 @@ base_type: - string - integer - float + +option_type: + match: one_of + variants: + - string + - integer + - float diff --git a/python/cm/ansible_plugin.py b/python/cm/ansible_plugin.py index 9a2afcec88..99b7a6ea78 100644 --- a/python/cm/ansible_plugin.py +++ b/python/cm/ansible_plugin.py @@ -10,12 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
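get_option_value() replaces the old per-type coercion loop in get_default(): an option default is kept as-is when it matches one of the declared option values, otherwise it is parsed as an integer or float. The same rules as a standalone sketch, with a plain ValueError standing in for raise_adcm_ex("CONFIG_OPTION_ERROR") and an illustrative function name:

import re


def cast_option_default(value: str, limits: dict):
    if value in limits["option"].values():
        return value
    if re.match(r"^\d+$", value):
        return int(value)
    if re.match(r"^\d+\.\d+$", value):
        return float(value)

    raise ValueError(f"'{value}' does not match any option in {limits['option']}")


# cast_option_default("80", {"option": {"http": 80, "https": 443}}) -> 80 (int)
# cast_option_default("https", {"option": {"http": "http", "https": "https"}}) -> "https"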
-# pylint: disable=line-too-long - import fcntl import json from collections import defaultdict from copy import deepcopy +from typing import Any # isort: off from ansible.errors import AnsibleError @@ -23,7 +22,9 @@ from ansible.plugins.action import ActionBase # isort: on -from cm.api import add_hc, get_hc, set_object_config + +from cm.adcm_config.config import get_option_value +from cm.api import add_hc, get_hc, set_object_config_with_plugin from cm.api_context import CTX from cm.errors import AdcmEx from cm.errors import raise_adcm_ex as err @@ -43,13 +44,14 @@ Prototype, PrototypeConfig, ServiceComponent, + get_model_by_type, ) from cm.status_api import post_event from django.conf import settings from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from rbac.models import Policy, Role -from rbac.roles import assign_user_or_group_perm +from rbac.roles import assign_group_perm MSG_NO_CONFIG = ( "There are no job related vars in inventory. It's mandatory for that module to have some" @@ -90,42 +92,59 @@ def job_lock(job_id): encoding=settings.ENCODING_UTF_8, ) try: - fcntl.flock(file_descriptor.fileno(), fcntl.LOCK_EX) # pylint: disable=I1101 + fcntl.flock(file_descriptor.fileno(), fcntl.LOCK_EX) return file_descriptor except OSError as e: return err("LOCK_ERROR", e) -def check_context_type(task_vars, *context_type, err_msg=None): +def check_context_type(task_vars: dict, context_types: tuple, err_msg: str | None = None) -> None: """ Check context type. Check if inventory.json and config.json were passed and check if `context` exists in task variables, сheck if a context is of a given type. """ if not task_vars: raise AnsibleError(MSG_NO_CONFIG) + if "context" not in task_vars: raise AnsibleError(MSG_NO_CONTEXT) + if not isinstance(task_vars["context"], dict): raise AnsibleError(MSG_NO_CONTEXT) + context = task_vars["context"] - if context["type"] not in context_type: + if context["type"] not in context_types: if err_msg is None: - err_msg = MSG_WRONG_CONTEXT.format(", ".join(context_type), context["type"]) + err_msg = MSG_WRONG_CONTEXT.format(", ".join(context_types), context["type"]) raise AnsibleError(err_msg) -def get_object_id_from_context(task_vars, id_type, *context_type, err_msg=None): +def get_object_id_from_context(task_vars: dict, id_type: str, context_types: tuple, err_msg: str | None = None) -> int: """ Get object id from context. 
""" - check_context_type(task_vars, *context_type, err_msg=err_msg) + check_context_type(task_vars=task_vars, context_types=context_types, err_msg=err_msg) context = task_vars["context"] if id_type not in context: raise AnsibleError(MSG_WRONG_CONTEXT_ID.format(id_type)) return context[id_type] +def get_context_object(task_vars: dict, err_msg: str = None) -> ADCMEntity: + obj_type = task_vars["context"]["type"] + + obj_pk = get_object_id_from_context( + task_vars=task_vars, id_type=f"{obj_type}_id", context_types=(obj_type,), err_msg=err_msg + ) + obj = get_model_by_type(object_type=obj_type).objects.filter(pk=obj_pk).first() + + if not obj: + raise AnsibleError(f'Object of type "{obj_type}" with PK "{obj_pk}" does not exist') + + return obj + + class ContextActionModule(ActionBase): TRANSFERS_FILES = False _VALID_ARGS = None @@ -173,20 +192,20 @@ def _do_provider(self, task_vars, context): def _do_host_from_provider(self, task_vars, context): raise NotImplementedError - def run(self, tmp=None, task_vars=None): # pylint: disable=too-many-branches + def run(self, tmp=None, task_vars=None): self._check_mandatory() obj_type = self._task.args["type"] job_id = task_vars["job"]["id"] file_descriptor = job_lock(job_id) if obj_type == "cluster": - check_context_type(task_vars, "cluster", "service", "component") + check_context_type(task_vars=task_vars, context_types=("cluster", "service", "component")) res = self._do_cluster(task_vars, {"cluster_id": self._get_job_var(task_vars, "cluster_id")}) elif obj_type == "service" and "service_name" in self._task.args: - check_context_type(task_vars, "cluster", "service", "component") + check_context_type(task_vars=task_vars, context_types=("cluster", "service", "component")) res = self._do_service_by_name(task_vars, {"cluster_id": self._get_job_var(task_vars, "cluster_id")}) elif obj_type == "service": - check_context_type(task_vars, "service", "component") + check_context_type(task_vars=task_vars, context_types=("service", "component")) res = self._do_service( task_vars, { @@ -195,17 +214,17 @@ def run(self, tmp=None, task_vars=None): # pylint: disable=too-many-branches }, ) elif obj_type == "host" and "host_id" in self._task.args: - check_context_type(task_vars, "provider") + check_context_type(task_vars=task_vars, context_types=("provider",)) res = self._do_host_from_provider(task_vars, {}) elif obj_type == "host": - check_context_type(task_vars, "host") + check_context_type(task_vars=task_vars, context_types=("host",)) res = self._do_host(task_vars, {"host_id": self._get_job_var(task_vars, "host_id")}) elif obj_type == "provider": - check_context_type(task_vars, "provider", "host") + check_context_type(task_vars=task_vars, context_types=("provider", "host")) res = self._do_provider(task_vars, {"provider_id": self._get_job_var(task_vars, "provider_id")}) elif obj_type == "component" and "component_name" in self._task.args: if "service_name" in self._task.args: - check_context_type(task_vars, "cluster", "service", "component") + check_context_type(task_vars=task_vars, context_types=("cluster", "service", "component")) res = self._do_component_by_name( task_vars, { @@ -214,7 +233,7 @@ def run(self, tmp=None, task_vars=None): # pylint: disable=too-many-branches }, ) else: - check_context_type(task_vars, "cluster", "service", "component") + check_context_type(task_vars=task_vars, context_types=("cluster", "service", "component")) if task_vars["job"].get("service_id", None) is None: raise AnsibleError(MSG_NO_SERVICE_NAME) res = self._do_component_by_name( @@ 
-225,7 +244,7 @@ def run(self, tmp=None, task_vars=None): # pylint: disable=too-many-branches }, ) elif obj_type == "component": - check_context_type(task_vars, "component") + check_context_type(task_vars=task_vars, context_types=("component",)) res = self._do_component(task_vars, {"component_id": self._get_job_var(task_vars, "component_id")}) else: raise AnsibleError(MSG_NO_ROUTE) @@ -339,7 +358,7 @@ def set_host_multi_state(host_id, multi_state): return _set_object_multi_state(obj, multi_state) -def change_hc(job_id, cluster_id, operations): # pylint: disable=too-many-branches +def change_hc(job_id, cluster_id, operations): """ For use in ansible plugin adcm_hc """ @@ -379,6 +398,21 @@ def change_hc(job_id, cluster_id, operations): # pylint: disable=too-many-branc file_descriptor.close() +def cast_to_type(field_type: str, value: Any, limits: dict) -> Any: + try: + match field_type: + case "float": + return float(value) + case "integer": + return int(value) + case "option": + return get_option_value(value=value, limits=limits) + case _: + return value + except ValueError as error: + raise AnsibleError(f"Could not convert '{value}' to '{field_type}'") from error + + def update_config(obj: ADCMEntity, conf: dict, attr: dict) -> dict | int | str: config_log = ConfigLog.objects.get(id=obj.config.current) new_config = deepcopy(config_log.config) @@ -392,12 +426,26 @@ def update_config(obj: ADCMEntity, conf: dict, attr: dict) -> dict | int | str: subkey = keys_list[1] if subkey: - new_config[key][subkey] = value + try: + prototype_conf = PrototypeConfig.objects.get( + name=key, subname=subkey, prototype=obj.prototype, action=None + ) + except PrototypeConfig.DoesNotExist as error: + raise AnsibleError(f"Config parameter '{key}/{subkey}' does not exist") from error + new_config[key][subkey] = cast_to_type( + field_type=prototype_conf.type, value=value, limits=prototype_conf.limits + ) else: - new_config[key] = value + try: + prototype_conf = PrototypeConfig.objects.get(name=key, subname="", prototype=obj.prototype, action=None) + except PrototypeConfig.DoesNotExist as error: + raise AnsibleError(f"Config parameter '{key}' does not exist") from error + new_config[key] = cast_to_type(field_type=prototype_conf.type, value=value, limits=prototype_conf.limits) if key in attr: - prototype_conf = PrototypeConfig.objects.filter(name=key, prototype=obj.prototype, type="group") + prototype_conf = PrototypeConfig.objects.filter( + name=key, prototype=obj.prototype, type="group", action=None + ) if not prototype_conf or "activatable" not in prototype_conf.first().limits: raise AnsibleError("'active' key should be used only with activatable group") @@ -409,7 +457,7 @@ def update_config(obj: ADCMEntity, conf: dict, attr: dict) -> dict | int | str: if not new_config[key] or subkey not in new_config[key]: new_config[key][subkey] = value - set_object_config(obj=obj, config=new_config, attr=new_attr) + set_object_config_with_plugin(obj=obj, config=new_config, attr=new_attr) if len(conf) == 1: return list(conf.values())[0] @@ -557,7 +605,7 @@ def log_check(job_id: int, group_data: dict, check_data: dict) -> CheckLog: codename=f"view_{LogStorage.__name__.lower()}", ) for policy in (policy for policy in Policy.objects.all() if task_role in policy.role.child.all()): - assign_user_or_group_perm(policy=policy, permission=view_logstorage_permission, obj=log_storage) + assign_group_perm(policy=policy, permission=view_logstorage_permission, obj=log_storage) post_event( event="add_job_log", diff --git a/python/cm/api.py 
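With cast_to_type() in place, adcm_config no longer writes raw strings into typed parameters: the value is coerced against the PrototypeConfig type before it is stored, and an impossible conversion is reported as an AnsibleError. A quick illustration of the intended behaviour, assuming the module is importable as cm.ansible_plugin inside a working ADCM environment:

from cm.ansible_plugin import cast_to_type

assert cast_to_type(field_type="integer", value="42", limits={}) == 42
assert cast_to_type(field_type="float", value="2.5", limits={}) == 2.5
assert cast_to_type(field_type="string", value="42", limits={}) == "42"   # unknown types pass through unchanged
# cast_to_type(field_type="integer", value="forty two", limits={}) raises AnsibleError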
b/python/cm/api.py index b2121af34b..81c63be6b3 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -13,6 +13,7 @@ import json from functools import partial, wraps +from typing import Literal from cm.adcm_config.config import ( init_object_config, @@ -22,7 +23,8 @@ ) from cm.adcm_config.utils import proto_ref from cm.api_context import CTX -from cm.errors import raise_adcm_ex +from cm.errors import AdcmEx, raise_adcm_ex +from cm.flag import update_object_flag from cm.issue import ( check_bound_components, check_component_constraint, @@ -53,7 +55,7 @@ TaskLog, ) from cm.status_api import api_request, post_event -from cm.utils import obj_ref +from cm.utils import build_id_object_mapping, obj_ref from django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned from django.db.transaction import atomic, on_commit @@ -70,19 +72,19 @@ def check_license(prototype: Prototype) -> None: ) -def version_in(version: str, ver: PrototypeImport) -> bool: - if ver.min_strict: - if rpm.compare_versions(version, ver.min_version) <= 0: +def is_version_suitable(version: str, prototype_import: PrototypeImport) -> bool: + if prototype_import.min_strict: + if rpm.compare_versions(version, prototype_import.min_version) <= 0: return False - elif ver.min_version: - if rpm.compare_versions(version, ver.min_version) < 0: + elif prototype_import.min_version: + if rpm.compare_versions(version, prototype_import.min_version) < 0: return False - if ver.max_strict: - if rpm.compare_versions(version, ver.max_version) >= 0: + if prototype_import.max_strict: + if rpm.compare_versions(version, prototype_import.max_version) >= 0: return False - elif ver.max_version: - if rpm.compare_versions(version, ver.max_version) > 0: + elif prototype_import.max_version: + if rpm.compare_versions(version, prototype_import.max_version) > 0: return False return True @@ -562,6 +564,7 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript with atomic(): config_log = save_obj_config(obj_conf=obj_conf, conf=new_conf, attr=attr, desc=description) update_hierarchy_issues(obj=obj) + update_object_flag(obj=obj) apply_policy_for_new_config(config_object=obj, config_log=config_log) if group is not None: @@ -582,7 +585,7 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript return config_log -def set_object_config(obj: ADCMEntity, config: dict, attr: dict) -> ConfigLog: +def set_object_config_with_plugin(obj: ADCMEntity, config: dict, attr: dict) -> ConfigLog: new_conf = process_json_config(prototype=obj.prototype, obj=obj, new_config=config, new_attr=attr) with atomic(): @@ -639,6 +642,49 @@ def check_sub(_sub_key, _sub_type, _item): raise_adcm_ex("INVALID_INPUT", f"duplicate ({item}) in host service list") +def retrieve_host_component_objects( + cluster: Cluster, plain_hc: list[dict[Literal["host_id", "component_id"], int]] +) -> list[tuple[ClusterObject, Host, ServiceComponent]]: + host_ids: set[int] = set() + component_ids: set[int] = set() + for record in plain_hc: + host_ids.add(record["host_id"]) + component_ids.add(record["component_id"]) + + hosts_in_hc: dict[int, Host] = build_id_object_mapping( + objects=Host.objects.select_related("cluster").filter(pk__in=host_ids) + ) + if len(hosts_in_hc) != len(host_ids): + message = f"hosts not found: {host_ids.difference(hosts_in_hc.keys())}" + raise AdcmEx(code="HOST_NOT_FOUND", msg=message) + + components_in_hc: dict[int, ServiceComponent] = build_id_object_mapping( + 
objects=ServiceComponent.objects.select_related("service").filter(pk__in=component_ids, cluster=cluster) + ) + if len(components_in_hc) != len(component_ids): + message = f"components not found: {component_ids.difference(components_in_hc.keys())}" + raise AdcmEx(code="COMPONENT_NOT_FOUND", msg=message) + + host_component_objects = [] + + for record in plain_hc: + host: Host = hosts_in_hc[record["host_id"]] + + if not host.cluster: + message = f"host #{host.pk} {host.fqdn} does not belong to any cluster" + raise AdcmEx(code="FOREIGN_HOST", msg=message) + + if host.cluster.pk != cluster.pk: + message = f"host {host.fqdn} (cluster #{host.cluster.pk}) does not belong to cluster #{cluster.pk}" + raise AdcmEx(code="FOREIGN_HOST", msg=message) + + component: ServiceComponent = components_in_hc[record["component_id"]] + + host_component_objects.append((component.service, host, component)) + + return host_component_objects + + def make_host_comp_list(cluster: Cluster, hc_in: list[dict]) -> list[tuple[ClusterObject, Host, ServiceComponent]]: host_comp_list = [] for item in hc_in: @@ -773,6 +819,36 @@ def save_hc( return host_component_list +def set_host_component( + cluster: Cluster, host_component_objects: list[tuple[ClusterObject, Host, ServiceComponent]] +) -> list[HostComponent]: + """ + Save given hosts-components mapping if all sanity checks pass + """ + + check_hc_requires(shc_list=host_component_objects) + + check_bound_components(shc_list=host_component_objects) + + for service in ClusterObject.objects.select_related("prototype").filter(cluster=cluster): + check_component_constraint( + cluster=cluster, + service_prototype=service.prototype, + hc_in=[i for i in host_component_objects if i[0] == service], + ) + check_service_requires(cluster=cluster, proto=service.prototype) + + check_maintenance_mode(cluster=cluster, host_comp_list=host_component_objects) + + with atomic(): + on_commit( + func=partial(post_event, event="change_hostcomponentmap", object_id=cluster.pk, object_type="cluster") + ) + new_host_component = save_hc(cluster=cluster, host_comp_list=host_component_objects) + + return new_host_component + + def add_hc(cluster: Cluster, hc_in: list[dict]) -> list[HostComponent]: host_comp_list = check_hc(cluster=cluster, hc_in=hc_in) @@ -780,9 +856,9 @@ def add_hc(cluster: Cluster, hc_in: list[dict]) -> list[HostComponent]: on_commit( func=partial(post_event, event="change_hostcomponentmap", object_id=cluster.pk, object_type="cluster") ) - new_hc = save_hc(cluster=cluster, host_comp_list=host_comp_list) + new_host_component = save_hc(cluster=cluster, host_comp_list=host_comp_list) - return new_hc + return new_host_component def get_bind( @@ -808,7 +884,7 @@ def get_export(cluster: Cluster, service: ClusterObject | None, proto_import: Pr continue export_proto[prototype_export.prototype.pk] = True - if not version_in(version=prototype_export.prototype.version, ver=proto_import): + if not is_version_suitable(version=prototype_export.prototype.version, prototype_import=proto_import): continue if prototype_export.prototype.type == "cluster": @@ -947,8 +1023,6 @@ def get_prototype_import(import_pk: int, import_obj: Cluster | ClusterObject) -> def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[dict]): - # pylint: disable=too-many-locals,too-many-statements - check_bind_post(bind_list=bind_list) import_obj = get_bind_obj(cluster=cluster, service=service) old_bind = {} @@ -974,7 +1048,7 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: 
list[ f'Export {obj_ref(obj=export_obj)} does not match import name "{prototype_import.name}"', ) - if not version_in(version=export_obj.prototype.version, ver=prototype_import): + if not is_version_suitable(version=export_obj.prototype.version, prototype_import=prototype_import): raise_adcm_ex( "BIND_ERROR", f'Import "{export_obj.prototype.name}" of { proto_ref(prototype=prototype_import.prototype)} ' @@ -1023,8 +1097,6 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[ def bind( cluster: Cluster, service: ClusterObject | None, export_cluster: Cluster, export_service_pk: int | None ) -> dict: - # pylint: disable=too-many-branches - """ Adapter between old and new bind interface /api/.../bind/ -> /api/.../import/ diff --git a/python/cm/bundle.py b/python/cm/bundle.py index 7123feabe9..301c01397f 100644 --- a/python/cm/bundle.py +++ b/python/cm/bundle.py @@ -14,26 +14,29 @@ import functools import hashlib +import os import shutil import tarfile from collections.abc import Iterable from pathlib import Path from cm.adcm_config.config import init_object_config, switch_config -from cm.adcm_config.utils import proto_ref -from cm.errors import raise_adcm_ex +from cm.adcm_config.utils import cook_file_type_name, proto_ref +from cm.errors import AdcmEx, raise_adcm_ex from cm.logger import logger from cm.models import ( ADCM, Action, Bundle, Cluster, + ConfigLog, HostProvider, ProductCategory, Prototype, PrototypeConfig, PrototypeExport, PrototypeImport, + SignatureStatus, StageAction, StagePrototype, StagePrototypeConfig, @@ -48,6 +51,7 @@ from cm.status_api import post_event from django.conf import settings from django.db import IntegrityError, transaction +from gnupg import GPG, ImportResult from rbac.models import Role from rbac.upgrade.role import prepare_action_roles from version_utils import rpm @@ -62,11 +66,13 @@ ) -def prepare_bundle(bundle_file: str, bundle_hash: str, path: Path) -> Bundle: +def prepare_bundle( + bundle_file: str, bundle_hash: str, path: Path, verification_status: SignatureStatus = SignatureStatus.ABSENT +) -> Bundle: try: check_stage() process_bundle(path=path, bundle_hash=bundle_hash) - bundle_proto = get_stage_bundle(bundle_file) + bundle_proto = get_stage_bundle(bundle_file=bundle_file) second_pass() except Exception: clear_stage() @@ -74,7 +80,7 @@ def prepare_bundle(bundle_file: str, bundle_hash: str, path: Path) -> Bundle: raise try: - bundle = copy_stage(bundle_hash=bundle_hash, bundle_proto=bundle_proto) + bundle = copy_stage(bundle_hash=bundle_hash, bundle_proto=bundle_proto, verification_status=verification_status) order_versions() clear_stage() ProductCategory.re_collect() @@ -91,7 +97,80 @@ def prepare_bundle(bundle_file: str, bundle_hash: str, path: Path) -> Bundle: def load_bundle(bundle_file: str) -> Bundle: logger.info('loading bundle file "%s" ...', bundle_file) bundle_hash, path = process_file(bundle_file=bundle_file) - return prepare_bundle(bundle_file=bundle_file, bundle_hash=bundle_hash, path=path) + + bundle_archive, signature_file = get_bundle_and_signature_paths(path=path) + verification_status = get_verification_status(bundle_archive=bundle_archive, signature_file=signature_file) + untar_and_cleanup(bundle_archive=bundle_archive, signature_file=signature_file, bundle_hash=bundle_hash) + + return prepare_bundle( + bundle_file=bundle_file, bundle_hash=bundle_hash, path=path, verification_status=verification_status + ) + + +def get_bundle_and_signature_paths(path: Path) -> tuple[Path | None, Path | None]: + """ + 
Search for tarfile (actual bundle archive), `.sig` file (detached signature file) + This paths can be None when processing old style bundles + """ + + bundle_archive, signature_file = None, None + + for item in path.glob("*"): + if item.match("*.sig"): + if signature_file is not None: + raise AdcmEx(code="BUNDLE_ERROR", msg='More than one ".sig" file found') + signature_file = item.absolute() + continue + + if item.is_file() and tarfile.is_tarfile(item): + if bundle_archive is not None: + raise AdcmEx(code="BUNDLE_ERROR", msg="More than one tar file found") + bundle_archive = item.absolute() + continue + + return bundle_archive, signature_file + + +def untar_and_cleanup(bundle_archive: Path | None, signature_file: Path | None, bundle_hash: str) -> None: + if bundle_archive is not None: + untar_safe(bundle_hash=bundle_hash, path=bundle_archive) + bundle_archive.unlink() + if signature_file is not None: + signature_file.unlink() + + +def get_verification_status(bundle_archive: Path | None, signature_file: Path | None) -> SignatureStatus: + if bundle_archive is None or signature_file is None: + return SignatureStatus.ABSENT + + gpg = GPG(gpgbinary=os.popen("which gpg").read().strip()) + gpg.encoding = settings.ENCODING_UTF_8 + key_filepath = cook_file_type_name(obj=ADCM.objects.get(), key="global", sub_key="verification_public_key") + + try: + res: ImportResult = gpg.import_keys_file(key_path=key_filepath) + except (PermissionError, FileNotFoundError): + logger.warning("Can't read public key file: %s", key_filepath) + return SignatureStatus.INVALID + + if res.returncode != 0: + logger.warning("Bad gpg key: %s", res.stderr) + return SignatureStatus.INVALID + + with open(signature_file, mode="rb") as sign_stream: + if bool(gpg.verify_file(fileobj_or_path=sign_stream, data_filename=bundle_archive)): + return SignatureStatus.VALID + else: + return SignatureStatus.INVALID + + +def upload_file(file) -> Path: + file_path = Path(settings.DOWNLOAD_DIR, file.name) + with open(file_path, "wb+") as f: + for chunk in file.chunks(): + f.write(chunk) + + return file_path def update_bundle(bundle): @@ -145,7 +224,7 @@ def untar_safe(bundle_hash: str, path: Path) -> Path: try: dir_path = untar(bundle_hash=bundle_hash, bundle=path) except tarfile.ReadError: - raise_adcm_ex("BUNDLE_ERROR", f"Can't open bundle tar file: {path}") + raise_adcm_ex(code="BUNDLE_ERROR", msg=f"Can't open bundle tar file: {path}") return dir_path @@ -196,9 +275,8 @@ def get_hash(bundle_file: str) -> str: return sha1.hexdigest() -def load_adcm(): +def load_adcm(adcm_file: Path = Path(settings.BASE_DIR, "conf", "adcm", "config.yaml")): check_stage() - adcm_file = Path(settings.BASE_DIR, "conf", "adcm", "config.yaml") conf = read_definition(conf_file=adcm_file) if not conf: logger.warning("Empty adcm config (%s)", adcm_file) @@ -264,6 +342,26 @@ def upgrade_adcm(adcm, bundle): adcm.save() switch_config(adcm, new_proto, old_proto) + if rpm.compare_versions(old_proto.version, "2.6") <= 0 <= rpm.compare_versions(new_proto.version, "2.7"): + config_log_old = ConfigLog.objects.get(obj_ref=adcm.config, id=adcm.config.previous) + config_log_new = ConfigLog.objects.get(obj_ref=adcm.config, id=adcm.config.current) + log_rotation_on_fs = config_log_old.config.get("job_log", {}).get( + "log_rotation_on_fs", config_log_new.config["audit_data_retention"]["log_rotation_on_fs"] + ) + config_log_new.config["audit_data_retention"]["log_rotation_on_fs"] = log_rotation_on_fs + + log_rotation_in_db = config_log_old.config.get("job_log", {}).get( + 
"log_rotation_in_db", config_log_new.config["audit_data_retention"]["log_rotation_in_db"] + ) + config_log_new.config["audit_data_retention"]["log_rotation_in_db"] = log_rotation_in_db + + config_rotation_in_db = config_log_old.config.get("config_rotation", {}).get( + "config_rotation_in_db", config_log_new.config["audit_data_retention"]["config_rotation_in_db"] + ) + config_log_new.config["audit_data_retention"]["config_rotation_in_db"] = config_rotation_in_db + + config_log_new.save(update_fields=["config"]) + logger.info( "upgrade adcm OK from version %s to %s", old_proto.version, @@ -526,6 +624,7 @@ def copy_stage_prototype(stage_prototypes, bundle): "venv", "config_group_customization", "allow_maintenance_mode", + "allow_flags", ), ) if proto.license_path: @@ -681,6 +780,7 @@ def copy_stage_component(stage_components, stage_proto, prototype, bundle): "description", "adcm_min_version", "config_group_customization", + "allow_flags", "venv", ), ) @@ -753,13 +853,15 @@ def check_license(proto): return Prototype.objects.filter(license_hash=proto.license_hash, license="accepted").exists() -def copy_stage(bundle_hash, bundle_proto): +def copy_stage(bundle_hash: str, bundle_proto, verification_status: SignatureStatus = SignatureStatus.ABSENT) -> Bundle: bundle = copy_obj( bundle_proto, Bundle, ("name", "version", "edition", "description"), ) bundle.hash = bundle_hash + bundle.signature_status = verification_status + try: bundle.save() except IntegrityError: @@ -770,7 +872,7 @@ def copy_stage(bundle_hash, bundle_proto): ) stage_prototypes = StagePrototype.objects.exclude(type="component").order_by("id") - copy_stage_prototype(stage_prototypes, bundle) + copy_stage_prototype(stage_prototypes=stage_prototypes, bundle=bundle) for stage_prototype in stage_prototypes: proto = Prototype.objects.get(name=stage_prototype.name, type=stage_prototype.type, bundle=bundle) @@ -791,17 +893,19 @@ def copy_stage(bundle_hash, bundle_proto): prototype_export = PrototypeExport(prototype=proto, name=stage_prototype_export.name) prototype_export.save() - copy_stage_import(StagePrototypeImport.objects.filter(prototype=stage_prototype).order_by("id"), proto) + copy_stage_import( + stage_imports=StagePrototypeImport.objects.filter(prototype=stage_prototype).order_by("id"), prototype=proto + ) - copy_stage_sub_actions(bundle) - copy_stage_upgrade(StageUpgrade.objects.order_by("id"), bundle) + copy_stage_sub_actions(bundle=bundle) + copy_stage_upgrade(stage_upgrades=StageUpgrade.objects.order_by("id"), bundle=bundle) return bundle def update_bundle_from_stage( bundle, -): # pylint: disable=too-many-locals,too-many-branches,too-many-statements +): # pylint: disable=too-many-locals,too-many-statements for stage_prototype in StagePrototype.objects.order_by("id"): try: prototype = Prototype.objects.get( @@ -821,6 +925,7 @@ def update_bundle_from_stage( prototype.venv = stage_prototype.venv prototype.config_group_customization = stage_prototype.config_group_customization prototype.allow_maintenance_mode = stage_prototype.allow_maintenance_mode + prototype.allow_flags = stage_prototype.allow_flags except Prototype.DoesNotExist: prototype = copy_obj( stage_prototype, @@ -842,6 +947,7 @@ def update_bundle_from_stage( "venv", "config_group_customization", "allow_maintenance_mode", + "allow_flags", ), ) prototype.bundle = bundle @@ -1064,7 +1170,7 @@ def check_services(): prototype_data[prototype.name] = prototype.version -def get_stage_bundle(bundle_file): +def get_stage_bundle(bundle_file: str) -> StagePrototype: bundle = None 
clusters = StagePrototype.objects.filter(type="cluster") providers = StagePrototype.objects.filter(type="provider") diff --git a/python/cm/checker.py b/python/cm/checker.py index c8ab743e57..efabfb0901 100644 --- a/python/cm/checker.py +++ b/python/cm/checker.py @@ -14,6 +14,19 @@ import ruyaml +MATCH_DICT_RESERVED_DIRECTIVES = ("invisible_items",) + + +def _check_match_dict_reserved(data, rules, rule, path, parent=None): + if any(directive in rules[rule] for directive in MATCH_DICT_RESERVED_DIRECTIVES): + raise FormatError( + path=path, + message=f'{MATCH_DICT_RESERVED_DIRECTIVES} allowed only in "match: dict" sections', + data=data, + rule=rule, + parent=parent, + ) + def round_trip_load(stream, version=None, preserve_quotes=None, allow_duplicate_keys=False): """ @@ -64,7 +77,7 @@ class DataError(Exception): def check_type(data, data_type, path, rule=None, parent=None): - if not isinstance(data, data_type): + if not isinstance(data, data_type) or (isinstance(data, bool) and data_type is int): msg = f"Object should be a {str(data_type)}" if path: last = path[-1] @@ -78,7 +91,9 @@ def check_match_type(match, data, data_type, path, rule, parent=None): raise FormatError(path, msg, data, rule, parent) -def match_none(data, rules, rule, path, parent=None): # pylint: disable=unused-argument +def match_none(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) + if data is not None: msg = "Object should be empty" if path: @@ -92,7 +107,9 @@ def match_any(data, rules, rule, path, parent=None): # pylint: disable=unused-a def match_list(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) check_match_type("match_list", data, list, path, rule, parent) + for i, item in enumerate(data): process_rule(item, rules, rules[rule]["item"], path + [("Value of list index", i)], parent) @@ -102,8 +119,8 @@ def match_list(data, rules, rule, path, parent=None): def match_dict(data, rules, rule, path, parent=None): check_match_type("match_dict", data, dict, path, rule, parent) - if "required_items" in rules[rule]: - for i in rules[rule]["required_items"]: + if "required_items" in rules[rule] or "invisible_items" in rules[rule]: + for i in rules[rule].get("required_items", []) + rules[rule].get("invisible_items", []): if i not in data: raise FormatError(path, f'There is no required key "{i}" in map.', data, rule) @@ -121,7 +138,9 @@ def match_dict(data, rules, rule, path, parent=None): def match_dict_key_selection(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) check_match_type("dict_key_selection", data, dict, path, rule, parent) + key = rules[rule]["selector"] if key not in data: msg = f'There is no key "{key}" in map.' 
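The extra isinstance(data, bool) clause in check_type() exists because bool is a subclass of int in Python, so a plain isinstance(data, int) check silently accepts true/false where an integer is required:

def is_valid_int(data) -> bool:
    # mirrors the tightened guard: integers pass, booleans do not
    return isinstance(data, int) and not isinstance(data, bool)


assert is_valid_int(5)
assert not is_valid_int(True)   # isinstance(True, int) is True, hence the explicit exclusion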
@@ -137,6 +156,8 @@ def match_dict_key_selection(data, rules, rule, path, parent=None): def match_one_of(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) + errors = [] sub_errors = [] for obj in rules[rule]["variants"]: @@ -152,14 +173,17 @@ def match_one_of(data, rules, rule, path, parent=None): raise FormatError(path, msg, data, rule, parent, caused_by=errors) -def match_set(data, rules, rule, path, parent=None): # pylint: disable=unused-argument +def match_set(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) + if data not in rules[rule]["variants"]: msg = f'Value "{data}" not in set {rules[rule]["variants"]}' raise FormatError(path, msg, data, rule, parent=parent) def match_simple_type(obj_type): - def match(data, rules, rule, path, parent=None): # pylint: disable=unused-argument + def match(data, rules, rule, path, parent=None): + _check_match_dict_reserved(data=data, rules=rules, rule=rule, path=path, parent=parent) check_type(data, obj_type, path, rule, parent=parent) return match diff --git a/python/cm/errors.py b/python/cm/errors.py index b653eb9166..7d8dd0990a 100644 --- a/python/cm/errors.py +++ b/python/cm/errors.py @@ -43,6 +43,7 @@ "BIND_NOT_FOUND": ("bind doesn't exist", HTTP_404_NOT_FOUND, ERR), "PROVIDER_NOT_FOUND": ("provider doesn't exist", HTTP_404_NOT_FOUND, ERR), "HOST_NOT_FOUND": ("host doesn't exist", HTTP_404_NOT_FOUND, ERR), + "HOST_GROUP_CONFIG_NOT_FOUND": ("group config doesn't exist", HTTP_404_NOT_FOUND, ERR), "HOST_TYPE_NOT_FOUND": ("host type doesn't exist", HTTP_404_NOT_FOUND, ERR), "PROTOTYPE_NOT_FOUND": ("prototype doesn't exist", HTTP_404_NOT_FOUND, ERR), "HOSTSERVICE_NOT_FOUND": ("map host <-> component doesn't exist", HTTP_404_NOT_FOUND, ERR), @@ -56,6 +57,7 @@ "TASK_NOT_FOUND": ("task doesn't exist", HTTP_404_NOT_FOUND, ERR), "JOB_NOT_FOUND": ("job doesn't exist", HTTP_404_NOT_FOUND, ERR), "LOG_NOT_FOUND": ("log file is not found", HTTP_404_NOT_FOUND, ERR), + "LOG_FOR_TASK_VIEW_NOT_ALLOWED": ("method to read logs for tasks is not allowed", HTTP_405_METHOD_NOT_ALLOWED, ERR), "UPGRADE_NOT_FOUND": ("upgrade is not found", HTTP_404_NOT_FOUND, ERR), "USER_NOT_FOUND": ("user profile is not found", HTTP_404_NOT_FOUND, ERR), "GROUP_NOT_FOUND": ("group is not found", HTTP_404_NOT_FOUND, ERR), @@ -100,6 +102,7 @@ "DEFINITION_KEY_ERROR": ("config key error", HTTP_409_CONFLICT, ERR), "DEFINITION_TYPE_ERROR": ("config type error", HTTP_409_CONFLICT, ERR), "UPGRADE_ERROR": ("upgrade error", HTTP_409_CONFLICT, ERR), + "UPGRADE_NOT_FOUND_ERROR": ("upgrade error", HTTP_404_NOT_FOUND, ERR), "ACTION_ERROR": ("action error", HTTP_409_CONFLICT, ERR), "TASK_ERROR": ("task error", HTTP_409_CONFLICT, ERR), "TASK_IS_FAILED": ("task is failed", HTTP_409_CONFLICT, ERR), @@ -202,22 +205,26 @@ "ROLE_MODULE_ERROR": ("No role module with this name", HTTP_409_CONFLICT, ERR), "ROLE_CLASS_ERROR": ("No matching class in this module", HTTP_409_CONFLICT, ERR), "ROLE_FILTER_ERROR": ("Incorrect filter in role", HTTP_409_CONFLICT, ERR), - "ROLE_CREATE_ERROR": ("Error during process of role creating", HTTP_409_CONFLICT, ERR), + "ROLE_CREATE_ERROR": ("A role with this name already exists", HTTP_409_CONFLICT, ERR), "ROLE_UPDATE_ERROR": ("Error during process of role updating", HTTP_409_CONFLICT, ERR), "ROLE_CONFLICT": ( "Combination of cluster/service/component and provider permissions is not allowed", HTTP_409_CONFLICT, ERR, ), + 
"ROLE_DELETE_ERROR": ("Error during process of role deleting", HTTP_409_CONFLICT, ERR), "GROUP_CREATE_ERROR": ("Error during process of group creating", HTTP_409_CONFLICT, ERR), "GROUP_UPDATE_ERROR": ("Error during process of group updating", HTTP_400_BAD_REQUEST, ERR), - "GROUP_DELETE_ERROR": ("Built-in group could not be deleted", HTTP_405_METHOD_NOT_ALLOWED, ERR), + "GROUP_DELETE_ERROR": ("Built-in group could not be deleted", HTTP_409_CONFLICT, ERR), "POLICY_INTEGRITY_ERROR": ("Incorrect role or user list of policy", HTTP_400_BAD_REQUEST, ERR), "POLICY_CREATE_ERROR": ("Error during process of policy creating", HTTP_409_CONFLICT, ERR), "POLICY_UPDATE_ERROR": ("Error during process of policy updating", HTTP_409_CONFLICT, ERR), + "POLICY_DELETE_ERROR": ("Error during process of policy deleting", HTTP_409_CONFLICT, ERR), "USER_CREATE_ERROR": ("Error during process of user creating", HTTP_409_CONFLICT, ERR), "USER_UPDATE_ERROR": ("Error during process of user updating", HTTP_400_BAD_REQUEST, ERR), - "USER_DELETE_ERROR": ("Built-in user could not be deleted", HTTP_405_METHOD_NOT_ALLOWED, ERR), + "USER_DELETE_ERROR": ("Built-in user could not be deleted", HTTP_409_CONFLICT, ERR), + "USER_BLOCK_ERROR": ("Built-in user could not be blocked", HTTP_409_CONFLICT, ERR), + "USER_UNBLOCK_ERROR": ("Only superuser can reset login attempts.", HTTP_409_CONFLICT, ERR), "JOB_TERMINATION_ERROR": ("Can't terminate job", HTTP_409_CONFLICT, ERR), "USER_PASSWORD_TOO_SHORT_ERROR": ("This password is shorter than min password length", HTTP_400_BAD_REQUEST, ERR), "USER_PASSWORD_TOO_LONG_ERROR": ("This password is longer than max password length", HTTP_400_BAD_REQUEST, ERR), @@ -229,6 +236,8 @@ ERR, ), "BAD_REQUEST": ("Bad request", HTTP_400_BAD_REQUEST, ERR), + "HOSTPROVIDER_CREATE_ERROR": ("Error during process of host provider creating", HTTP_409_CONFLICT, ERR), + "CONFIG_OPTION_ERROR": ("error in config option type", HTTP_409_CONFLICT, ERR), } diff --git a/python/cm/flag.py b/python/cm/flag.py new file mode 100644 index 0000000000..3c315ddae7 --- /dev/null +++ b/python/cm/flag.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from cm.hierarchy import Tree
+from cm.models import (
+    ADCMEntity,
+    ConcernCause,
+    ConcernItem,
+    ConcernType,
+    KnownNames,
+    MessageTemplate,
+)
+
+
+def get_flag_name(obj: ADCMEntity, msg: str = "") -> str:
+    name = f"{obj} has an outdated configuration"
+    if msg:
+        name = f"{name}: {msg}"
+
+    return name
+
+
+def create_flag(obj: ADCMEntity, msg: str = "") -> ConcernItem:
+    reason = MessageTemplate.get_message_from_template(name=KnownNames.CONFIG_FLAG.value, source=obj)
+    if msg:
+        reason["message"] = f"{reason['message']}: {msg}"
+
+    flag = ConcernItem.objects.create(
+        type=ConcernType.FLAG,
+        name=get_flag_name(obj, msg),
+        reason=reason,
+        owner=obj,
+        cause=ConcernCause.CONFIG,
+        blocking=False,
+    )
+    return flag
+
+
+def remove_flag(obj: ADCMEntity, msg: str = "") -> None:
+    flag = get_own_flag(owner=obj, msg=msg)
+    if not flag:
+        return
+
+    flag.delete()
+
+
+def get_own_flag(owner: ADCMEntity, msg: str) -> ConcernItem:
+    return ConcernItem.objects.filter(
+        type=ConcernType.FLAG, owner_id=owner.pk, owner_type=owner.content_type, name=get_flag_name(owner, msg)
+    ).first()
+
+
+def update_hierarchy(concern: ConcernItem) -> None:
+    tree = Tree(obj=concern.owner)
+
+    related = set(concern.related_objects)
+    affected = {node.value for node in tree.get_directly_affected(node=tree.built_from)}
+
+    if related == affected:
+        return
+
+    for object_moved_out_hierarchy in related.difference(affected):
+        object_moved_out_hierarchy.remove_from_concerns(item=concern)
+
+    for new_object in affected.difference(related):
+        new_object.add_to_concerns(item=concern)
+
+
+def update_flags() -> None:
+    for flag in ConcernItem.objects.filter(type=ConcernType.FLAG):
+        if flag.owner is None:
+            flag.delete()
+            continue
+
+        update_hierarchy(concern=flag)
+
+
+def update_object_flag(obj: ADCMEntity, msg: str = "") -> None:
+    if not obj.prototype.allow_flags:
+        return
+
+    flag = get_own_flag(owner=obj, msg=msg)
+
+    if not flag:
+        flag = create_flag(obj=obj, msg=msg)
+
+    update_hierarchy(concern=flag)
diff --git a/python/cm/inventory.py b/python/cm/inventory.py
index 767e0174c2..77e88278ef 100644
--- a/python/cm/inventory.py
+++ b/python/cm/inventory.py
@@ -28,6 +28,7 @@
     HostComponent,
     HostProvider,
     MaintenanceMode,
+    ObjectType,
     Prototype,
     PrototypeExport,
     PrototypeImport,
@@ -52,6 +53,10 @@ def process_map(flat_spec: dict, config: dict) -> None:
         if prototype_config.type == "map":
             name = prototype_config.name
             sub_name = prototype_config.subname
+
+            if name not in config and not prototype_config.required:
+                continue
+
             if sub_name:
                 if config[name][sub_name] is None:
                     config[name][sub_name] = {}
@@ -180,15 +185,28 @@ def get_before_upgrade(obj: ADCMEntity, host: Host | None) -> dict:
             bundle_id = obj.cluster.before_upgrade["bundle_id"]
         else:
             bundle_id = obj.before_upgrade["bundle_id"]
-        old_proto = Prototype.objects.filter(name=obj.prototype.name, bundle_id=bundle_id).first()
-        old_spec, old_flat_spec, _, _ = get_prototype_config(prototype=old_proto)
-        config = process_config_and_attr(
-            obj=group_object or obj,
-            conf=config_log.config,
-            attr=config_log.attr,
-            spec=old_spec,
-            flat_spec=old_flat_spec,
-        )
+
+        obj_prototype = obj.prototype
+        try:
+            if obj_prototype.type == ObjectType.COMPONENT:
+                old_proto = Prototype.objects.get(
+                    name=obj_prototype.name, parent__name=obj_prototype.parent.name, bundle_id=bundle_id
+                )
+            else:
+                old_proto = Prototype.objects.get(name=obj_prototype.name, bundle_id=bundle_id)
+
+        except Prototype.DoesNotExist:
+            logger.info("Can't get old proto for %s. 
Old bundle id: %s", obj, bundle_id) + + else: + old_spec, old_flat_spec, _, _ = get_prototype_config(prototype=old_proto) + config = process_config_and_attr( + obj=group_object or obj, + conf=config_log.config, + attr=config_log.attr, + spec=old_spec, + flat_spec=old_flat_spec, + ) return {"state": obj.before_upgrade.get("state"), "config": config} diff --git a/python/cm/issue.py b/python/cm/issue.py old mode 100644 new mode 100755 index a4d8164ecb..1ffc315c7b --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -133,10 +133,14 @@ def do_check_import(cluster: Cluster, service: ClusterObject | None = None) -> t if service: proto = service.prototype - for prototype_import in PrototypeImport.objects.filter(prototype=proto): - if not prototype_import.required: - return True, "NOT_REQUIRED" + prototype_imports = PrototypeImport.objects.filter(prototype=proto) + if not prototype_imports.exists(): + return import_exist + if not any(prototype_imports.values_list("required", flat=True)): + return True, "NOT_REQUIRED" + + for prototype_import in prototype_imports.filter(required=True): import_exist = (False, None) for cluster_bind in ClusterBind.objects.filter(cluster=cluster): if cluster_bind.source_cluster and cluster_bind.source_cluster.prototype.name == prototype_import.name: @@ -240,55 +244,60 @@ def get_obj_config(obj: ADCMEntity) -> tuple[dict, dict]: return config_log.config, attr -def check_component_constraint( - cluster: Cluster, service_prototype: Prototype, hc_in: list, old_bundle: Bundle | None = None -) -> None: - ref = f"in host component list for {service_prototype.type} {service_prototype.name}" - all_host = Host.objects.filter(cluster=cluster) +def check_min_required_components(count: int, constraint: int, comp: ServiceComponent, ref: str) -> None: + if count < constraint: + raise AdcmEx( + code="COMPONENT_CONSTRAINT_ERROR", + msg=f'less then {constraint} required component "{comp.name}" ({count}) {ref}', + ) - def check_min(count: int, constraint: int, comp: ServiceComponent) -> None: - if count < constraint: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'less then {constraint} required component "{comp.name}" ({count}) {ref}', - ) - def check_max(count: int, constraint: int, comp: ServiceComponent) -> None: - if count > constraint: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'amount ({count}) of component "{comp.name}" more then maximum ({constraint}) {ref}', - ) +def check_max_required_components(count: int, constraint: int, comp: ServiceComponent, ref: str) -> None: + if count > constraint: + raise AdcmEx( + code="COMPONENT_CONSTRAINT_ERROR", + msg=f'amount ({count}) of component "{comp.name}" more then maximum ({constraint}) {ref}', + ) + + +def check_components_number_is_odd(count: int, constraint: str, comp: ServiceComponent, ref: str) -> None: + if count % 2 == 0: + raise AdcmEx( + code="COMPONENT_CONSTRAINT_ERROR", + msg=f'amount ({count}) of component "{comp.name}" should be odd ({constraint}) {ref}', + ) - def check_odd(count: int, constraint: str, comp: ServiceComponent) -> None: - if count % 2 == 0: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'amount ({count}) of component "{comp.name}" should be odd ({constraint}) {ref}', - ) - def check(comp: ServiceComponent, constraint: list) -> None: - count = 0 - for _, _, component in hc_in: - if comp.name == component.prototype.name: - count += 1 +def check_components_mapping_contraints( + cluster: Cluster, service_prototype: Prototype, comp: ServiceComponent, hc_in: list, 
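The component branch above is needed because component prototype names are only unique within their parent service prototype; a standalone sketch of the lookup rule, for illustration only:

from cm.models import ObjectType, Prototype

def find_old_prototype(prototype: Prototype, bundle_id: int) -> Prototype | None:
    query = {"name": prototype.name, "bundle_id": bundle_id}
    if prototype.type == ObjectType.COMPONENT:
        # components must also be matched by their parent service name
        query["parent__name"] = prototype.parent.name
    return Prototype.objects.filter(**query).first()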
constraint: list +) -> None: + all_hosts_number = Host.objects.filter(cluster=cluster).count() + ref = f"in host component list for {service_prototype.type} {service_prototype.name}" + count = 0 + for _, _, component in hc_in: + if comp.name == component.prototype.name: + count += 1 - if isinstance(constraint[0], int): - check_min(count=count, constraint=constraint[0], comp=comp) - if len(constraint) < 2: - check_max(count=count, constraint=constraint[0], comp=comp) + if isinstance(constraint[0], int): + check_min_required_components(count=count, constraint=constraint[0], comp=comp, ref=ref) + if len(constraint) < 2: + check_max_required_components(count=count, constraint=constraint[0], comp=comp, ref=ref) - if len(constraint) > 1: - if isinstance(constraint[1], int): - check_max(count=count, constraint=constraint[1], comp=comp) - elif constraint[1] == "odd" and count: - check_odd(count=count, constraint=constraint[1], comp=comp) + if len(constraint) > 1: + if isinstance(constraint[1], int): + check_max_required_components(count=count, constraint=constraint[1], comp=comp, ref=ref) + elif constraint[1] == "odd" and count: + check_components_number_is_odd(count=count, constraint=constraint[1], comp=comp, ref=ref) - if constraint[0] == "+": - check_min(count=count, constraint=len(all_host), comp=comp) - elif constraint[0] == "odd": - check_odd(count=count, constraint=constraint[0], comp=comp) + if constraint[0] == "+": + check_min_required_components(count=count, constraint=all_hosts_number, comp=comp, ref=ref) + elif constraint[0] == "odd": + check_components_number_is_odd(count=count, constraint=constraint[0], comp=comp, ref=ref) + +def check_component_constraint( + cluster: Cluster, service_prototype: Prototype, hc_in: list, old_bundle: Bundle | None = None +) -> None: for component_prototype in Prototype.objects.filter(parent=service_prototype, type="component"): if old_bundle: try: @@ -306,7 +315,13 @@ def check(comp: ServiceComponent, constraint: list) -> None: except Prototype.DoesNotExist: continue - check(comp=component_prototype, constraint=component_prototype.constraint) + check_components_mapping_contraints( + cluster=cluster, + service_prototype=service_prototype, + comp=component_prototype, + hc_in=hc_in, + constraint=component_prototype.constraint, + ) _issue_check_map = { @@ -420,16 +435,15 @@ def update_hierarchy_issues(obj: ADCMEntity) -> None: tree = Tree(obj) affected_nodes = tree.get_directly_affected(node=tree.built_from) for node in affected_nodes: - node_value = node.value - recheck_issues(obj=node_value) + recheck_issues(obj=node.value) def update_issue_after_deleting() -> None: """Remove issues which have no owners after object deleting""" - for concern in ConcernItem.objects.exclude(type=ConcernType.LOCK): + for concern in ConcernItem.objects.filter(type=ConcernType.ISSUE): tree = Tree(obj=concern.owner) affected = {node.value for node in tree.get_directly_affected(node=tree.built_from)} - related = set(concern.related_objects) # pylint: disable=consider-using-set-comprehension + related = set(concern.related_objects) if concern.owner is None: concern_str = str(concern) concern.delete() diff --git a/python/cm/job.py b/python/cm/job.py index 8babe7c412..6e02610b2a 100644 --- a/python/cm/job.py +++ b/python/cm/job.py @@ -18,7 +18,7 @@ from configparser import ConfigParser from functools import partial from pathlib import Path -from typing import Any +from typing import Any, Literal from audit.cases.common import get_or_create_audit_obj from audit.cef_logger import 
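The helpers extracted above encode the bundle constraint formats; a compact, illustrative restatement of their semantics (the real checks raise AdcmEx with code COMPONENT_CONSTRAINT_ERROR instead of returning bounds):

def constraint_bounds(constraint: list, hosts_in_cluster: int) -> tuple[int, int | None, bool]:
    """Return (minimum, maximum or None for unlimited, must_be_odd) for a component constraint."""
    minimum, maximum, must_be_odd = 0, None, False

    if isinstance(constraint[0], int):
        minimum = constraint[0]
        if len(constraint) < 2:
            maximum = constraint[0]   # a single int means "exactly N"
    elif constraint[0] == "+":
        minimum = hosts_in_cluster    # "+" means "on every host in the cluster"
    elif constraint[0] == "odd":
        must_be_odd = True

    if len(constraint) > 1:
        if isinstance(constraint[1], int):
            maximum = constraint[1]
        elif constraint[1] == "odd":
            must_be_odd = True        # the real check is skipped when the count is zero

    return minimum, maximum, must_be_odd

# constraint_bounds([1, "odd"], hosts_in_cluster=5) -> (1, None, True)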
cef_logger @@ -116,7 +116,7 @@ def start_task( return task -def check_action_hosts(action: Action, obj: ADCMEntity, cluster: Cluster | None, hosts: list[Host]) -> None: +def check_action_hosts(action: Action, obj: ADCMEntity, cluster: Cluster | None, hosts: list[int]) -> None: provider = None if obj.prototype.type == "provider": provider = obj @@ -148,9 +148,9 @@ def prepare_task( conf: dict, attr: dict, hostcomponent: list[dict], - hosts: list[Host], + hosts: list[int], verbose: bool, -) -> TaskLog: # pylint: disable=too-many-locals +) -> TaskLog: cluster = get_object_cluster(obj=obj) check_action_state(action=action, task_object=obj, cluster=cluster) _, spec, flat_spec = check_action_config(action=action, obj=obj, conf=conf, attr=attr) @@ -170,7 +170,6 @@ def prepare_task( attr = {} with atomic(): - # pylint: disable=too-many-locals if cluster: on_commit( func=partial(post_event, event="change_hostcomponentmap", object_id=cluster.pk, object_type="cluster") @@ -358,8 +357,8 @@ def check_hostcomponentmap( if not cluster: raise_adcm_ex(code="TASK_ERROR", msg="Only cluster objects can have action with hostcomponentmap") - for host_comp in new_hc: - if not hasattr(action, "upgrade"): + if not hasattr(action, "upgrade"): + for host_comp in new_hc: host = Host.obj.get(id=host_comp.get("host_id", 0)) if host.concerns.filter(type=ConcernType.LOCK).exists(): raise_adcm_ex(code="LOCK_ERROR", msg=f"object {host} is locked") @@ -570,7 +569,9 @@ def prepare_job( prepare_ansible_config(job_id, action, sub_action) -def get_selector(obj: ADCM | Cluster | ClusterObject | ServiceComponent | HostProvider | Host, action: Action) -> dict: +def get_selector( + obj: ADCM | Cluster | ClusterObject | ServiceComponent | HostProvider | Host, action: Action +) -> dict[str | ObjectType, dict[Literal["id", "name"], int | str]]: selector = {obj.prototype.type: {"id": obj.pk, "name": obj.display_name}} if obj.prototype.type == ObjectType.SERVICE: @@ -718,7 +719,7 @@ def create_task( conf: dict, attr: dict, hostcomponent: list[dict], - hosts: list[Host], + hosts: list[int], verbose: bool, post_upgrade_hc: list[dict], ) -> TaskLog: @@ -975,14 +976,6 @@ def prepare_ansible_config(job_id: int, action: Action, sub_action: SubAction): adcm_object = ADCM.objects.first() config_log = ConfigLog.objects.get(obj_ref=adcm_object.config, id=adcm_object.config.current) adcm_conf = config_log.config - mitogen = adcm_conf["ansible_settings"]["mitogen"] - - if mitogen: - config_parser["defaults"]["strategy"] = "mitogen_linear" - config_parser["defaults"]["strategy_plugins"] = str( - Path(settings.PYTHON_SITE_PACKAGES, "ansible_mitogen", "plugins", "strategy"), - ) - config_parser["defaults"]["host_key_checking"] = "False" forks = adcm_conf["ansible_settings"]["forks"] config_parser["defaults"]["forks"] = str(forks) diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py new file mode 100644 index 0000000000..ebc4212c90 --- /dev/null +++ b/python/cm/management/commands/collect_statistics.py @@ -0,0 +1,376 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
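With the mitogen branch deleted above, prepare_ansible_config() keeps writing only the forks value from ADCM's Ansible settings into the defaults section; a simplified sketch under that assumption (any other keys the real function writes are out of scope here):

from configparser import ConfigParser

def build_ansible_defaults(adcm_conf: dict) -> ConfigParser:
    parser = ConfigParser()
    parser["defaults"] = {}
    # only forks is taken from ADCM's Ansible settings now; the mitogen
    # strategy/strategy_plugins/host_key_checking keys are no longer emitted
    parser["defaults"]["forks"] = str(adcm_conf["ansible_settings"]["forks"])
    return parser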
+# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import os +from dataclasses import asdict, dataclass +from datetime import datetime as dt +from hashlib import md5 +from logging import getLogger +from pathlib import Path +from shutil import rmtree +from tarfile import TarFile +from tempfile import mkdtemp +from time import sleep, time +from typing import NamedTuple +from urllib.parse import urlunparse + +import requests +from audit.models import AuditLogOperationResult +from audit.utils import make_audit_log +from cm.adcm_config.config import get_adcm_config +from cm.models import ADCM, Bundle, Cluster, HostComponent, HostProvider +from django.conf import settings as adcm_settings +from django.core.management.base import BaseCommand +from django.db.models import Count, Prefetch, QuerySet +from rbac.models import Policy, Role, User +from rest_framework.status import HTTP_201_CREATED, HTTP_405_METHOD_NOT_ALLOWED + + +@dataclass +class ADCMData: + uuid: str + version: str + + +@dataclass +class BundleData: + name: str + version: str + edition: str + date: str + + +@dataclass +class HostComponentData: + host_name: str + component_name: str + service_name: str + + +@dataclass +class ClusterData: + name: str + host_count: int + bundle: dict + host_component_map: list[dict] + + +@dataclass +class HostProviderData: + name: str + host_count: int + bundle: dict + + +@dataclass +class UserData: + email: str + date_joined: str + + +@dataclass +class RoleData: + name: str + built_in: bool + + +class UrlComponents(NamedTuple): + scheme: str + netloc: str + path: str + params: str = "" + query: str = "" + fragment: str = "" + + +class RetryError(Exception): + pass + + +logger = getLogger("background_tasks") + + +class StatisticsSettings: # pylint: disable=too-many-instance-attributes + def __init__(self): + # pylint: disable=invalid-envvar-default + + adcm_uuid = str(ADCM.objects.get().uuid) + + self.enabled = self._get_enabled() + + self.url = self._get_url() + self.headers = {"Adcm-UUID": adcm_uuid, "accept": "application/json"} + self.timeout = 15 + + self.retries_limit = int(os.getenv("STATISTICS_RETRIES", 10)) + self.retries_frequency = int(os.getenv("STATISTICS_FREQUENCY", 1 * 60 * 60)) # in seconds + + self.format_version = 0.1 + self.adcm_uuid = adcm_uuid + + self.date_format = "%Y-%m-%d %H:%M:%S" + self.data_name = f"{dt.now().date().strftime('%Y_%m_%d')}_statistics" + + @staticmethod + def _get_enabled() -> bool: + if os.getenv("STATISTICS_ENABLED") is not None: + return os.environ["STATISTICS_ENABLED"].upper() in {"1", "TRUE"} + + attr, _ = get_adcm_config(section="statistics_collection") + return bool(attr["active"]) + + @staticmethod + def _get_url() -> str: + url_path = "/api/v1/statistic/adcm" + scheme = "http" + + if os.getenv("STATISTICS_URL") is not None: + netloc = os.environ["STATISTICS_URL"] + else: + _, config = get_adcm_config(section="statistics_collection") + netloc = config["url"] + + if len(splitted := netloc.split("://")) == 2: + scheme = splitted[0] + netloc = splitted[1] + + return urlunparse(components=UrlComponents(scheme=scheme, netloc=netloc, path=url_path)) + + +class Command(BaseCommand): + def __init__(self, *args, **kwargs): + self.settings = StatisticsSettings() + super().__init__(*args, **kwargs) + + def handle(self, *args, **options): + # pylint: disable=attribute-defined-outside-init + try: + self.tmp_dir = Path(mkdtemp()).absolute() + self.main() + except Exception: # pylint: 
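A standalone restatement of the endpoint resolution performed by StatisticsSettings._get_url() above, assuming the same default path and http fallback scheme:

from urllib.parse import urlunparse

def build_statistics_url(netloc: str, path: str = "/api/v1/statistic/adcm") -> str:
    scheme = "http"
    if len(parts := netloc.split("://")) == 2:
        scheme, netloc = parts
    return urlunparse((scheme, netloc, path, "", "", ""))

# build_statistics_url("adcm-usage-ss.arenadata.io")
# -> "http://adcm-usage-ss.arenadata.io/api/v1/statistic/adcm"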
disable=broad-exception-caught + self.log(msg="Unexpected error during statistics collection", method="exception") + finally: + rmtree(path=self.tmp_dir) + + def main(self): + # pylint: disable=attribute-defined-outside-init + if not self.settings.enabled: + self.log(msg="disabled") + return + + self.log(msg="started") + make_audit_log(operation_type="statistics", result=AuditLogOperationResult.SUCCESS, operation_status="launched") + + for try_number in range(self.settings.retries_limit): + self.last_try_timestamp = time() + + try: + self.check_connection() + archive_path = (self.tmp_dir / self.settings.data_name).with_suffix(".tar.gz") + if not archive_path.is_file(): + data = self.collect_statistics() + self.make_archive(target_path=archive_path, data=data) + self.send_data(file_path=archive_path) + make_audit_log( + operation_type="statistics", result=AuditLogOperationResult.SUCCESS, operation_status="completed" + ) + break + + except RetryError: + # skip last iteration sleep() call + if try_number < self.settings.retries_limit - 1: + self.sleep() + else: + make_audit_log( + operation_type="statistics", result=AuditLogOperationResult.FAIL, operation_status="completed" + ) + + self.log(msg="finished") + + def make_archive(self, target_path: Path, data: dict) -> None: + json_path = (self.tmp_dir / self.settings.data_name).with_suffix(".json") + with json_path.open(mode="w", encoding=adcm_settings.ENCODING_UTF_8) as json_file: + json.dump(obj=data, fp=json_file) + + with TarFile.open(name=target_path, mode="w:gz", encoding=adcm_settings.ENCODING_UTF_8, compresslevel=9) as tar: + tar.add(name=json_path, arcname=json_path.name) + + json_path.unlink() + self.log(msg=f"archive created {target_path}") + + def collect_statistics(self) -> dict: + self.log(msg="getting data...") + community_bundles_qs = Bundle.objects.filter(edition="community") + + return { + "adcm": asdict(ADCMData(uuid=self.settings.adcm_uuid, version=adcm_settings.ADCM_VERSION)), + "format_version": self.settings.format_version, + "data": { + "clusters": self._get_clusters_data(bundles=community_bundles_qs), + "bundles": self._get_bundles_data(bundles=community_bundles_qs), + "providers": self._get_hostproviders_data(bundles=community_bundles_qs), + "users": self._get_users_data(), + "roles": self._get_roles_data(), + }, + } + + def check_connection(self) -> None: + """expecting 405 response on HEAD request without headers""" + + try: + response = requests.head(url=self.settings.url, headers={}, timeout=self.settings.timeout) + except requests.exceptions.ConnectionError as e: + self.log(msg=f"error connecting to `{self.settings.url}`", method="exception") + raise RetryError from e + + if response.status_code != HTTP_405_METHOD_NOT_ALLOWED: + self.log(msg=f"Bad response: {response.status_code}`, HEAD {self.settings.url}`") + raise RetryError + + self.log(msg="connection established") + + def send_data(self, file_path): + self.log(msg="sending data...") + with file_path.open(mode="rb") as archive: + try: + response = requests.post( + url=self.settings.url, + headers=self.settings.headers, + files={"file": archive}, + timeout=self.settings.timeout, + ) + except requests.exceptions.ConnectionError as e: + self.log(msg=f"error connecting to `{self.settings.url}`", method="exception") + raise RetryError from e + + if response.status_code != HTTP_201_CREATED: + raise RetryError + + self.log(msg="data succesfully sent") + + def sleep(self): + sleep_seconds = self.last_try_timestamp + self.settings.retries_frequency - time() + 
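Since the collector is an ordinary Django management command, it can also be exercised by hand; a hedged invocation sketch, using the environment overrides read by StatisticsSettings above (the target host is a placeholder):

# Shell (overrides are optional; defaults come from the ADCM "statistics_collection" config):
#   STATISTICS_ENABLED=1 STATISTICS_URL=stats.example.invalid STATISTICS_RETRIES=1 \
#       python python/manage.py collect_statistics
# From Python, e.g. inside a Django shell:
from django.core.management import call_command

call_command("collect_statistics")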
sleep_seconds = max(sleep_seconds, 0) + + self.log(f"sleeping for {sleep_seconds} seconds") + sleep(sleep_seconds) + + def log(self, msg: str, method: str = "debug") -> None: + msg = f"Statistics collector: {msg}" + self.stdout.write(msg) + getattr(logger, method)(msg) + + @staticmethod + def _get_roles_data() -> list[dict]: + out_data = [] + + for role_data in Role.objects.filter( + pk__in=Policy.objects.filter(role__isnull=False).values_list("role_id", flat=True).distinct() + ).values("name", "built_in"): + out_data.append(asdict(RoleData(**role_data))) + + return out_data + + def _get_users_data(self) -> list[dict]: + out_data = [] + for user_data in User.objects.values("email", "date_joined"): + out_data.append( + asdict( + UserData( + email=user_data["email"], + date_joined=user_data["date_joined"].strftime(self.settings.date_format), + ) + ) + ) + + return out_data + + def _get_hostproviders_data(self, bundles: QuerySet[Bundle]) -> list[dict]: + out_data = [] + for hostprovider in ( + HostProvider.objects.filter(prototype__bundle__in=bundles) + .select_related("prototype__bundle") + .annotate(host_count=Count("host")) + ): + out_data.append( + asdict( + HostProviderData( + name=hostprovider.name, + host_count=hostprovider.host_count, + bundle=self._get_single_bundle_data(bundle=hostprovider.prototype.bundle), + ) + ) + ) + + return out_data + + @staticmethod + def _get_hostcomponent_data(cluster: Cluster) -> list[dict]: + out_data = [] + for hostcomponent in cluster.hostcomponent_set.all(): + out_data.append( + asdict( + HostComponentData( + host_name=md5( + hostcomponent.host.name.encode(encoding=adcm_settings.ENCODING_UTF_8) + ).hexdigest(), + component_name=hostcomponent.component.name, + service_name=hostcomponent.service.name, + ) + ) + ) + + return out_data + + def _get_clusters_data(self, bundles: QuerySet[Bundle]) -> list[dict]: + out_data = [] + for cluster in ( + Cluster.objects.filter(prototype__bundle__in=bundles) + .select_related("prototype__bundle") + .prefetch_related( + Prefetch( + lookup="hostcomponent_set", + queryset=HostComponent.objects.select_related("host", "service", "component"), + ) + ) + .annotate(host_count=Count("host")) + ): + out_data.append( + asdict( + ClusterData( + name=cluster.name, + host_count=cluster.host_count, + bundle=self._get_single_bundle_data(bundle=cluster.prototype.bundle), + host_component_map=self._get_hostcomponent_data(cluster=cluster), + ) + ) + ) + + return out_data + + def _get_single_bundle_data(self, bundle: Bundle) -> dict: + return asdict( + BundleData( + name=bundle.name, + version=bundle.version, + edition=bundle.edition, + date=bundle.date.strftime(self.settings.date_format), + ) + ) + + def _get_bundles_data(self, bundles: QuerySet[Bundle]) -> list[dict]: + out_data = [] + for bundle in bundles: + out_data.append(self._get_single_bundle_data(bundle=bundle)) + + return out_data diff --git a/python/cm/management/commands/logrotate.py b/python/cm/management/commands/logrotate.py index c70123aa30..1c67ea64ca 100644 --- a/python/cm/management/commands/logrotate.py +++ b/python/cm/management/commands/logrotate.py @@ -120,8 +120,7 @@ def __get_logrotate_config(self): "active": current_configlog.attr["logrotate"]["active"], "nginx": adcm_conf["logrotate"], }, - "job": adcm_conf["job_log"], - "config": adcm_conf["config_rotation"], + "config": adcm_conf["audit_data_retention"], } self.__log(f"Got rotation config: {logrotate_config}") return logrotate_config @@ -213,8 +212,8 @@ def __has_related_records(obj_conf: ObjectConfig) -> 
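For reference, the JSON that collect_statistics() above packs into the archive has roughly the following shape; all values are illustrative, and host names inside host_component_map are md5 hex digests of the real FQDNs:

example_payload = {
    "adcm": {"uuid": "3f2a9c7e-0000-0000-0000-000000000000", "version": "x.y"},
    "format_version": 0.1,
    "data": {
        "clusters": [
            {
                "name": "cluster_1",
                "host_count": 4,
                "bundle": {"name": "cluster_one", "version": "1.0", "edition": "community", "date": "2023-09-18 13:54:00"},
                "host_component_map": [
                    {"host_name": "<md5 of host fqdn>", "component_name": "component_1", "service_name": "service_1"},
                ],
            },
        ],
        "bundles": [{"name": "cluster_one", "version": "1.0", "edition": "community", "date": "2023-09-18 13:54:00"}],
        "providers": [{"name": "provider", "host_count": 5, "bundle": {"name": "provider", "version": "1.0", "edition": "community", "date": "2023-09-18 13:54:00"}}],
        "users": [{"email": "admin@example.com", "date_joined": "2023-09-18 13:54:00"}],
        "roles": [{"name": "ADCM User", "built_in": True}],
    },
}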
bool: def __run_joblog_rotation(self): try: # pylint: disable=too-many-nested-blocks - days_delta_db = self.config["job"]["log_rotation_in_db"] - days_delta_fs = self.config["job"]["log_rotation_on_fs"] + days_delta_db = self.config["config"]["log_rotation_in_db"] + days_delta_fs = self.config["config"]["log_rotation_on_fs"] if days_delta_db <= 0 and days_delta_fs <= 0: return @@ -239,7 +238,7 @@ def __run_joblog_rotation(self): JobLog.objects.filter(task__isnull=True).delete() self.__log("db JobLog rotated", "info") - if days_delta_fs > 0: # pylint: disable=too-many-nested-blocks + if days_delta_fs > 0: for name in os.listdir(settings.RUN_DIR): if not name.startswith("."): # a line of code is used for development path = settings.RUN_DIR / name diff --git a/python/cm/migrations/0111_allow_flag_functionality.py b/python/cm/migrations/0111_allow_flag_functionality.py new file mode 100644 index 0000000000..cb6dfcf5bb --- /dev/null +++ b/python/cm/migrations/0111_allow_flag_functionality.py @@ -0,0 +1,55 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by Django 3.2.17 on 2023-06-21 12:26 + +from django.db import migrations, models + +data = { + "name": "outdated configuration flag", + "template": { + "message": "${source} has an outdated configuration", + "placeholder": { + "source": {"type": "adcm_entity"}, + }, + }, +} + + +def insert_message_templates(apps, schema_editor): + message_template = apps.get_model("cm", "MessageTemplate") + message_template.objects.create(**data) + + +def insert_message_templates_revert(apps, schema_editor): + message_template = apps.get_model("cm", "MessageTemplate") + message_template.objects.filter(name=data["name"]).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("cm", "0110_message_template_required"), + ] + + operations = [ + migrations.AddField( + model_name="prototype", + name="allow_flags", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="stageprototype", + name="allow_flags", + field=models.BooleanField(default=False), + ), + migrations.RunPython(code=insert_message_templates, reverse_code=insert_message_templates_revert), + ] diff --git a/python/cm/migrations/0112_bundle_signature_status.py b/python/cm/migrations/0112_bundle_signature_status.py new file mode 100644 index 0000000000..c097b071f5 --- /dev/null +++ b/python/cm/migrations/0112_bundle_signature_status.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
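Migration 0111 above adds the allow_flags field and the flag message template; bundles opt in through an allow_flags key in the prototype definition, which stack.py (further down in this diff) copies onto the prototype and its components. A minimal, assumed bundle fragment for illustration:

# Assumed config.yaml fragment for a bundle that enables flags (other mandatory fields omitted):
BUNDLE_CONFIG_FRAGMENT = """
- type: cluster
  name: my_cluster
  version: "1.0"
  allow_flags: true
"""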
+ +# Generated by Django 3.2.19 on 2023-07-05 12:38 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("cm", "0111_allow_flag_functionality"), + ] + + operations = [ + migrations.AddField( + model_name="bundle", + name="signature_status", + field=models.CharField( + choices=[("valid", "valid"), ("invalid", "invalid"), ("absent", "absent")], + default="absent", + max_length=10, + ), + ), + ] diff --git a/python/cm/migrations/0113_mm_lower.py b/python/cm/migrations/0113_mm_lower.py new file mode 100644 index 0000000000..b5271e3de2 --- /dev/null +++ b/python/cm/migrations/0113_mm_lower.py @@ -0,0 +1,70 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by Django 3.2.19 on 2023-08-07 08:56 + +from django.db import migrations, models + +TRANSLATIONS = {"ON": "on", "OFF": "off", "CHANGING": "changing"} + + +def mm_to_lowercase(apps, schema_editor): + Host = apps.get_model("cm", "Host") + ClusterObject = apps.get_model("cm", "ClusterObject") + ServiceComponent = apps.get_model("cm", "ServiceComponent") + + for old_value, new_value in TRANSLATIONS.items(): + Host.objects.filter(maintenance_mode=old_value).update(maintenance_mode=new_value) + ClusterObject.objects.filter(_maintenance_mode=old_value).update(_maintenance_mode=new_value) + ServiceComponent.objects.filter(_maintenance_mode=old_value).update(_maintenance_mode=new_value) + + +def mm_to_uppercase(apps, schema_editor): + Host = apps.get_model("cm", "Host") + ClusterObject = apps.get_model("cm", "ClusterObject") + ServiceComponent = apps.get_model("cm", "ServiceComponent") + + for new_value, old_value in TRANSLATIONS.items(): + Host.objects.filter(maintenance_mode=old_value).update(maintenance_mode=new_value) + ClusterObject.objects.filter(_maintenance_mode=old_value).update(_maintenance_mode=new_value) + ServiceComponent.objects.filter(_maintenance_mode=old_value).update(_maintenance_mode=new_value) + + +class Migration(migrations.Migration): + dependencies = [ + ("cm", "0112_bundle_signature_status"), + ] + + operations = [ + migrations.RunPython(code=mm_to_lowercase, reverse_code=mm_to_uppercase), + migrations.AlterField( + model_name="clusterobject", + name="_maintenance_mode", + field=models.CharField( + choices=[("on", "on"), ("off", "off"), ("changing", "changing")], default="off", max_length=1000 + ), + ), + migrations.AlterField( + model_name="host", + name="maintenance_mode", + field=models.CharField( + choices=[("on", "on"), ("off", "off"), ("changing", "changing")], default="off", max_length=1000 + ), + ), + migrations.AlterField( + model_name="servicecomponent", + name="_maintenance_mode", + field=models.CharField( + choices=[("on", "on"), ("off", "off"), ("changing", "changing")], default="off", max_length=1000 + ), + ), + ] diff --git a/python/cm/migrations/0114_adcm_uuid.py b/python/cm/migrations/0114_adcm_uuid.py new file mode 100644 index 0000000000..6ed1930c3e --- /dev/null +++ b/python/cm/migrations/0114_adcm_uuid.py @@ -0,0 +1,31 @@ +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by Django 3.2.19 on 2023-09-18 13:54 + +import uuid + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("cm", "0113_mm_lower"), + ] + + operations = [ + migrations.AddField( + model_name="adcm", + name="uuid", + field=models.UUIDField(default=uuid.uuid4, editable=False), + ), + ] diff --git a/python/cm/models.py b/python/cm/models.py index 14512e0799..4a670fd91f 100644 --- a/python/cm/models.py +++ b/python/cm/models.py @@ -23,6 +23,7 @@ from enum import Enum from itertools import chain from typing import Optional +from uuid import uuid4 from cm.errors import AdcmEx from cm.logger import logger @@ -53,9 +54,23 @@ class ObjectType(models.TextChoices): class MaintenanceMode(models.TextChoices): - ON = "ON", "ON" - OFF = "OFF", "OFF" - CHANGING = "CHANGING", "CHANGING" + ON = "on", "on" + OFF = "off", "off" + CHANGING = "changing", "changing" + + +MAINTENANCE_MODE_BOTH_CASES_CHOICES = ( + ("on", "on"), + ("off", "off"), + ("ON", "ON"), + ("OFF", "OFF"), +) + + +class SignatureStatus(models.TextChoices): + VALID = "valid", "valid" + INVALID = "invalid", "invalid" + ABSENT = "absent", "absent" LICENSE_STATE = ( @@ -182,6 +197,7 @@ class Bundle(ADCMModel): description = models.TextField(blank=True) date = models.DateTimeField(auto_now=True) category = models.ForeignKey("ProductCategory", on_delete=models.RESTRICT, null=True) + signature_status = models.CharField(max_length=10, choices=SignatureStatus.choices, default=SignatureStatus.ABSENT) __error_code__ = "BUNDLE_NOT_FOUND" @@ -255,6 +271,7 @@ class Prototype(ADCMModel): config_group_customization = models.BooleanField(default=False) venv = models.CharField(default="default", max_length=1000, blank=False) allow_maintenance_mode = models.BooleanField(default=False) + allow_flags = models.BooleanField(default=False) __error_code__ = "PROTOTYPE_NOT_FOUND" @@ -264,10 +281,6 @@ def __str__(self): class Meta: unique_together = (("bundle", "type", "parent", "name", "version"),) - @property - def is_license_accepted(self) -> bool: - return self.license == LICENSE_STATE[1][0] - class ObjectConfig(ADCMModel): current = models.PositiveIntegerField() @@ -452,14 +465,14 @@ def set_state(self, state: str, event=None) -> None: def get_id_chain(self) -> dict: """ Get object ID chain for front-end URL generation in message templates - result looks like {'cluster': 12, 'service': 34, 'component': 45} + result looks like {'cluster_id': 12, 'service_id': 34, 'component_id': 45} """ ids = {} - ids[self.prototype.type] = self.pk - for attr in ["cluster", "service", "provider"]: + ids[f"{self.prototype.type}_id"] = self.pk + for attr in ["cluster_id", "service_id", "provider_id"]: value = getattr(self, attr, None) - if value: - ids[attr] = value.pk + if value is not None: + ids[attr] = value return ids @@ -502,6 +515,10 @@ def content_type(self): return ContentType.objects.get(app_label="cm", model=model_name) def delete(self, using=None, keep_parents=False): + for concern in 
self.concerns.filter(owner_type=self.content_type, owner_id=self.id): + logger.debug("Delete %s", str(concern)) + concern.delete() + super().delete(using, keep_parents) if self.config is not None and not isinstance(self, ServiceComponent): self.config.delete() @@ -542,6 +559,7 @@ def allowed(self, obj: ADCMEntity) -> bool: class ADCM(ADCMEntity): name = models.CharField(max_length=1000, choices=(("ADCM", "ADCM"),), unique=True) + uuid = models.UUIDField(default=uuid4, editable=False) @property def bundle_id(self): @@ -1253,11 +1271,11 @@ def prototype_type(self): def get_id_chain(self, target_ids: dict) -> dict: """Get action ID chain for front-end URL generation in message templates""" - target_ids["action"] = self.pk + target_ids["action_id"] = self.pk result = { - "type": self.prototype.type + "_action_run", + "type": f"{self.prototype.type}_action_run", "name": self.display_name or self.name, - "ids": target_ids, + "params": target_ids, } return result @@ -1536,6 +1554,8 @@ def lock_affected(self, objects: Iterable[ADCMEntity]) -> None: obj.add_to_concerns(item=self.lock) def unlock_affected(self) -> None: + self.refresh_from_db() + if not self.lock: return @@ -1585,6 +1605,10 @@ def cancel(self, event_queue: "cm.status_api.Event" = None, obj_deletion=False): except OSError as e: raise AdcmEx("NOT_ALLOWED_TERMINATION", f"Failed to terminate process: {e}") from e + @property + def duration(self) -> float: + return (self.finish_date - self.start_date).total_seconds() + class JobLog(ADCMModel): task = models.ForeignKey(TaskLog, on_delete=models.SET_NULL, null=True, default=None) @@ -1630,6 +1654,10 @@ def cancel(self, event_queue: "cm.status_api.Event" = None): if event_queue: event_queue.send_state() + @property + def duration(self) -> float: + return (self.finish_date - self.start_date).total_seconds() + class GroupCheckLog(ADCMModel): job = models.ForeignKey(JobLog, on_delete=models.SET_NULL, null=True, default=None) @@ -1699,6 +1727,7 @@ class StagePrototype(ADCMModel): config_group_customization = models.BooleanField(default=False) venv = models.CharField(default="default", max_length=1000, blank=False) allow_maintenance_mode = models.BooleanField(default=False) + allow_flags = models.BooleanField(default=False) __error_code__ = "PROTOTYPE_NOT_FOUND" @@ -1780,6 +1809,7 @@ class KnownNames(Enum): REQUIRED_IMPORT_ISSUE = "required import issue" # kwargs=(source, ) HOST_COMPONENT_ISSUE = "host component issue" # kwargs=(source, ) UNSATISFIED_REQUIREMENT_ISSUE = "unsatisfied service requirement" # kwargs=(source, ) + CONFIG_FLAG = "outdated configuration flag" # kwargs=(source, ) class PlaceHolderType(Enum): @@ -1867,11 +1897,11 @@ def _action_placeholder(cls, _, **kwargs) -> dict: return {} ids = target.get_id_chain() - ids["action"] = action.pk + ids["action_id"] = action.pk return { "type": PlaceHolderType.ACTION.value, "name": action.display_name, - "ids": ids, + "params": ids, } @classmethod @@ -1880,7 +1910,7 @@ def _prototype_placeholder(cls, _, **kwargs) -> dict: if proto: return { - "id": proto.id, + "params": {"prototype_id": proto.id}, "type": "prototype", "name": proto.display_name or proto.name, } @@ -1896,7 +1926,7 @@ def _adcm_entity_placeholder(cls, ph_name, **kwargs) -> dict: return { "type": obj.prototype.type, "name": obj.display_name, - "ids": obj.get_id_chain(), + "params": obj.get_id_chain(), } @classmethod @@ -1910,7 +1940,7 @@ def _job_placeholder(cls, _, **kwargs) -> dict: return { "type": PlaceHolderType.JOB.value, "name": action.display_name or action.name, - 
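With the "ids" -> "params" rename above, message-template placeholders now carry explicit *_id keys; a reconstructed example for a component action placeholder (identifiers and names are illustrative):

target_ids = {"cluster_id": 12, "service_id": 34, "component_id": 45}

action_placeholder = {
    "type": "component_action_run",
    "name": "Restart",
    "params": {**target_ids, "action_id": 7},
}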
"ids": job.id, + "params": {"job_id": job.id}, } @@ -1975,5 +2005,5 @@ def delete(self, using=None, keep_parents=False): class ADCMEntityStatus(models.TextChoices): - UP = "UP", "UP" - DOWN = "DOWN", "DOWN" + UP = "up", "up" + DOWN = "down", "down" diff --git a/python/cm/stack.py b/python/cm/stack.py index 5f87465d39..955b622141 100644 --- a/python/cm/stack.py +++ b/python/cm/stack.py @@ -22,11 +22,10 @@ import ruyaml import yaml -import yspec.checker from cm.adcm_config.checks import check_config_type from cm.adcm_config.config import read_bundle_file from cm.adcm_config.utils import proto_ref -from cm.checker import FormatError, check, round_trip_load +from cm.checker import FormatError, check, check_rule, round_trip_load from cm.errors import raise_adcm_ex from cm.logger import logger from cm.models import ( @@ -280,6 +279,7 @@ def save_prototype(path: Path, conf: dict, def_type: str, bundle_hash: str) -> S dict_to_obj(dictionary=conf, key="config_group_customization", obj=proto) dict_to_obj(dictionary=conf, key="allow_maintenance_mode", obj=proto) + dict_to_obj(dictionary=conf, key="allow_flags", obj=proto) fix_display_name(conf=conf, obj=proto) license_hash = get_license_hash(proto=proto, conf=conf, bundle_hash=bundle_hash) @@ -318,6 +318,11 @@ def check_component_constraint(proto, name, conf): code="INVALID_COMPONENT_DEFINITION", msg=f'constraint of component "{name}" in {proto_ref(prototype=proto)} should have only 1 or 2 elements', ) + if not conf["constraint"]: + raise_adcm_ex( + code="INVALID_COMPONENT_DEFINITION", + msg=f'constraint of component "{name}" in {proto_ref(prototype=proto)} should not be empty', + ) def save_components(proto: StagePrototype, conf: dict, bundle_hash: str) -> None: @@ -355,6 +360,8 @@ def save_components(proto: StagePrototype, conf: dict, bundle_hash: str) -> None process_config_group_customization(actual_config=component_conf, obj=component) dict_to_obj(dictionary=component_conf, key="config_group_customization", obj=component) + dict_to_obj(dictionary=component_conf, key="allow_flags", obj=component) + component.save() save_actions(prototype=component, config=component_conf, bundle_hash=bundle_hash) @@ -781,14 +788,14 @@ def get_yspec(prototype: StagePrototype | Prototype, bundle_hash: str, conf: dic msg=f'yspec file of config key "{name}/{subname}" yaml decode error: {e}', ) - success, error = yspec.checker.check_rule(rules=schema) + success, error = check_rule(rules=schema) if not success: raise_adcm_ex(code="CONFIG_TYPE_ERROR", msg=f'yspec file of config key "{name}/{subname}" error: {error}') return schema -def check_variant(config: dict) -> dict: # pylint: disable=unused-argument +def check_variant(config: dict) -> dict: vtype = config["source"]["type"] source = {"type": vtype, "args": None} @@ -906,7 +913,7 @@ def save_prototype_config( proto_conf: dict, bundle_hash: str, action: StageAction | None = None, -) -> None: # pylint: disable=too-many-statements,too-many-locals +) -> None: if not in_dict(dictionary=proto_conf, key="config"): return diff --git a/python/cm/status_api.py b/python/cm/status_api.py index bb30c94b1a..5bdd327eb6 100644 --- a/python/cm/status_api.py +++ b/python/cm/status_api.py @@ -218,8 +218,11 @@ def get_obj_status(obj: Cluster | ClusterObject | Host | HostComponent | Service url = f"host/{obj.pk}/" case HostComponent.__name__: url = f"host/{obj.host_id}/component/{obj.component_id}/" + obj = obj.component case ServiceComponent.__name__: url = f"component/{obj.pk}/" + case _: + raise ValueError("Wrong obj type") int_status 
= get_status(obj=obj, url=url) diff --git a/python/cm/tests/test_action.py b/python/cm/tests/test_action.py index e2a4bd1ce8..803c075c90 100644 --- a/python/cm/tests/test_action.py +++ b/python/cm/tests/test_action.py @@ -21,7 +21,6 @@ gen_prototype, gen_provider, ) -from django.conf import settings from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_409_CONFLICT @@ -142,10 +141,10 @@ class ActionAllowTest(BaseTestCase): # pylint: disable=too-many-instance-attributes def setUp(self) -> None: super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "cm" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" _, self.cluster, _ = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(self.files_dir, "cluster_test_host_actions_mm.tar"), cluster_name="test-cluster-1" + bundle_path=Path(self.test_files_dir, "cluster_test_host_actions_mm.tar"), cluster_name="test-cluster-1" ) service = add_service_to_cluster( cluster=self.cluster, @@ -188,7 +187,7 @@ def setUp(self) -> None: ) _, self.cluster_2, _ = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(self.files_dir, "cluster_with_various_actions.tar"), cluster_name="test-cluster-2" + bundle_path=Path(self.test_files_dir, "cluster_with_various_actions.tar"), cluster_name="test-cluster-2" ) self.service_2_robot = add_service_to_cluster( cluster=self.cluster_2, diff --git a/python/cm/tests/test_adcm_config.py b/python/cm/tests/test_adcm_config.py index 6aa5076987..a45c428871 100644 --- a/python/cm/tests/test_adcm_config.py +++ b/python/cm/tests/test_adcm_config.py @@ -46,7 +46,7 @@ def setUp(self) -> None: def apply_edit_adcm_settings_policy(self): policy = Policy.objects.create(name="test_host_policy", role=Role.objects.get(name="Edit ADCM settings")) - policy.user.add(self.no_rights_user) + policy.group.add(self.no_rights_user_group) policy.add_object(obj=self.adcm) policy.apply() diff --git a/python/cm/tests/test_bundle.py b/python/cm/tests/test_bundle.py index 61edf9a053..9a1a73eafa 100644 --- a/python/cm/tests/test_bundle.py +++ b/python/cm/tests/test_bundle.py @@ -43,11 +43,11 @@ class TestBundle(BaseTestCase): def setUp(self) -> None: super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "cm" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" def test_bundle_upload_duplicate_upgrade_fail(self): with self.assertRaises(TransactionManagementError) as raises_context: - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_duplicated.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_duplicated.tar")) # we expect here IntegrityError, but unittest do not raise it directly, # so check context of TransactionManagementError @@ -55,27 +55,27 @@ def test_bundle_upload_duplicate_upgrade_fail(self): self.assertIsInstance(raises_context.exception.__context__, IntegrityError) def test_bundle_upload_upgrade_different_upgrade_name_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_name.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_different_name.tar")) def test_bundle_upload_upgrade_different_from_edition_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_from_edition.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, 
"test_upgrade_different_from_edition.tar")) def test_bundle_upload_upgrade_different_min_version_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_min_version.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_different_min_version.tar")) def test_bundle_upload_upgrade_different_max_strict_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_max_strict.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_different_max_strict.tar")) def test_bundle_upload_upgrade_different_state_available_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_state_available.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_different_state_available.tar")) def test_bundle_upload_upgrade_different_state_on_success_success(self): - self.upload_and_load_bundle(path=Path(self.files_dir, "test_upgrade_different_state_on_success.tar")) + self.upload_and_load_bundle(path=Path(self.test_files_dir, "test_upgrade_different_state_on_success.tar")) def test_secretfile(self): bundle, cluster, config_log = self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files/config_cluster_secretfile_secretmap.tar", ), ) @@ -110,7 +110,7 @@ def test_secretfile(self): def test_secretfile_update_config(self): _, cluster, _ = self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files/test_secretfile_update_config.tar", ), ) @@ -162,7 +162,7 @@ def test_secretfile_update_config(self): def test_secretmap(self): _, cluster, config_log = self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files/config_cluster_secretfile_secretmap.tar", ), ) @@ -188,7 +188,7 @@ def test_secretmap(self): def test_secretmap_no_default(self): self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files/test_secret_config_v10_community.tar", ), ) @@ -196,7 +196,7 @@ def test_secretmap_no_default(self): def test_secretmap_no_default1(self): self.upload_bundle_create_cluster_config_log( bundle_path=Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files/test_secret_config_v12_community.tar", ), ) @@ -223,7 +223,7 @@ def test_provider_bundle_deletion(self): self.assertEqual(e.code, "PROVIDER_CONFLICT") def test_duplicate_component_name_fail(self): - path = Path(self.files_dir, "test_duplicate_component_name.tar") + path = Path(self.test_files_dir, "test_duplicate_component_name.tar") self.upload_bundle(path=path) response: Response = self.client.post( @@ -239,7 +239,7 @@ def test_duplicate_component_name_fail(self): ) def test_upload_hc_acl_cluster_action_without_service_fail(self): - path = Path(self.files_dir, "test_cluster_hc_acl_without_service.tar") + path = Path(self.test_files_dir, "test_cluster_hc_acl_without_service.tar") self.upload_bundle(path=path) response = self.client.post(path=reverse(viewname="v1:load-bundle"), data={"bundle_file": path.name}) @@ -253,7 +253,7 @@ def test_upload_hc_acl_cluster_action_without_service_fail(self): ) def test_upload_hc_acl_service_action_without_service_success(self): - path = Path(self.files_dir, "test_service_hc_acl_without_service.tar") + path = Path(self.test_files_dir, "test_service_hc_acl_without_service.tar") 
self.upload_bundle(path=path) response = self.client.post(path=reverse(viewname="v1:load-bundle"), data={"bundle_file": path.name}) @@ -261,7 +261,7 @@ def test_upload_hc_acl_service_action_without_service_success(self): self.assertEqual(response.status_code, HTTP_200_OK) def test_upload_hc_acl_component_action_without_service_fail(self): - path = Path(self.files_dir, "test_component_hc_acl_without_service.tar") + path = Path(self.test_files_dir, "test_component_hc_acl_without_service.tar") self.upload_bundle(path=path) response = self.client.post(path=reverse(viewname="v1:load-bundle"), data={"bundle_file": path.name}) diff --git a/python/cm/tests/test_component.py b/python/cm/tests/test_component.py index e612eba923..abbc693e02 100644 --- a/python/cm/tests/test_component.py +++ b/python/cm/tests/test_component.py @@ -86,7 +86,7 @@ def test_maintenance_mode_by_hosts(self): def test_maintenance_mode_by_service(self): self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, content_type=APPLICATION_JSON, ) diff --git a/python/cm/tests/test_flag.py b/python/cm/tests/test_flag.py new file mode 100644 index 0000000000..f112840982 --- /dev/null +++ b/python/cm/tests/test_flag.py @@ -0,0 +1,79 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from cm.flag import create_flag, get_flag_name, remove_flag, update_object_flag +from cm.hierarchy import Tree +from cm.models import ConcernCause, ConcernItem, ConcernType +from cm.tests.utils import generate_hierarchy + +from adcm.tests.base import BaseTestCase + + +class FlagTest(BaseTestCase): + """Tests for `cm.issue.create_issues()`""" + + def setUp(self) -> None: + super().setUp() + + self.hierarchy = generate_hierarchy() + self.cluster = self.hierarchy["cluster"] + self.cluster.prototype.allow_flags = True + self.cluster.prototype.save(update_fields=["allow_flags"]) + self.tree = Tree(self.cluster) + + def test_create_flag(self): + create_flag(obj=self.cluster) + flag = ConcernItem.objects.filter(type=ConcernType.FLAG, name=get_flag_name(obj=self.cluster)).first() + + self.assertIsNotNone(flag) + self.assertEqual(flag.owner, self.cluster) + reason = { + "message": "${source} has an outdated configuration", + "placeholder": { + "source": {"type": "cluster", "name": self.cluster.name, "params": {"cluster_id": self.cluster.id}} + }, + } + self.assertEqual(flag.reason, reason) + self.assertEqual(flag.cause, ConcernCause.CONFIG) + + def test_update_flags(self): + update_object_flag(obj=self.cluster) + for node in self.tree.get_directly_affected(self.tree.built_from): + concerns = node.value.concerns.all() + self.assertEqual(concerns.count(), 1) + self.assertEqual(ConcernType.FLAG.value, concerns.first().type) + + def test_unique_flag_name(self): + msg = "Test message" + update_object_flag(obj=self.cluster) + update_object_flag(obj=self.cluster, msg=msg) + concerns = self.cluster.concerns.all() + self.assertEqual(concerns.count(), 2) + + # test what flag with the same name will not create and not apply second time + update_object_flag(obj=self.cluster) + for node in self.tree.get_directly_affected(self.tree.built_from): + concerns = node.value.concerns.all() + self.assertEqual(concerns.count(), 2) + self.assertEqual(ConcernType.FLAG.value, concerns.first().type) + + def test_delete_flag_success(self): + msg = "Test message" + update_object_flag(obj=self.cluster) + update_object_flag(obj=self.cluster, msg=msg) + + remove_flag(obj=self.cluster) + for node in self.tree.get_directly_affected(self.tree.built_from): + concerns = node.value.concerns.all() + self.assertEqual(concerns.count(), 1) + self.assertEqual(ConcernType.FLAG.value, concerns.first().type) + self.assertEqual(concerns.first().name, get_flag_name(obj=self.cluster, msg=msg)) diff --git a/python/cm/tests/test_hc.py b/python/cm/tests/test_hc.py index 8bb1e98079..6d5607c504 100644 --- a/python/cm/tests/test_hc.py +++ b/python/cm/tests/test_hc.py @@ -31,66 +31,51 @@ class TestHC(BaseTestCase): - def test_action_hc_simple(self): # pylint: disable=too-many-locals + def test_action_hc_simple(self): bundle_1 = cook_cluster_bundle("1.0") cluster = cook_cluster(bundle_1, "Test1") bundle_2 = cook_provider_bundle("1.0") provider = cook_provider(bundle_2, "DF01") host_1 = Host.objects.get(provider=provider, fqdn="server01.inter.net") + action = Action(name="run") hc_list, _ = check_hostcomponentmap(cluster, action, []) - self.assertEqual(hc_list, None) - try: - action = Action(name="run", hostcomponentmap="qwe") + with self.assertRaises(AdcmEx) as e: + action = Action(name="run", hostcomponentmap=["qwe"]) hc_list, _ = check_hostcomponentmap(cluster, action, []) - - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "TASK_ERROR") - self.assertEqual(e.msg, "hc is required") + 
self.assertEqual(e.exception.code, "TASK_ERROR") + self.assertEqual(e.exception.msg, "hc is required") service = ClusterObject.objects.get(cluster=cluster, prototype__name="hadoop") sc1 = ServiceComponent.objects.get(cluster=cluster, service=service, prototype__name="server") - try: - action = Action(name="run", hostcomponentmap="qwe") + with self.assertRaises(AdcmEx) as e: + action = Action(name="run", hostcomponentmap=["qwe"]) hostcomponent = [{"service_id": service.id, "component_id": sc1.id, "host_id": 500}] hc_list, _ = check_hostcomponentmap(cluster, action, hostcomponent) + self.assertEqual(e.exception.code, "HOST_NOT_FOUND") - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "HOST_NOT_FOUND") - - try: - action = Action(name="run", hostcomponentmap="qwe") + with self.assertRaises(AdcmEx) as e: + action = Action(name="run", hostcomponentmap=["qwe"]) hostcomponent = [{"service_id": service.id, "component_id": sc1.id, "host_id": host_1.id}] hc_list, _ = check_hostcomponentmap(cluster, action, hostcomponent) - - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "FOREIGN_HOST") + self.assertEqual(e.exception.code, "FOREIGN_HOST") add_host_to_cluster(cluster, host_1) - try: + with self.assertRaises(AdcmEx) as e: action = Action(name="run", hostcomponentmap="qwe") hostcomponent = [{"service_id": 500, "component_id": sc1.id, "host_id": host_1.id}] hc_list, _ = check_hostcomponentmap(cluster, action, hostcomponent) + self.assertEqual(e.exception.code, "CLUSTER_SERVICE_NOT_FOUND") - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "CLUSTER_SERVICE_NOT_FOUND") - - try: - action = Action(name="run", hostcomponentmap="qwe") + with self.assertRaises(AdcmEx) as e: + action = Action(name="run", hostcomponentmap=["qwe"]) hostcomponent = [{"service_id": service.id, "component_id": 500, "host_id": host_1.id}] hc_list, _ = check_hostcomponentmap(cluster, action, hostcomponent) + self.assertEqual(e.exception.code, "COMPONENT_NOT_FOUND") - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "COMPONENT_NOT_FOUND") - - def test_action_hc(self): # pylint: disable=too-many-locals + def test_action_hc(self): bundle_1 = cook_cluster_bundle("1.0") cluster = cook_cluster(bundle_1, "Test1") bundle_2 = cook_provider_bundle("1.0") @@ -138,7 +123,7 @@ def test_action_hc(self): # pylint: disable=too-many-locals def test_empty_hostcomponent(self): test_bundle_filename = "min-3199.tar" test_bundle_path = Path( - settings.BASE_DIR, + self.base_dir, "python/cm/tests/files", test_bundle_filename, ) diff --git a/python/cm/tests/test_host.py b/python/cm/tests/test_host.py index c677a97081..44acb43036 100644 --- a/python/cm/tests/test_host.py +++ b/python/cm/tests/test_host.py @@ -14,7 +14,6 @@ from pathlib import Path from cm.models import Bundle, Cluster, Host, HostProvider, MaintenanceMode, Prototype -from django.conf import settings from django.urls import reverse from rest_framework.response import Response from rest_framework.status import ( @@ -29,7 +28,7 @@ from adcm.tests.base import APPLICATION_JSON, BaseTestCase -class TestHostAPI(BaseTestCase): # pylint: disable=too-many-public-methods +class TestHostAPI(BaseTestCase): def setUp(self) -> None: super().setUp() @@ -49,7 +48,7 @@ def setUp(self) -> None: "Contain-Hyphen.Dot", } - self.upload_and_load_bundle(path=Path(settings.BASE_DIR, "python", "cm", "tests", "files", "ssh.1.0.tar")) + 
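The try/except assertions in test_hc.py above were folded into assertRaises context managers; the general pattern, as a self-contained example unrelated to the ADCM fixtures:

import unittest

class AssertRaisesExample(unittest.TestCase):
    def test_error_is_reported(self):
        with self.assertRaises(ValueError) as error_context:
            int("not a number")
        self.assertIn("invalid literal", str(error_context.exception))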
self.upload_and_load_bundle(path=Path(self.base_dir, "python", "cm", "tests", "files", "ssh.1.0.tar")) self.provider = HostProvider.objects.create( name="test_provider", prototype=Prototype.objects.filter(type="provider").first(), diff --git a/python/cm/tests/test_inventory.py b/python/cm/tests/test_inventory.py index 1dc9f7de0a..8fbf56aebd 100644 --- a/python/cm/tests/test_inventory.py +++ b/python/cm/tests/test_inventory.py @@ -215,8 +215,6 @@ def test_get_host(self, mock_get_hosts, mock_get_provider_hosts): @patch("json.dump") @patch("cm.inventory.open") def test_prepare_job_inventory(self, mock_open, mock_dump): - # pylint: disable=too-many-locals - host2 = Host.objects.create(prototype=self.host_pt, fqdn="h2", cluster=self.cluster, provider=self.provider) action = Action.objects.create(prototype=self.cluster_pt) job = JobLog.objects.create(action=action, start_date=timezone.now(), finish_date=timezone.now()) @@ -431,10 +429,11 @@ def setUp(self): super().setUp() init_adcm() - self.files_dir = settings.BASE_DIR / "python" / "cm" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" _, self.cluster_hc_acl, _ = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(self.files_dir, "test_inventory_remove_group_mm_hosts.tar"), cluster_name="cluster_hc_acl" + bundle_path=Path(self.test_files_dir, "test_inventory_remove_group_mm_hosts.tar"), + cluster_name="cluster_hc_acl", ) self.provider = gen_provider(name="test_provider") @@ -495,7 +494,7 @@ def setUp(self): self.action_hc_acl = Action.objects.get(name="cluster_action_hc_acl", allow_in_maintenance_mode=True) _, self.cluster_target_group, _ = self.upload_bundle_create_cluster_config_log( - bundle_path=Path(self.files_dir, "cluster_mm_host_target_group.tar"), + bundle_path=Path(self.test_files_dir, "cluster_mm_host_target_group.tar"), cluster_name="cluster_target_group", ) diff --git a/python/cm/tests/test_job.py b/python/cm/tests/test_job.py index 84c0d70164..0d24782c21 100644 --- a/python/cm/tests/test_job.py +++ b/python/cm/tests/test_job.py @@ -72,7 +72,7 @@ class TestJob(BaseTestCase): def setUp(self): super().setUp() - self.files_dir = settings.BASE_DIR / "python" / "cm" / "tests" / "files" + self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" self.multijob_bundle = "multijob_cluster.tar" self.multijob_cluster_name = "multijob_cluster" self.test_user_username = "admin" @@ -85,7 +85,7 @@ def init_adcm(): init() def create_multijob_cluster(self) -> Response: - bundle_id = self.upload_and_load_bundle(path=Path(self.files_dir, self.multijob_bundle)).pk + bundle_id = self.upload_and_load_bundle(path=Path(self.test_files_dir, self.multijob_bundle)).pk return self.client.post( path=reverse(viewname="v1:cluster"), diff --git a/python/cm/tests/test_management_commands.py b/python/cm/tests/test_management_commands.py new file mode 100644 index 0000000000..157004cd8f --- /dev/null +++ b/python/cm/tests/test_management_commands.py @@ -0,0 +1,197 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +from api_v2.tests.base import BaseAPITestCase +from cm.models import ADCM, Bundle, ServiceComponent +from cm.tests.utils import gen_cluster, gen_provider +from django.conf import settings +from django.core.management import load_command_class +from rbac.models import Policy, Role, User + + +class TestStatistics(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + self.maxDiff = None # pylint: disable=invalid-name + + enterprise_bundle_cluster = Bundle.objects.create( + name="enterprise_cluster", version="1.0", edition="enterprise" + ) + enterprise_bundle_provider = Bundle.objects.create( + name="enterprise_provider", version="1.2", edition="enterprise" + ) + + gen_cluster(name="enterprise_cluster", bundle=enterprise_bundle_cluster) + gen_provider(name="enterprise_provider", bundle=enterprise_bundle_provider) + + adcm_user_role = Role.objects.get(name="ADCM User") + Policy.objects.create(name="test policy", role=adcm_user_role, built_in=False) + + host_1 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_1") + host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_2") + host_3 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_3") + host_unmapped = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_unmapped") + self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_not_in_cluster") + + for host in (host_1, host_2, host_3, host_unmapped): + self.add_host_to_cluster(cluster=self.cluster_1, host=host) + + service = self.add_service_to_cluster(service_name="service_1", cluster=self.cluster_1) + component_1 = ServiceComponent.objects.get( + cluster=self.cluster_1, service=service, prototype__name="component_1" + ) + component_2 = ServiceComponent.objects.get( + cluster=self.cluster_1, service=service, prototype__name="component_2" + ) + + self.add_hostcomponent_map( + cluster=self.cluster_1, + hc_map=[ + { + "host_id": host_1.pk, + "service_id": service.pk, + "component_id": component_1.pk, + }, + { + "host_id": host_2.pk, + "service_id": service.pk, + "component_id": component_1.pk, + }, + { + "host_id": host_3.pk, + "service_id": service.pk, + "component_id": component_2.pk, + }, + ], + ) + + @staticmethod + def _get_expected_data() -> dict: + date_fmt = "%Y-%m-%d %H:%M:%S" + + users = [ + { + "date_joined": User.objects.get(username="admin").date_joined.strftime(date_fmt), + "email": "admin@example.com", + }, + {"date_joined": User.objects.get(username="status").date_joined.strftime(date_fmt), "email": ""}, + {"date_joined": User.objects.get(username="system").date_joined.strftime(date_fmt), "email": ""}, + ] + + bundles = [ + { + "name": "ADCM", + "version": Bundle.objects.get(name="ADCM").version, + "edition": "community", + "date": Bundle.objects.get(name="ADCM").date.strftime(date_fmt), + }, + { + "name": "cluster_one", + "version": "1.0", + "edition": "community", + "date": Bundle.objects.get(name="cluster_one").date.strftime(date_fmt), + }, + { + "name": "cluster_two", + "version": "1.0", + "edition": "community", + "date": Bundle.objects.get(name="cluster_two").date.strftime(date_fmt), + }, + { + "name": "provider", + "version": "1.0", + "edition": "community", + "date": Bundle.objects.get(name="provider").date.strftime(date_fmt), + }, + ] + + clusters = [ + { + "name": "cluster_1", + "host_count": 4, + 
"bundle": { + "name": "cluster_one", + "version": "1.0", + "edition": "community", + "date": Bundle.objects.get(name="cluster_one").date.strftime(date_fmt), + }, + "host_component_map": [ + { + "host_name": "379679191547aa70b797855c744bf684", + "component_name": "component_1", + "service_name": "service_1", + }, + { + "host_name": "889214cc620857cbf83f2ccc0c190162", + "component_name": "component_1", + "service_name": "service_1", + }, + { + "host_name": "11ee6e2ffdb6fd444dab9ad0a1fbda9d", + "component_name": "component_2", + "service_name": "service_1", + }, + ], + }, + { + "name": "cluster_2", + "host_count": 0, + "bundle": { + "name": "cluster_two", + "version": "1.0", + "edition": "community", + "date": Bundle.objects.get(name="cluster_two").date.strftime(date_fmt), + }, + "host_component_map": [], + }, + ] + + providers = [ + { + "bundle": { + "date": Bundle.objects.get(name="provider").date.strftime(date_fmt), + "edition": "community", + "name": "provider", + "version": "1.0", + }, + "host_count": 5, + "name": "provider", + } + ] + + roles = [{"built_in": True, "name": "ADCM User"}] + + return { + "adcm": {"uuid": str(ADCM.objects.get().uuid), "version": settings.ADCM_VERSION}, + "data": { + "bundles": bundles, + "clusters": clusters, + "providers": providers, + "roles": roles, + "users": users, + }, + "format_version": 0.1, + } + + def test_data_success(self): + data = load_command_class(app_name="cm", name="collect_statistics").collect_statistics() + expected_data = self._get_expected_data() + + self.assertDictEqual(data["adcm"], expected_data["adcm"]) + self.assertEqual(data["format_version"], expected_data["format_version"]) + + self.assertListEqual(data["data"]["bundles"], expected_data["data"]["bundles"]) + self.assertListEqual(data["data"]["clusters"], expected_data["data"]["clusters"]) + self.assertListEqual(data["data"]["providers"], expected_data["data"]["providers"]) + self.assertListEqual(data["data"]["users"], expected_data["data"]["users"]) + self.assertListEqual(data["data"]["roles"], expected_data["data"]["roles"]) diff --git a/python/cm/tests/test_upgrade.py b/python/cm/tests/test_upgrade.py index 3af9e76b17..e488cccb74 100644 --- a/python/cm/tests/test_upgrade.py +++ b/python/cm/tests/test_upgrade.py @@ -503,7 +503,7 @@ def test_hc(self): # pylint: disable=too-many-locals self.assertEqual(len(host_components), 0) - def test_component(self): # pylint: disable=too-many-locals + def test_component(self): bundle_1 = cook_cluster_bundle("1.0") bundle_2 = cook_cluster_bundle("2.0") service_prototype = Prototype.objects.get(bundle=bundle_2, type="service", name="hadoop") diff --git a/python/cm/tests/test_yaml_checker.py b/python/cm/tests/test_yaml_checker.py new file mode 100644 index 0000000000..e871797ce8 --- /dev/null +++ b/python/cm/tests/test_yaml_checker.py @@ -0,0 +1,118 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from copy import deepcopy
+
+from cm.checker import FormatError, process_rule
+from django.test import TestCase
+
+test_data = {
+    "cluster": [
+        {
+            "cluster_name": "default_cluster",
+            "shards": [
+                {
+                    "weight": 1,
+                    "internal_replication": True,
+                    "replicas": [
+                        {
+                            "host": "test-adqm01.ru-central1.internal",
+                            "port": 9000,
+                            "uuid": "123-4gdfwpr-2erett",
+                            "user": "usr",
+                            "password": "pswd",
+                        }
+                    ],
+                }
+            ],
+        }
+    ]
+}
+
+test_rules = {
+    "root": {"match": "dict", "items": {"cluster": "cluster_list"}},
+    "cluster_list": {"match": "list", "item": "cluster_item"},
+    "cluster_item": {
+        "match": "dict",
+        "items": {"cluster_name": "string", "shards": "shard_list"},
+        "required_items": ["cluster_name"],
+    },
+    "shard_list": {"match": "list", "item": "shard_item"},
+    "shard_item": {
+        "match": "dict",
+        "items": {"weight": "integer", "internal_replication": "boolean", "replicas": "replica_list"},
+        "required_items": ["weight", "internal_replication"],
+    },
+    "replica_list": {"match": "list", "item": "replica_item"},
+    "replica_item": {
+        "match": "dict",
+        "items": {"host": "string", "port": "integer", "user": "string", "password": "string", "uuid": "string"},
+        "required_items": ["host", "port"],
+        "invisible_items": ["uuid"],
+    },
+    "string": {"match": "string"},
+    "integer": {"match": "int"},
+    "boolean": {"match": "bool"},
+}
+
+
+class TestYAMLChecker(TestCase):
+    def test_initial_data_correct_success(self):
+        process_rule(data=test_data, rules=test_rules, name="root")
+
+    def test_invisible_field_not_in_data_fail(self):
+        rules = deepcopy(test_rules)
+        rules["replica_item"]["invisible_items"].append("non_existent_field")
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_none_fail(self):
+        rules = deepcopy(test_rules)
+        rules["root"]["match"] = "none"
+        rules["root"]["invisible_items"] = ["something"]
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_list_fail(self):
+        rules = deepcopy(test_rules)
+        rules["cluster_list"]["invisible_items"] = ["something"]
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_dict_key_selection_fail(self):
+        rules = deepcopy(test_rules)
+        rules["root"]["match"] = "dict_key_selection"
+        rules["root"]["invisible_items"] = ["something"]
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_one_of_fail(self):
+        rules = deepcopy(test_rules)
+        rules["root"]["match"] = "one_of"
+        rules["root"]["invisible_items"] = ["something"]
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_set_fail(self):
+        rules = deepcopy(test_rules)
+        rules["root"]["match"] = "set"
+        rules["root"]["invisible_items"] = ["something"]
+        with self.assertRaises(FormatError):
+            process_rule(data=test_data, rules=rules, name="root")
+
+    def test_invisible_items_in_match_simple_type_fail(self):
+        for simple_type in ("string", "bool", "int", "float"):
+            rules = deepcopy(test_rules)
+            rules["root"]["match"] = simple_type
+            rules["root"]["invisible_items"] = ["something"]
+            with self.assertRaises(FormatError):
+                process_rule(data=test_data, rules=rules, name="root")
diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py
index bd7b9d2ce4..020ea07d78 100644
--- a/python/cm/upgrade.py
+++ b/python/cm/upgrade.py @@ -23,8 +23,8 @@ add_components_to_service, add_service_to_cluster, check_license, + is_version_suitable, save_hc, - version_in, ) from cm.errors import raise_adcm_ex from cm.issue import update_hierarchy_issues @@ -174,7 +174,7 @@ def check_upgrade_import( try: prototype_import = PrototypeImport.objects.get(prototype=prototype, name=export.prototype.name) - if not version_in(export.prototype.version, prototype_import): + if not is_version_suitable(export.prototype.version, prototype_import): return ( False, f'Import "{export.prototype.name}" of {prototype.type} "{prototype.name}" {prototype.version} ' @@ -201,7 +201,7 @@ def check_upgrade_import( import_obj = cbind.service if cbind.service else cbind.cluster prototype_import = PrototypeImport.objects.get(prototype=import_obj.prototype, name=export.prototype.name) - if not version_in(prototype.version, prototype_import): + if not is_version_suitable(prototype.version, prototype_import): return ( False, f'Export of {prototype.type} "{prototype.name}" {prototype.version} ' diff --git a/python/cm/utils.py b/python/cm/utils.py index 08a27ec025..8af5df7fe4 100644 --- a/python/cm/utils.py +++ b/python/cm/utils.py @@ -15,11 +15,22 @@ from collections.abc import Mapping from pathlib import Path from secrets import token_hex -from typing import Any, Iterable +from typing import Any, Iterable, Protocol, TypeVar from django.conf import settings +class WithPK(Protocol): + pk: int + + +ObjectWithPk = TypeVar("ObjectWithPk", bound=WithPK) + + +def build_id_object_mapping(objects: Iterable[ObjectWithPk]) -> dict[int, ObjectWithPk]: + return {object_.pk: object_ for object_ in objects} + + def dict_json_get_or_create(path: str | Path, field: str, value: Any = None) -> Any: with open(path, encoding=settings.ENCODING_UTF_8) as f: data = json.load(f) diff --git a/python/cm/validators.py b/python/cm/validators.py index eec76bcb32..13f6c80f5c 100644 --- a/python/cm/validators.py +++ b/python/cm/validators.py @@ -25,6 +25,14 @@ def __call__(self, value, serializer_field): raise AdcmEx("HOST_CONFLICT", "duplicate host") from e +class ClusterUniqueValidator(UniqueValidator): + def __call__(self, value, serializer_field): + try: + super().__call__(value, serializer_field) + except ValidationError as e: + raise AdcmEx("CLUSTER_CONFLICT", f'Cluster with name "{value}" already exists') from e + + class RegexValidator: def __init__(self, regex: str, code: str, msg: str): self._regex = re.compile(regex) diff --git a/python/init_db.py b/python/init_db.py index 92bc3a9d67..d84bc26476 100755 --- a/python/init_db.py +++ b/python/init_db.py @@ -14,6 +14,7 @@ import json from itertools import chain +from pathlib import Path from secrets import token_hex from django.conf import settings @@ -85,7 +86,7 @@ def recheck_issues(): update_hierarchy_issues(obj) -def init(): +def init(adcm_conf_file: Path = Path(settings.BASE_DIR, "conf", "adcm", "config.yaml")): logger.info("Start initializing ADCM DB...") if not User.objects.filter(username="admin").exists(): User.objects.create_superuser("admin", "admin@example.com", "admin", built_in=True) @@ -98,7 +99,7 @@ def init(): abort_all(event) clear_temp_tables() event.send_state() - load_adcm() + load_adcm(adcm_conf_file) drop_locks() recheck_issues() logger.info("ADCM DB is initialized") diff --git a/python/rbac/endpoints/group/views.py b/python/rbac/endpoints/group/views.py index ade9e1ce39..c16f1b2616 100644 --- a/python/rbac/endpoints/group/views.py +++ b/python/rbac/endpoints/group/views.py @@ -75,6 
+75,9 @@ def update(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs): instance = self.get_object() if instance.built_in: - raise_adcm_ex("GROUP_DELETE_ERROR") + raise_adcm_ex(code="GROUP_DELETE_ERROR") + + if instance.policy_set.exists(): + raise_adcm_ex(code="GROUP_DELETE_ERROR", msg="Group with policy should not be deleted") return super().destroy(request, args, kwargs) diff --git a/python/rbac/endpoints/me/views.py b/python/rbac/endpoints/me/views.py index d8ec6b6cc9..ceaa107fba 100644 --- a/python/rbac/endpoints/me/views.py +++ b/python/rbac/endpoints/me/views.py @@ -12,7 +12,7 @@ from rbac.endpoints.me.serializers import MeUserSerializer from rbac.models import User -from rbac.services.user import update +from rbac.services.user import update_user from rest_framework.generics import RetrieveUpdateAPIView @@ -24,4 +24,4 @@ def get_object(self): return User.objects.get(id=self.request.user.id) def perform_update(self, serializer: MeUserSerializer): - update(user=serializer.instance, context_user=self.request.user, partial=True, **serializer.validated_data) + update_user(user=serializer.instance, context_user=self.request.user, partial=True, **serializer.validated_data) diff --git a/python/rbac/endpoints/policy/serializers.py b/python/rbac/endpoints/policy/serializers.py index 629c5e42ad..10463277a2 100644 --- a/python/rbac/endpoints/policy/serializers.py +++ b/python/rbac/endpoints/policy/serializers.py @@ -13,7 +13,7 @@ import jsonschema from cm.models import Cluster, ClusterObject, Host, HostProvider, ServiceComponent from rbac.endpoints.serializers import BaseRelatedSerializer -from rbac.models import Group, Policy, Role, RoleTypes, User +from rbac.models import Group, Policy, Role, RoleTypes from rest_flex_fields.serializers import FlexFieldsSerializerMixin from rest_framework.exceptions import ValidationError from rest_framework.fields import ( @@ -92,11 +92,6 @@ class PolicyRoleSerializer(BaseRelatedSerializer): url = HyperlinkedIdentityField(view_name="v1:rbac:role-detail") -class PolicyUserSerializer(BaseRelatedSerializer): - id = PrimaryKeyRelatedField(queryset=User.objects.all()) - url = HyperlinkedIdentityField(view_name="v1:rbac:user-detail") - - class PolicyGroupSerializer(BaseRelatedSerializer): id = PrimaryKeyRelatedField(queryset=Group.objects.all()) url = HyperlinkedIdentityField(view_name="v1:rbac:group-detail") @@ -108,8 +103,7 @@ class PolicySerializer(FlexFieldsSerializerMixin, ModelSerializer): object = ObjectField(required=True) built_in = BooleanField(read_only=True) role = PolicyRoleSerializer() - user = PolicyUserSerializer(many=True, required=False) - group = PolicyGroupSerializer(many=True, required=False) + group = PolicyGroupSerializer(many=True, required=True) class Meta: model = Policy @@ -120,12 +114,10 @@ class Meta: "object", "built_in", "role", - "user", "group", "url", ) expandable_fields = { - "user": ("rbac.endpoints.user.views.UserSerializer", {"many": True}), "group": ("rbac.endpoints.group.views.GroupSerializer", {"many": True}), "role": "rbac.endpoints.role.views.RoleSerializer", } @@ -141,7 +133,6 @@ def validate_role(role): class PolicyAuditSerializer(ModelSerializer): role = SerializerMethodField() object = SerializerMethodField() - user = SerializerMethodField() group = SerializerMethodField() class Meta: @@ -151,7 +142,6 @@ class Meta: "description", "role", "object", - "user", "group", ) @@ -173,10 +163,6 @@ def get_object(obj: Policy) -> list[dict[str, int | str]]: for obj in obj.object.all() ] - @staticmethod 
- def get_user(obj: Policy) -> list[str, ...]: - return [user.username for user in obj.user.all()] - @staticmethod def get_group(obj: Policy) -> list[str, ...]: return [group.name for group in obj.group.all()] diff --git a/python/rbac/endpoints/policy/views.py b/python/rbac/endpoints/policy/views.py index a3a29cdf35..16fb5718ea 100644 --- a/python/rbac/endpoints/policy/views.py +++ b/python/rbac/endpoints/policy/views.py @@ -19,7 +19,7 @@ from rest_framework.status import ( HTTP_201_CREATED, HTTP_400_BAD_REQUEST, - HTTP_405_METHOD_NOT_ALLOWED, + HTTP_409_CONFLICT, ) from rest_framework.viewsets import ModelViewSet @@ -31,18 +31,15 @@ class PolicyViewSet(PermissionListMixin, ModelViewSet): # pylint: disable=too-m serializer_class = PolicySerializer permission_classes = (DjangoModelPermissionsAudit,) permission_required = ["rbac.view_policy"] - filterset_fields = ("id", "name", "built_in", "role", "user", "group") + filterset_fields = ("id", "name", "built_in", "role", "group") ordering_fields = ("id", "name", "built_in", "role") @audit def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) - if serializer.is_valid(raise_exception=True): - policy = policy_create(**serializer.validated_data) - - return Response(data=self.get_serializer(policy).data, status=HTTP_201_CREATED) - else: - return Response(data=serializer.errors, status=HTTP_400_BAD_REQUEST) + serializer.is_valid(raise_exception=True) + policy = policy_create(**serializer.validated_data) + return Response(data=self.get_serializer(policy).data, status=HTTP_201_CREATED) @audit def update(self, request, *args, **kwargs): @@ -50,7 +47,7 @@ def update(self, request, *args, **kwargs): policy = self.get_object() if policy.built_in: - return Response(status=HTTP_405_METHOD_NOT_ALLOWED) + return Response(status=HTTP_409_CONFLICT) serializer = self.get_serializer(policy, data=request.data, partial=partial) if serializer.is_valid(raise_exception=True): @@ -64,6 +61,6 @@ def update(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs): policy = self.get_object() if policy.built_in: - return Response(status=HTTP_405_METHOD_NOT_ALLOWED) + return Response(status=HTTP_409_CONFLICT) return super().destroy(request, *args, **kwargs) diff --git a/python/rbac/endpoints/role/views.py b/python/rbac/endpoints/role/views.py index b03e9f4c37..c9688c2843 100644 --- a/python/rbac/endpoints/role/views.py +++ b/python/rbac/endpoints/role/views.py @@ -26,7 +26,7 @@ HTTP_200_OK, HTTP_201_CREATED, HTTP_400_BAD_REQUEST, - HTTP_405_METHOD_NOT_ALLOWED, + HTTP_409_CONFLICT, ) from rest_framework.viewsets import ModelViewSet @@ -87,7 +87,7 @@ def update(self, request, *args, **kwargs): instance = self.get_object() if instance.built_in: - return Response(status=HTTP_405_METHOD_NOT_ALLOWED) + return Response(status=HTTP_409_CONFLICT) serializer = self.get_serializer(data=request.data, partial=partial) @@ -102,7 +102,7 @@ def update(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs): instance = self.get_object() if instance.built_in: - return Response(status=HTTP_405_METHOD_NOT_ALLOWED) + return Response(status=HTTP_409_CONFLICT) return super().destroy(request, *args, **kwargs) @action(methods=["get"], detail=False) diff --git a/python/rbac/endpoints/user/serializers.py b/python/rbac/endpoints/user/serializers.py index 91f4c04281..8d8e24cfcb 100644 --- a/python/rbac/endpoints/user/serializers.py +++ b/python/rbac/endpoints/user/serializers.py @@ -12,7 +12,7 @@ from rbac.models import 
Group, User -from rbac.services.user import create, update +from rbac.services.user import create_user, update_user from rest_flex_fields.serializers import FlexFieldsSerializerMixin from rest_framework.fields import ( BooleanField, @@ -92,7 +92,7 @@ class Meta: def update(self, instance, validated_data): context_user = self.context["request"].user - return update( + return update_user( user=instance, context_user=context_user, partial=self.partial, @@ -101,7 +101,7 @@ def update(self, instance, validated_data): ) def create(self, validated_data): - return create(**validated_data) + return create_user(**validated_data) class UserAuditSerializer(ModelSerializer): diff --git a/python/rbac/endpoints/user/views.py b/python/rbac/endpoints/user/views.py index 0d53fe244c..7a58865395 100644 --- a/python/rbac/endpoints/user/views.py +++ b/python/rbac/endpoints/user/views.py @@ -25,7 +25,7 @@ class UserViewSet(PermissionListMixin, ModelViewSet): # pylint: disable=too-many-ancestors - queryset = User.objects.all() + queryset = User.objects.prefetch_related("groups").all() serializer_class = UserSerializer permission_classes = (DjangoModelPermissionsAudit,) permission_required = ["rbac.view_user"] diff --git a/python/rbac/migrations/0012_delete_inactive_users.py b/python/rbac/migrations/0012_delete_inactive_users.py new file mode 100644 index 0000000000..c6d47a1188 --- /dev/null +++ b/python/rbac/migrations/0012_delete_inactive_users.py @@ -0,0 +1,34 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by Django 3.2.19 on 2023-06-26 13:30 + +from django.db import migrations + + +def delete_inactive_users(apps, schema_editor) -> None: + RBACUser = apps.get_model("rbac", "User") + RBACUser.objects.filter(is_active=False).delete() + + +def delete_inactive_users_reverse(apps, schema_editor) -> None: + ... + + +class Migration(migrations.Migration): + dependencies = [ + ("rbac", "0011_user_last_failed_login_at"), + ] + + operations = [ + migrations.RunPython(code=delete_inactive_users, reverse_code=delete_inactive_users_reverse), + ] diff --git a/python/rbac/migrations/0013_auto_20230628_1342.py b/python/rbac/migrations/0013_auto_20230628_1342.py new file mode 100644 index 0000000000..1ffe0e9e31 --- /dev/null +++ b/python/rbac/migrations/0013_auto_20230628_1342.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
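# Editor's note: the 0012_delete_inactive_users migration above follows the standard
# reversible data-migration pattern; a stripped-down sketch of the same shape is shown
# here with placeholder app/model names (they are illustrative, not taken from this diff).
# Models are resolved via apps.get_model() so the historical schema state is used, and
# the reverse step is a deliberate no-op because deletions cannot be undone.
from django.db import migrations


def forwards(apps, schema_editor) -> None:
    SomeModel = apps.get_model("some_app", "SomeModel")
    SomeModel.objects.filter(is_active=False).delete()


def backwards(apps, schema_editor) -> None:
    ...


class Migration(migrations.Migration):
    dependencies = [("some_app", "0001_initial")]
    operations = [migrations.RunPython(code=forwards, reverse_code=backwards)]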
+ +# Generated by Django 3.2.17 on 2023-06-28 13:42 + +import hashlib + +from django.conf import settings +from django.db import migrations, models + + +def migrate_users_to_group(apps, schema_editor): + Policy = apps.get_model("rbac", "Policy") + Group = apps.get_model("rbac", "Group") + + for policy in Policy.objects.filter(user__isnull=False).distinct(): + policy_users = policy.user.all() + str_usernames = "".join(user.username for user in policy_users) + group_name = ( + f"migration_rbac_0012_group_{hashlib.sha256(str_usernames.encode(settings.ENCODING_UTF_8)).hexdigest()}" + ) + migrate_group, created = Group.objects.get_or_create(name=group_name, display_name=group_name, type="local") + if created or not migrate_group.user_set.all().exists(): + migrate_group.user_set.add(*policy_users) + + policy.user.remove(*policy_users) + policy.group.add(migrate_group) + + +def migrate_groups_to_users(apps, schema_editor): + Policy = apps.get_model("rbac", "Policy") + for policy in Policy.objects.prefetch_related("group").filter(group__name__startswith="migration_rbac_0012_group_"): + for group in policy.group.filter(name__startswith="migration_rbac_0012_group_"): + for group_policy in group.policy_set.all(): + group_policy.user.add(*group.user_set.values_list("id", flat=True)) + + group.delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("rbac", "0012_delete_inactive_users"), + ] + + operations = [ + migrations.RunPython(code=migrate_users_to_group, reverse_code=migrate_groups_to_users), + migrations.RemoveField( + model_name="policy", + name="user", + ), + migrations.RemoveField( + model_name="policy", + name="user_object_perm", + ), + migrations.AlterField( + model_name="policy", + name="group", + field=models.ManyToManyField(to="rbac.Group"), + ), + ] diff --git a/python/rbac/models.py b/python/rbac/models.py index 6ffb1c0694..918d110613 100644 --- a/python/rbac/models.py +++ b/python/rbac/models.py @@ -39,7 +39,7 @@ UniqueConstraint, ) from django.db.transaction import atomic -from guardian.models import GroupObjectPermission, UserObjectPermission +from guardian.models import GroupObjectPermission from rbac.utils import get_query_tuple_str from rest_framework.exceptions import ValidationError @@ -77,11 +77,6 @@ class User(AuthUser): failed_login_attempts = SmallIntegerField(default=0) blocked_at = DateTimeField(null=True) last_failed_login_at = DateTimeField(null=True) - - def delete(self, using=None, keep_parents=False): - self.is_active = False - self.save() - type = CharField(max_length=1000, choices=OriginType.choices, null=False, default=OriginType.LOCAL) @property @@ -207,16 +202,13 @@ class Policy(Model): role = ForeignKey(Role, on_delete=SET_NULL, null=True) object = ManyToManyField(PolicyObject, blank=True) built_in = BooleanField(default=True) - user = ManyToManyField(User, blank=True) - group = ManyToManyField(Group, blank=True) + group = ManyToManyField(Group) model_perm = ManyToManyField(PolicyPermission, blank=True) - user_object_perm = ManyToManyField(UserObjectPermission, blank=True) group_object_perm = ManyToManyField(GroupObjectPermission, blank=True) - def remove_permissions(self): # pylint: disable=too-many-branches,too-many-statements + def remove_permissions(self): # Placeholder in some places not used because we need to support Postgres and SQLite and I didn't find a way # to use placeholder for list of multiple values for SQLite so used string formatting - user_pks = self.user.values_list("pk", flat=True) group_pks = self.group.values_list("pk", 
flat=True) cursor = connection.cursor() @@ -248,16 +240,6 @@ def remove_permissions(self): # pylint: disable=too-many-branches,too-many-stat if permission_ids_to_delete: permission_ids_to_delete_str = get_query_tuple_str(tuple_items=permission_ids_to_delete) - if user_pks: - cursor.execute( - f""" - DELETE FROM auth_user_user_permissions WHERE permission_id IN ( - SELECT permission_id FROM rbac_policypermission WHERE user_id IS NOT NULL - AND id IN {permission_ids_to_delete_str} - ) AND user_id IN {get_query_tuple_str(tuple_items=tuple(user_pks))}; - """, - ) - if group_pks: cursor.execute( f""" @@ -283,47 +265,6 @@ def remove_permissions(self): # pylint: disable=too-many-branches,too-many-stat """, ) - cursor.execute( - """ - SELECT userobjectpermission_id FROM rbac_policy_user_object_perm WHERE ( - SELECT COUNT(DISTINCT policy_id) FROM rbac_policy_user_object_perm WHERE policy_id = %s - ) = 1 AND policy_id = %s; - """, - [self.pk, self.pk], - ) - userobj_permission_ids_to_delete = {item[0] for item in cursor.fetchall()} - if userobj_permission_ids_to_delete: - cursor.execute( - f""" - SELECT userobjectpermission_id FROM rbac_policy_user_object_perm - WHERE userobjectpermission_id - in {get_query_tuple_str(tuple_items=userobj_permission_ids_to_delete)} - AND policy_id != {self.pk}; - """ - ) - - userobj_permission_ids_to_keep = {item[0] for item in cursor.fetchall()} - if userobj_permission_ids_to_keep: - userobj_permission_ids_to_delete = tuple( - userobj_permission_ids_to_delete - userobj_permission_ids_to_keep - ) - else: - userobj_permission_ids_to_delete = tuple(userobj_permission_ids_to_delete) - - if userobj_permission_ids_to_delete: - userobj_permission_ids_to_delete_str = get_query_tuple_str(tuple_items=userobj_permission_ids_to_delete) - - cursor.execute( - f""" - DELETE FROM rbac_policy_user_object_perm WHERE userobjectpermission_id - IN {userobj_permission_ids_to_delete_str}; - """, - ) - - cursor.execute( - f"DELETE FROM guardian_userobjectpermission WHERE id IN {userobj_permission_ids_to_delete_str};", - ) - cursor.execute( """ SELECT groupobjectpermission_id FROM rbac_policy_group_object_perm WHERE ( diff --git a/python/rbac/roles.py b/python/rbac/roles.py index e1527f7cf7..3c35c67a75 100644 --- a/python/rbac/roles.py +++ b/python/rbac/roles.py @@ -29,7 +29,7 @@ from django.db import connection from django.db.models import QuerySet from django.db.transaction import atomic -from guardian.models import GroupObjectPermission, UserObjectPermission +from guardian.models import GroupObjectPermission from rbac.models import ( Permission, Policy, @@ -68,51 +68,16 @@ def apply(self, policy: Policy, role: Role, param_obj=None) -> None: policy_permission, _ = PolicyPermission.objects.get_or_create(group=group, permission=perm) policy.model_perm.add(policy_permission) - for user in policy.user.all(): - user.user_permissions.add(perm) - policy_permission, _ = PolicyPermission.objects.get_or_create(user=user, permission=perm) - policy.model_perm.add(policy_permission) - -def assign_user_or_group_perm(policy: Policy, permission: Permission, obj) -> None: +def assign_group_perm(policy: Policy, permission: Permission, obj) -> None: row_template = (obj.pk, ContentType.objects.get_for_model(model=obj).pk, permission.pk) - user_rows = [(*row_template, user_pk) for user_pk in policy.user.values_list("pk", flat=True)] group_rows = [(*row_template, group_pk) for group_pk in policy.group.values_list("pk", flat=True)] - if not any((user_rows, group_rows)): + if not group_rows: return cursor = 
connection.cursor() with atomic(): - if user_rows: - # Placeholder not used because we need to support Postgres and SQLite and I didn't find a way to use - # placeholder for list of multiple tuples for SQLite so used string formatting - - query_str = ( - "INSERT INTO guardian_userobjectpermission (object_pk, content_type_id, permission_id, user_id) VALUES" - ) - for row in user_rows: - query_str = f"{query_str} {row}," - - query_str = ( - f"{query_str[:-1]} ON CONFLICT (user_id, permission_id, object_pk) DO UPDATE SET " - "object_pk=EXCLUDED.object_pk, content_type_id=EXCLUDED.content_type_id, " - "permission_id=EXCLUDED.permission_id, user_id=EXCLUDED.user_id RETURNING id;" - ) - cursor.execute(query_str) - - rows = [ - (policy.pk, user_object_permission_id) - for user_object_permission_id in {item[0] for item in cursor.fetchall()} - ] - if rows: - query_str = "INSERT INTO rbac_policy_user_object_perm (policy_id, userobjectpermission_id) VALUES" - for row in rows: - query_str = f"{query_str} {row}," - - query_str = f"{query_str[:-1]} ON CONFLICT DO NOTHING;" - cursor.execute(query_str) - if group_rows: query_str = ( "INSERT INTO guardian_groupobjectpermission " @@ -156,7 +121,7 @@ def filter(self) -> QuerySet | None: def apply(self, policy: Policy, role: Role, param_obj=None) -> None: for obj in policy.get_objects(param_obj): for perm in role.get_permissions(): - assign_user_or_group_perm(policy=policy, permission=perm, obj=obj) + assign_group_perm(policy=policy, permission=perm, obj=obj) class ActionRole(AbstractRole): @@ -176,7 +141,7 @@ def apply(self, policy: Policy, role: Role, param_obj=None) -> None: content_type=ContentType.objects.get_for_model(model=Action), codename=f"view_{Action.__name__.lower()}", ) - assign_user_or_group_perm( + assign_group_perm( policy=policy, permission=permission, obj=action, @@ -201,11 +166,11 @@ def apply(self, policy: Policy, role: Role, param_obj=None) -> None: hosts.append(host) for host in hosts: - assign_user_or_group_perm(policy=policy, permission=perm, obj=host) + assign_group_perm(policy=policy, permission=perm, obj=host) continue - assign_user_or_group_perm(policy=policy, permission=perm, obj=obj) + assign_group_perm(policy=policy, permission=perm, obj=obj) class TaskRole(AbstractRole): @@ -228,13 +193,13 @@ def apply_jobs(task: TaskLog, policy: Policy) -> None: content_type=ContentType.objects.get_for_model(model=TaskLog), codename=f"view_{TaskLog.__name__.lower()}", ) - assign_user_or_group_perm(policy=policy, permission=view_tasklog_permission, obj=task) + assign_group_perm(policy=policy, permission=view_tasklog_permission, obj=task) change_tasklog_permission, _ = Permission.objects.get_or_create( content_type=ContentType.objects.get_for_model(model=TaskLog), codename=f"change_{TaskLog.__name__.lower()}", ) - assign_user_or_group_perm(policy=policy, permission=change_tasklog_permission, obj=task) + assign_group_perm(policy=policy, permission=change_tasklog_permission, obj=task) view_joblog_permission, _ = Permission.objects.get_or_create( content_type=ContentType.objects.get_for_model(model=JobLog), @@ -246,10 +211,10 @@ def apply_jobs(task: TaskLog, policy: Policy) -> None: ) for job in JobLog.objects.filter(task=task): - assign_user_or_group_perm(policy=policy, permission=view_joblog_permission, obj=job) + assign_group_perm(policy=policy, permission=view_joblog_permission, obj=job) for log in LogStorage.objects.filter(job=job): - assign_user_or_group_perm(policy=policy, permission=view_logstorage_permission, obj=log) + 
assign_group_perm(policy=policy, permission=view_logstorage_permission, obj=log) def re_apply_policy_for_jobs(action_object: ADCMEntity, task: TaskLog) -> None: @@ -270,23 +235,6 @@ def re_apply_policy_for_jobs(action_object: ADCMEntity, task: TaskLog) -> None: for obj, content_type in obj_type_map.items(): for policy in Policy.objects.filter(object__object_id=obj.id, object__content_type=content_type): - for user in policy.user.all(): - try: - user_obj_perm = UserObjectPermission.objects.get( - user=user, - permission__codename="view_action", - object_pk=task.action.pk, - ) - except UserObjectPermission.DoesNotExist: - continue - - if user_obj_perm in policy.user_object_perm.all() and user.has_perm( - perm=f"view_{object_model}", - obj=action_object, - ): - policy.role.child.add(task_role) - apply_jobs(task=task, policy=policy) - for group in policy.group.all(): try: group_obj_perm = GroupObjectPermission.objects.get( @@ -317,26 +265,6 @@ def apply_policy_for_new_config(config_object: ADCMEntity, config_log: ConfigLog for obj, content_type in obj_type_map.items(): for policy in Policy.objects.filter(object__object_id=obj.id, object__content_type=content_type): - for user in policy.user.all(): - try: - user_obj_perm = UserObjectPermission.objects.get( - user=user, - permission__codename="view_objectconfig", - object_pk=config_log.obj_ref_id, - ) - except UserObjectPermission.DoesNotExist: - continue - - if user_obj_perm in policy.user_object_perm.all() and user.has_perm( - perm=f"view_{object_model}", - obj=config_object, - ): - assign_user_or_group_perm( - policy=policy, - permission=permission, - obj=config_log, - ) - for group in policy.group.all(): try: group_obj_perm = GroupObjectPermission.objects.get( @@ -353,7 +281,7 @@ def apply_policy_for_new_config(config_object: ADCMEntity, config_log: ConfigLog continue if group_obj_perm in policy.group_object_perm.all() and model_view_gop: - assign_user_or_group_perm(policy=policy, permission=permission, obj=config_log) + assign_group_perm(policy=policy, permission=permission, obj=config_log) class ConfigRole(AbstractRole): @@ -367,9 +295,9 @@ def apply(self, policy: Policy, role: Role, param_obj=None) -> None: for perm in role.get_permissions(): if perm.content_type.model == "objectconfig": - assign_user_or_group_perm(policy=policy, permission=perm, obj=obj.config) + assign_group_perm(policy=policy, permission=perm, obj=obj.config) for config_group in config_groups: - assign_user_or_group_perm( + assign_group_perm( policy=policy, permission=perm, obj=config_group.config, @@ -377,14 +305,14 @@ def apply(self, policy: Policy, role: Role, param_obj=None) -> None: if perm.content_type.model == "configlog": for config in obj.config.configlog_set.all(): - assign_user_or_group_perm(policy=policy, permission=perm, obj=config) + assign_group_perm(policy=policy, permission=perm, obj=config) for config_group in config_groups: for config in config_group.config.configlog_set.all(): - assign_user_or_group_perm(policy=policy, permission=perm, obj=config) + assign_group_perm(policy=policy, permission=perm, obj=config) if perm.content_type.model == "groupconfig": for config_group in config_groups: - assign_user_or_group_perm(policy=policy, permission=perm, obj=config_group) + assign_group_perm(policy=policy, permission=perm, obj=config_group) class ParentRole(AbstractRole): @@ -435,7 +363,7 @@ def apply( for hostcomponent in HostComponent.obj.filter(cluster=obj.cluster, service=obj): self.find_and_apply(obj=hostcomponent.host, policy=policy, role=role) - 
assign_user_or_group_perm( + assign_group_perm( policy=policy, permission=Permission.objects.get(codename="view_cluster"), obj=obj.cluster, @@ -449,12 +377,12 @@ def apply( ): self.find_and_apply(obj=hostcomponent.host, policy=policy, role=role) - assign_user_or_group_perm( + assign_group_perm( policy=policy, permission=Permission.objects.get(codename="view_cluster"), obj=obj.cluster, ) - assign_user_or_group_perm( + assign_group_perm( policy=policy, permission=Permission.objects.get(codename="view_clusterobject"), obj=obj.service, diff --git a/python/rbac/services/policy.py b/python/rbac/services/policy.py index 33b93ab5f5..cfae9f45a0 100644 --- a/python/rbac/services/policy.py +++ b/python/rbac/services/policy.py @@ -10,12 +10,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from cm.errors import raise_adcm_ex +from cm.errors import AdcmEx, raise_adcm_ex from cm.models import ADCMEntity from django.contrib.contenttypes.models import ContentType from django.db import IntegrityError from django.db.transaction import atomic -from rbac.models import Policy, PolicyObject, Role +from rbac.models import Group, Policy, PolicyObject, Role def _check_objects(role: Role, objects: list[ADCMEntity]) -> None: @@ -43,16 +43,13 @@ def _check_objects(role: Role, objects: list[ADCMEntity]) -> None: @atomic -def policy_create(name: str, role: Role, built_in: bool = False, **kwargs) -> Policy | None: - users = kwargs.get("user", []) +def policy_create(name: str, role: Role, built_in: bool = False, **kwargs) -> Policy: groups = kwargs.get("group", []) - if not users and not groups: - raise_adcm_ex( - "POLICY_INTEGRITY_ERROR", - msg="Role should be bind with some users or groups", - ) - objects = kwargs.get("object", []) + + if not groups: + raise AdcmEx(code="POLICY_INTEGRITY_ERROR", msg="Policy should contain at least one group") + _check_objects(role, objects) description = kwargs.get("description", "") @@ -63,32 +60,28 @@ def policy_create(name: str, role: Role, built_in: bool = False, **kwargs) -> Po policy_object, _ = PolicyObject.objects.get_or_create(object_id=obj.id, content_type=content_type) policy.object.add(policy_object) - policy.user.add(*users) policy.group.add(*groups) policy.apply() return policy except IntegrityError as e: - raise_adcm_ex("POLICY_CREATE_ERROR", msg=f"Policy creation failed with error {e}") - - return None + raise AdcmEx(code="POLICY_CREATE_ERROR", msg=f"Policy creation failed with error {e}") from e @atomic -def policy_update(policy: Policy, **kwargs) -> Policy: - users = kwargs.get("user") - groups = kwargs.get("group") - if not (users or policy.user.all()) and not (groups or policy.group.all()): - raise_adcm_ex( +def policy_update(policy: Policy, group: list[Group] | None = None, **kwargs) -> Policy: + groups = group + if groups is not None and len(groups) == 0: + raise AdcmEx( "POLICY_INTEGRITY_ERROR", - msg="Role should be bind with some users or groups", + msg="Policy should contain at least one group", ) role = kwargs.get("role") objects = kwargs.get("object") - policy_old_objects = [po.object for po in policy.object.all()] - _check_objects(role or policy.role, objects if objects is not None else policy_old_objects) + + _check_objects(role or policy.role, objects if objects is not None else [po.object for po in policy.object.all()]) if "name" in kwargs: policy.name = kwargs["name"] @@ -99,11 +92,7 @@ def policy_update(policy: Policy, **kwargs) -> Policy: if role is not None: policy.role = role - if users is 
not None: - policy.user.clear() - policy.user.add(*users) - - if groups is not None: + if groups: policy.group.clear() policy.group.add(*groups) @@ -121,7 +110,7 @@ def policy_update(policy: Policy, **kwargs) -> Policy: try: policy.save() except IntegrityError as e: - raise_adcm_ex("POLICY_UPDATE_ERROR", msg=f"Policy update failed with error {e}") + raise AdcmEx("POLICY_UPDATE_ERROR", msg=f"Policy update failed with error {e}") from e policy.apply() diff --git a/python/rbac/services/role.py b/python/rbac/services/role.py index 5d1ea5c334..00af3e8ab6 100644 --- a/python/rbac/services/role.py +++ b/python/rbac/services/role.py @@ -66,8 +66,8 @@ def role_create(built_in=False, type_of_role=RoleTypes.ROLE, **kwargs) -> Role | role.child.add(*child) return role - except IntegrityError as e: - raise_adcm_ex("ROLE_CREATE_ERROR", msg=f"Role creation failed with error {e}") + except IntegrityError: + raise_adcm_ex("ROLE_CREATE_ERROR") return None diff --git a/python/rbac/services/user.py b/python/rbac/services/user.py index dea670ade7..bd2aa49bcb 100644 --- a/python/rbac/services/user.py +++ b/python/rbac/services/user.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from cm.errors import raise_adcm_ex +from cm.errors import AdcmEx from django.contrib.auth.password_validation import validate_password from django.core.exceptions import ObjectDoesNotExist from django.db import IntegrityError, transaction @@ -22,7 +22,7 @@ def _set_password(user: User, value: str) -> None: if not value: - raise_adcm_ex("USER_UPDATE_ERROR", msg="Password could not be empty") + raise AdcmEx(code="USER_UPDATE_ERROR", msg="Password could not be empty") if value is Empty or user.check_password(value): return @@ -44,12 +44,12 @@ def _update_groups(user: User, groups: [Empty, list[dict]]) -> None: try: group = Group.objects.get(id=group_id) - except ObjectDoesNotExist: + except ObjectDoesNotExist as e: msg = f"Group with ID {group_id} was not found" - raise_adcm_ex("USER_UPDATE_ERROR", msg=msg) + raise AdcmEx(code="USER_UPDATE_ERROR", msg=msg) from e if group and group.type == OriginType.LDAP: - raise_adcm_ex("USER_CONFLICT", msg="You cannot add user to LDAP group") + raise AdcmEx(code="USER_CONFLICT", msg="You cannot add user to LDAP group") user.groups.add(group) user_groups[group_id] = group @@ -59,7 +59,7 @@ def _update_groups(user: User, groups: [Empty, list[dict]]) -> None: continue if group.type == OriginType.LDAP: - raise_adcm_ex("USER_CONFLICT", msg="You cannot remove user from original LDAP group") + raise AdcmEx(code="USER_CONFLICT", msg="You cannot remove user from original LDAP group") user.groups.remove(group) @@ -73,12 +73,13 @@ def _regenerate_token(user: User) -> Token: @transaction.atomic -def update( +def update_user( user: User, context_user: User = None, # None is for use outside of web context *, partial: bool = False, need_current_password: bool = True, + api_v2_behaviour: bool = False, username: str = Empty, first_name: str = Empty, last_name: str = Empty, @@ -92,39 +93,42 @@ def update( ) -> User: # pylint: disable=too-many-locals + if api_v2_behaviour: + if not context_user.is_superuser and user != User.objects.get(user_ptr=context_user): + raise AdcmEx(code="USER_UPDATE_ERROR", msg="Can't update other user") + if (username is not Empty) and (username != user.username): - raise_adcm_ex("USER_CONFLICT", msg="Username could not be changed") + raise AdcmEx(code="USER_CONFLICT", msg="Username could not be changed") args = 
(username, first_name, last_name, email, is_superuser, is_active) if not partial and not all(arg is not Empty for arg in args): - raise_adcm_ex("USER_UPDATE_ERROR", msg="Full User update with partial argset is forbidden") + raise AdcmEx(code="USER_UPDATE_ERROR", msg="Full User update with partial argset is forbidden") user_exist = User.objects.filter(email=email).exists() if user_exist and (email != ""): email_user = User.objects.get(email=email) if email_user != user: - raise_adcm_ex(code="USER_CONFLICT", msg="User with the same email already exist") + raise AdcmEx(code="USER_CONFLICT", msg="User with the same email already exist") names = { "username": username, "first_name": first_name, "last_name": last_name, "email": email, - "is_superuser": is_superuser, "password": password, "is_active": is_active, } if user.type == OriginType.LDAP and any( (value is not Empty and getattr(user, key) != value) for key, value in names.items() ): - raise_adcm_ex(code="USER_CONFLICT", msg='You can change only "profile" for LDAP type user') + raise AdcmEx(code="USER_CONFLICT", msg='You can change only "profile" for LDAP type user') is_password_changing = password is not Empty and not user.check_password(raw_password=password) if is_password_changing: if need_current_password and ( current_password is Empty or not user.check_password(raw_password=current_password) ): - raise_adcm_ex(code="USER_PASSWORD_CURRENT_PASSWORD_REQUIRED_ERROR") + raise AdcmEx(code="USER_PASSWORD_CURRENT_PASSWORD_REQUIRED_ERROR") validate_password( password=password, @@ -148,15 +152,15 @@ def update( @transaction.atomic -def create( +def create_user( *, username: str, password: str, - first_name: str = None, - last_name: str = None, - email: str = None, - is_superuser: bool = None, - profile: dict = None, + first_name: str = "", + last_name: str = "", + email: str = "", + is_superuser: bool = False, + profile: str = "", groups: list = None, is_active: bool = True, ) -> User: @@ -167,7 +171,7 @@ def create( user_exist = User.objects.filter(email=email).exists() if user_exist and (email != ""): - raise_adcm_ex("USER_CREATE_ERROR", msg="User with the same email already exist") + raise AdcmEx(code="USER_CREATE_ERROR", msg="User with the same email already exist") validate_password( password=password, @@ -186,10 +190,10 @@ def create( is_active=is_active, ) except IntegrityError as e: - raise_adcm_ex("USER_CREATE_ERROR", msg=f"User creation failed with error {e}") + raise AdcmEx(code="USER_CREATE_ERROR", msg=f"User creation failed with error {e}") from e if not User: - raise_adcm_ex("USER_CREATE_ERROR", msg="User creation failed") + raise AdcmEx(code="USER_CREATE_ERROR", msg="User creation failed") _update_groups(user, groups or []) _regenerate_token(user) diff --git a/python/rbac/tests/test_api.py b/python/rbac/tests/test_api.py index c975b9d700..4518d12b73 100644 --- a/python/rbac/tests/test_api.py +++ b/python/rbac/tests/test_api.py @@ -30,129 +30,135 @@ def setUp(self) -> None: self.policy_data = [ ( {}, - "name - This field is required.;object - This field is required.;role - This field is required.;", + "name - This field is required.;object - This field is required.;role - This field is required.;" + "group - This field is required.;", ), ( {"name": []}, ( "name - This value does not match the required pattern.;" - "object - This field is required.;role - This field is required.;" + "object - This field is required.;role - This field is required.;group - This field is required.;" ), ), ( {"name": {}}, ( "name - This value does 
not match the required pattern.;" - "object - This field is required.;role - This field is required.;" + "object - This field is required.;role - This field is required.;group - This field is required.;" ), ), ( {"name": None}, ( "name - This field may not be null.;object - This field is required.;" - "role - This field is required.;" + "role - This field is required.;group - This field is required.;" ), ), ( {"name": "test", "role": None}, - "object - This field is required.;role - This field may not be null.;", + "object - This field is required.;role - This field may not be null.;" + "group - This field is required.;", ), ( {"name": "test", "role": 1}, ( "object - This field is required.;non_field_errors - Invalid data. " - "Expected a dictionary, but got int.;" + "Expected a dictionary, but got int.;group - This field is required.;" ), ), ( {"name": "test", "role": "string"}, ( "object - This field is required.;non_field_errors - Invalid data. " - "Expected a dictionary, but got str.;" + "Expected a dictionary, but got str.;group - This field is required.;" ), ), ( {"name": "test", "role": []}, ( "object - This field is required.;non_field_errors - Invalid data. " - "Expected a dictionary, but got list.;" + "Expected a dictionary, but got list.;group - This field is required.;" ), ), ( {"name": "test", "role": {}}, - "object - This field is required.;id - This field is required.;", + "object - This field is required.;id - This field is required.;group - This field is required.;", ), ( {"name": "test", "role": {"id": None}}, - "object - This field is required.;id - This field may not be null.;", + "object - This field is required.;id - This field may not be null.;group - This field is required.;", ), ( {"name": "test", "role": {"id": int_1000}}, - f'object - This field is required.;id - Invalid pk "{int_1000}" - object does not exist.;', + f'object - This field is required.;id - Invalid pk "{int_1000}" - object does not exist.;' + f"group - This field is required.;", ), ( {"name": "test", "role": {"id": "string"}}, - "object - This field is required.;id - Incorrect type. Expected pk value, received str.;", + "object - This field is required.;id - Incorrect type. 
Expected pk value, received str.;" + "group - This field is required.;", ), ( {"name": "test", "role": {"id": Role.objects.get(name="Create provider").pk}}, - 'object - This field is required.;role - Role with type "business" could not be used in policy;', + 'object - This field is required.;role - Role with type "business" could not be used in policy;' + "group - This field is required.;", ), ( {"name": "test", "role": {"id": Role.objects.get(name="Add host").pk}}, - 'object - This field is required.;role - Role with type "hidden" could not be used in policy;', + 'object - This field is required.;role - Role with type "hidden" could not be used in policy;' + "group - This field is required.;", ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": None}, - "object - This field is required.;user - This field may not be null.;", + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": None}, + "object - This field is required.;group - This field may not be null.;", ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": 1}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": 1}, 'object - This field is required.;non_field_errors - Expected a list of items but got type "int".;', ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": "string"}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": "string"}, 'object - This field is required.;non_field_errors - Expected a list of items but got type "str".;', ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": {}}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": {}}, 'object - This field is required.;non_field_errors - Expected a list of items but got type "dict".;', ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": [1]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": [1]}, ( "object - This field is required.;non_field_errors - Invalid data. " "Expected a dictionary, but got int.;" ), ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": ["string"]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": ["string"]}, ( "object - This field is required.;non_field_errors - Invalid data. " "Expected a dictionary, but got str.;" ), ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": [{}]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": [{}]}, "object - This field is required.;id - This field is required.;", ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": [{"id": None}]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": [{"id": None}]}, "object - This field is required.;id - This field may not be null.;", ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": [{"id": "string"}]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": [{"id": "string"}]}, "object - This field is required.;id - Incorrect type. 
Expected pk value, received str.;", ), ( - {"name": "test", "role": {"id": cluster_adm_role_pk}, "user": [{"id": int_1000}]}, + {"name": "test", "role": {"id": cluster_adm_role_pk}, "group": [{"id": int_1000}]}, f'object - This field is required.;id - Invalid pk "{int_1000}" - object does not exist.;', ), ( { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": None, }, "object - This field may not be null.;", @@ -161,7 +167,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": 1, }, "object - the field does not match the scheme;", @@ -170,7 +176,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": "string", }, "object - the field does not match the scheme;", @@ -179,7 +185,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": {}, }, "object - the field does not match the scheme;", @@ -188,7 +194,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": [1], }, "object - the field does not match the scheme;", @@ -197,7 +203,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": ["string"], }, "object - the field does not match the scheme;", @@ -206,7 +212,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": [{}], }, "object - the field does not match the scheme;", @@ -215,7 +221,7 @@ def setUp(self) -> None: { "name": "test", "role": {"id": cluster_adm_role_pk}, - "user": [{"id": self.test_user.pk}], + "group": [{"id": self.test_user_group.pk}], "object": [{"id": 1}], }, "object - the field does not match the scheme;", @@ -287,7 +293,7 @@ def test_create_policy(self): ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["desc"], response_data) + self.assertEqual(response_data, response.json()["desc"]) def test_create_role(self): for request_data, response_data in self.role_data: @@ -304,7 +310,7 @@ def test_create_role(self): def test_patch_empty_role_id(self): role = Role.objects.create(name="Test role", module_name="rbac.roles", class_name="ModelRole") policy = Policy.objects.create(name="Test policy", role=role, built_in=False) - policy.user.add(self.test_user) + policy.group.add(self.test_user_group) path = reverse(viewname="v1:rbac:policy-detail", kwargs={"pk": policy.pk}) data_valid = { @@ -315,6 +321,7 @@ def test_patch_empty_role_id(self): "role": { "id": role.pk, }, + "group": [{"id": self.test_user_group.pk}], } response = self.client.patch(path=path, data=data_valid, content_type=APPLICATION_JSON) diff --git a/python/rbac/tests/test_policy/base.py b/python/rbac/tests/test_policy/base.py index 1ac6d00c61..ad2c5b7b34 100644 --- a/python/rbac/tests/test_policy/base.py +++ b/python/rbac/tests/test_policy/base.py @@ -11,7 +11,7 @@ # limitations under the License. 
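# Editor's note: after this change a Policy can only be bound to groups, so the minimal
# service-layer creation flow looks roughly like the sketch below. The concrete role,
# cluster and group names are illustrative; _check_objects() still has to accept the
# role/object combination, which is assumed here rather than guaranteed by this diff.
from cm.models import Cluster
from rbac.models import Group, Role
from rbac.services.policy import policy_create

group = Group.objects.create(name="cluster_admins")
policy = policy_create(
    name="cluster admins",
    role=Role.objects.get(name="Cluster Administrator"),
    object=[Cluster.objects.get(name="Test Cluster")],
    group=[group],  # an empty group list now raises AdcmEx with code POLICY_INTEGRITY_ERROR
)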
from cm.models import Bundle, ClusterObject, Host, ObjectType, Prototype -from django.conf import settings +from rbac.models import Group from adcm.tests.base import BaseTestCase @@ -21,15 +21,17 @@ def setUp(self) -> None: super().setUp() self.new_user_password = "new_user_password" - self.new_user = self.get_new_user(username="new_user", password=self.new_user_password) + self.new_user_group = Group.objects.create(name="new_group") + self.new_user = self.get_new_user( + username="new_user", password=self.new_user_password, group_pk=self.new_user_group.pk + ) + bundle = self.upload_and_load_bundle( - path=( - settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "test_cluster_for_cluster_admin_role.tar" - ), + path=(self.base_dir / "python" / "rbac" / "tests" / "files" / "test_cluster_for_cluster_admin_role.tar"), ) self.cluster = self.create_cluster(bundle_pk=bundle.pk, name="Test Cluster") self.provider = self.create_provider( - bundle_path=settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "provider.tar", + bundle_path=self.base_dir / "python" / "rbac" / "tests" / "files" / "provider.tar", name="Test Provider", ) host_pks = self.create_hosts() diff --git a/python/rbac/tests/test_policy/test_cluster_admin_role.py b/python/rbac/tests/test_policy/test_cluster_admin_role.py index 733d539aa2..93a8fa8541 100644 --- a/python/rbac/tests/test_policy/test_cluster_admin_role.py +++ b/python/rbac/tests/test_policy/test_cluster_admin_role.py @@ -12,7 +12,7 @@ from unittest.mock import patch -from cm.models import Action, Host, MaintenanceMode +from cm.models import Action, Host from django.urls import reverse from rbac.tests.test_policy.base import PolicyBaseTestCase from rest_framework.response import Response @@ -31,13 +31,15 @@ class PolicyWithClusterAdminRoleTestCase(PolicyBaseTestCase): def setUp(self) -> None: super().setUp() - self.create_policy(role_name="Cluster Administrator", obj=self.cluster, user_pk=self.new_user.pk) + self.create_policy(role_name="Cluster Administrator", obj=self.cluster, group_pk=self.new_user_group.pk) self.another_user_log_in(username=self.new_user.username, password=self.new_user_password) def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-statements - required_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - required_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + required_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + required_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) self.assertEqual( required_perms, @@ -290,7 +292,7 @@ def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-state response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.last_service_pk}), data={ - "maintenance_mode": MaintenanceMode.ON, + "maintenance_mode": "ON", }, content_type=APPLICATION_JSON, ) @@ -300,7 +302,7 @@ def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-state response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.last_component_pk}), data={ - "maintenance_mode": MaintenanceMode.ON, + "maintenance_mode": "ON", }, content_type=APPLICATION_JSON, ) @@ -310,7 +312,7 @@ def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-state response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", 
kwargs={"host_id": self.last_host_pk}), data={ - "maintenance_mode": MaintenanceMode.ON, + "maintenance_mode": "ON", }, content_type=APPLICATION_JSON, ) @@ -320,7 +322,7 @@ def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-state response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.last_host_pk}), data={ - "maintenance_mode": MaintenanceMode.OFF, + "maintenance_mode": "OFF", }, content_type=APPLICATION_JSON, ) @@ -397,8 +399,10 @@ def test_policy_with_cluster_admin_role(self): # pylint: disable=too-many-state self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) def test_adding_new_policy_keeps_previous_permission(self): - required_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - required_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + required_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + required_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) self.assertEqual( required_perms, @@ -456,10 +460,12 @@ def test_adding_new_policy_keeps_previous_permission(self): self.client.post(path=reverse(viewname="v1:rbac:logout")) self.login() - self.create_policy(role_name="Provider Administrator", obj=self.provider, user_pk=self.new_user.pk) + self.create_policy(role_name="Provider Administrator", obj=self.provider, group_pk=self.new_user_group.pk) - required_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - required_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + required_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + required_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) self.assertEqual( required_perms, diff --git a/python/rbac/tests/test_policy/test_cluster_admin_service_admin_hostcomponent.py b/python/rbac/tests/test_policy/test_cluster_admin_service_admin_hostcomponent.py index e5c5f62238..f55420dc41 100644 --- a/python/rbac/tests/test_policy/test_cluster_admin_service_admin_hostcomponent.py +++ b/python/rbac/tests/test_policy/test_cluster_admin_service_admin_hostcomponent.py @@ -13,6 +13,7 @@ from cm.models import ClusterObject, Host, ServiceComponent from django.urls import reverse +from rbac.models import Group from rbac.tests.test_policy.base import PolicyBaseTestCase from rest_framework.response import Response from rest_framework.status import HTTP_201_CREATED @@ -24,11 +25,14 @@ class ClusterAdminServiceAdminHostcomponentTestCase(PolicyBaseTestCase): def setUp(self) -> None: super().setUp() - self.new_user_2 = self.get_new_user(username="new_user_2", password=self.new_user_password) + self.new_user_group_2 = Group.objects.create(name="new_group_2") + self.new_user_2 = self.get_new_user( + username="new_user_2", password=self.new_user_password, group_pk=self.new_user_group_2.pk + ) self.service = ClusterObject.objects.get(prototype__name="service_1") - self.create_policy(role_name="Cluster Administrator", obj=self.cluster, user_pk=self.new_user.pk) - self.create_policy(role_name="Service Administrator", obj=self.service, user_pk=self.new_user_2.pk) + self.create_policy(role_name="Cluster Administrator", obj=self.cluster, group_pk=self.new_user_group.pk) + self.create_policy(role_name="Service Administrator", obj=self.service, group_pk=self.new_user_group_2.pk) def 
test_cluster_admin_can_change_host_config(self): response: Response = self.client.post( @@ -49,7 +53,7 @@ def test_cluster_admin_can_change_host_config(self): self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertIn( "cm.add_configlog", - {f"{perm.content_type.app_label}.{perm.codename}" for perm in self.new_user.user_permissions.all()}, + {f"{perm.content_type.app_label}.{perm.codename}" for perm in self.new_user_group.permissions.all()}, ) component = ServiceComponent.objects.get(prototype__name="component_1_1") @@ -65,7 +69,7 @@ def test_cluster_admin_can_change_host_config(self): self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertIn( "cm.add_configlog", - {f"{perm.content_type.app_label}.{perm.codename}" for perm in self.new_user.user_permissions.all()}, + {f"{perm.content_type.app_label}.{perm.codename}" for perm in self.new_user_group.permissions.all()}, ) with self.another_user_logged_in(username=self.new_user.username, password=self.new_user_password): diff --git a/python/rbac/tests/test_policy/test_group_policy.py b/python/rbac/tests/test_policy/test_group_policy.py index 21fbb43dff..3995503d37 100644 --- a/python/rbac/tests/test_policy/test_group_policy.py +++ b/python/rbac/tests/test_policy/test_group_policy.py @@ -13,7 +13,6 @@ from pathlib import Path from cm.models import Action, ConfigLog, ObjectType, ServiceComponent -from django.conf import settings from django.urls import reverse from rbac.models import Group from rest_framework.response import Response @@ -54,7 +53,7 @@ def setUp(self) -> None: ) provider = self.create_provider( - bundle_path=settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "provider.tar", + bundle_path=self.base_dir / "python" / "rbac" / "tests" / "files" / "provider.tar", name="Test Provider", ) host_1 = self.create_host_in_cluster(provider_pk=provider.pk, name="host-1", cluster_pk=cluster.pk) @@ -78,7 +77,7 @@ def setUp(self) -> None: def get_cluster(self): cluster_bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/rbac/tests/files/bundle_10.tar", ), ) @@ -118,8 +117,10 @@ def setUp(self) -> None: super().setUp() self.user_1_password = "user_1_password" - self.user_1 = self.get_new_user(username="user_1", password=self.user_1_password) - user_2 = self.get_new_user(username="user_2", password="user_2_password") + self.user_1_group = Group.objects.create(name="user_1_group") + self.user_2_group = Group.objects.create(name="user_2_group") + self.user_1 = self.get_new_user(username="user_1", password=self.user_1_password, group_pk=self.user_1_group.pk) + self.get_new_user(username="user_2", password="user_2_password", group_pk=self.user_2_group.pk) self.cluster = self.get_cluster() @@ -128,18 +129,18 @@ def setUp(self) -> None: self.create_policy( role_name="Cluster Administrator", obj=self.cluster, - user_pk=self.user_1.pk, + group_pk=self.user_1_group.pk, ) self.service_admin_policy_pk = self.create_policy( role_name="Service Administrator", obj=self.main_with_components_service, - user_pk=user_2.pk, + group_pk=self.user_2_group.pk, ) def get_cluster(self): cluster_bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/rbac/tests/files/bundle_10.tar", ), ) @@ -192,14 +193,15 @@ def setUp(self) -> None: super().setUp() self.user_1_password = "user_1_password" - self.user_1 = self.get_new_user(username="user_1", password=self.user_1_password) + self.user_1_group = Group.objects.create(name="user_1_group") + self.user_1 = 
self.get_new_user(username="user_1", password=self.user_1_password, group_pk=self.user_1_group.pk) cluster = self.get_cluster() action_service = self.create_service(cluster_pk=cluster.pk, name="actions_service") component = ServiceComponent.objects.get(prototype__name="single_component") provider = self.create_provider( - bundle_path=settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "provider.tar", + bundle_path=self.base_dir / "python" / "rbac" / "tests" / "files" / "provider.tar", name="Test Provider", ) self.host_1 = self.create_host_in_cluster(provider_pk=provider.pk, name="host-1", cluster_pk=cluster.pk) @@ -224,13 +226,13 @@ def setUp(self) -> None: self.create_policy( role_name=user_1_role_name, obj=cluster, - user_pk=self.user_1.pk, + group_pk=self.user_1_group.pk, ) def get_cluster(self): cluster_bundle = self.upload_and_load_bundle( path=Path( - settings.BASE_DIR, + self.base_dir, "python/rbac/tests/files/case3.tar", ), ) diff --git a/python/rbac/tests/test_policy/test_permissions.py b/python/rbac/tests/test_policy/test_permissions.py index 3592eaab08..5cd1283279 100644 --- a/python/rbac/tests/test_policy/test_permissions.py +++ b/python/rbac/tests/test_policy/test_permissions.py @@ -11,10 +11,10 @@ # limitations under the License. from django.contrib.auth.models import Group, Permission -from guardian.models import UserObjectPermission +from guardian.models import GroupObjectPermission from rbac.models import Group as RBACGroup from rbac.models import Policy -from rbac.roles import assign_user_or_group_perm +from rbac.roles import assign_group_perm from rbac.tests.test_policy.base import PolicyBaseTestCase @@ -22,11 +22,11 @@ class RemovePermissionsTestCase(PolicyBaseTestCase): def setUp(self) -> None: super().setUp() - self.create_policy(role_name="Cluster Administrator", obj=self.cluster, user_pk=self.new_user.pk) + self.create_policy(role_name="Cluster Administrator", obj=self.cluster, group_pk=self.new_user_group.pk) self.policy = Policy.objects.first() self.policy.group.add(RBACGroup.objects.create(name="test_group_1")) - assign_user_or_group_perm( + assign_group_perm( policy=self.policy, permission=Permission.objects.filter(codename="add_group")[0], obj=Group.objects.create(name="test_group_2"), @@ -36,16 +36,12 @@ def test_remove_permissions(self): model_permission_codenames = { policy_permission.permission.codename for policy_permission in self.policy.model_perm.all() } - user_object_permissions = { - user_object_permission.permission.codename for user_object_permission in self.policy.user_object_perm.all() - } group_object_permissions = { group_object_permission.permission.codename for group_object_permission in self.policy.group_object_perm.all() } self.assertTrue(model_permission_codenames) - self.assertTrue(user_object_permissions) self.assertTrue(group_object_permissions) self.policy.remove_permissions() @@ -55,21 +51,17 @@ def test_remove_permissions(self): model_permission_codenames = { policy_permission.permission.codename for policy_permission in self.policy.model_perm.all() } - user_object_permissions = { - user_object_permission.permission.codename for user_object_permission in self.policy.user_object_perm.all() - } group_object_permissions = { group_object_permission.permission.codename for group_object_permission in self.policy.group_object_perm.all() } self.assertFalse(model_permission_codenames) - self.assertFalse(user_object_permissions) self.assertFalse(group_object_permissions) class AssignPermissionsTestCase(PolicyBaseTestCase): def 
test_assign_permissions(self): - self.create_policy(role_name="Cluster Administrator", obj=self.cluster, user_pk=self.new_user.pk) + self.create_policy(role_name="Cluster Administrator", obj=self.cluster, group_pk=self.new_user_group.pk) - self.assertTrue(UserObjectPermission.objects.all()) + self.assertTrue(GroupObjectPermission.objects.all()) diff --git a/python/rbac/tests/test_policy/test_policy_cluster_admin_role_upgrade.py b/python/rbac/tests/test_policy/test_policy_cluster_admin_role_upgrade.py index 715632845f..a005c3a9a7 100644 --- a/python/rbac/tests/test_policy/test_policy_cluster_admin_role_upgrade.py +++ b/python/rbac/tests/test_policy/test_policy_cluster_admin_role_upgrade.py @@ -13,7 +13,6 @@ from unittest.mock import patch from cm.models import ServiceComponent, Upgrade -from django.conf import settings from django.urls import reverse from rbac.tests.test_policy.base import PolicyBaseTestCase from rest_framework.response import Response @@ -26,7 +25,7 @@ class PolicyWithClusterAdminRoleUpgradeTestCase(PolicyBaseTestCase): def setUp(self) -> None: super().setUp() - self.create_policy(role_name="Cluster Administrator", obj=self.cluster, user_pk=self.new_user.pk) + self.create_policy(role_name="Cluster Administrator", obj=self.cluster, group_pk=self.new_user_group.pk) self.another_user_log_in(username=self.new_user.username, password=self.new_user_password) self.upgrade_cluster() self.component_upgrade = ServiceComponent.objects.get(prototype__name="component_upgrade") @@ -34,7 +33,7 @@ def setUp(self) -> None: def upgrade_cluster(self): self.upload_and_load_bundle( path=( - settings.BASE_DIR + self.base_dir / "python" / "rbac" / "tests" diff --git a/python/rbac/tests/test_policy/test_policy_delete.py b/python/rbac/tests/test_policy/test_policy_delete.py index 3be9439daf..7a01a6ca3d 100644 --- a/python/rbac/tests/test_policy/test_policy_delete.py +++ b/python/rbac/tests/test_policy/test_policy_delete.py @@ -35,14 +35,16 @@ def test_delete_policy(self): ) provider_policy_pk = self.create_policy( - role_name=provider_role_name, obj=self.provider, user_pk=self.new_user.pk + role_name=provider_role_name, obj=self.provider, group_pk=self.new_user_group.pk + ) + provider_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + provider_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} ) - provider_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - provider_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) - self.create_policy(role_name=cluster_role_name, obj=self.cluster, user_pk=self.new_user.pk) - cluster_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - cluster_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + self.create_policy(role_name=cluster_role_name, obj=self.cluster, group_pk=self.new_user_group.pk) + cluster_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + cluster_perms.update({perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()}) cluster_perms = cluster_perms - provider_perms cluster_perms.add("view_action") @@ -50,8 +52,8 @@ def test_delete_policy(self): path=reverse(viewname="v1:rbac:policy-detail", kwargs={"pk": provider_policy_pk}), ) - user_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - user_perms.update({perm.permission.codename for perm in 
self.new_user.userobjectpermission_set.all()}) + group_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + group_perms.update({perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()}) self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - self.assertSetEqual(user_perms, cluster_perms) + self.assertSetEqual(group_perms, cluster_perms) diff --git a/python/rbac/tests/test_policy/test_provider_admin_role.py b/python/rbac/tests/test_policy/test_provider_admin_role.py index cf53409895..dcda111dd3 100644 --- a/python/rbac/tests/test_policy/test_provider_admin_role.py +++ b/python/rbac/tests/test_policy/test_provider_admin_role.py @@ -26,11 +26,13 @@ class PolicyWithProviderAdminRole(PolicyBaseTestCase): def setUp(self) -> None: super().setUp() - self.create_policy(role_name="Provider Administrator", obj=self.provider, user_pk=self.new_user.pk) + self.create_policy(role_name="Provider Administrator", obj=self.provider, group_pk=self.new_user_group.pk) def test_policy_with_provider_admin_role(self): - required_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - required_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + required_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + required_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) self.assertEqual( required_perms, diff --git a/python/rbac/tests/test_policy/test_rbac.py b/python/rbac/tests/test_policy/test_rbac.py index 0034e00507..282eaafd19 100644 --- a/python/rbac/tests/test_policy/test_rbac.py +++ b/python/rbac/tests/test_policy/test_rbac.py @@ -30,6 +30,9 @@ def setUp(self) -> None: super().setUp() self.user = User.objects.create(username="user", is_active=True, is_superuser=False) + self.group = Group.objects.create(name="test_group") + self.group.user_set.add(self.user) + self.cluster = Cluster.objects.create(name="Cluster_1", prototype=self.clp) self.service_1 = ClusterObject.objects.create(cluster=self.cluster, prototype=self.sp_1) self.service_2 = ClusterObject.objects.create(cluster=self.cluster, prototype=self.sp_2) @@ -67,38 +70,17 @@ def clear_perm_cache(user): if hasattr(user, "_group_perm_cache"): delattr(user, "_group_perm_cache") - def test_model_policy(self): - policy = Policy.objects.create(name="MyPolicy", role=self.model_role()) - policy.user.add(self.user) - - self.assertNotIn(self.add_host_perm, self.user.user_permissions.all()) - self.assertFalse(self.user.has_perm("cm.add_host")) - self.clear_perm_cache(self.user) - - policy.apply() - - self.assertIn(self.add_host_perm, self.user.user_permissions.all()) - self.assertTrue(self.user.has_perm("cm.add_host")) - - self.clear_perm_cache(self.user) - policy.apply() - - self.assertTrue(self.user.has_perm("cm.add_host")) - def test_model_policy4group(self): - group = Group.objects.create(name="group") - group.user_set.add(self.user) - policy = Policy.objects.create(name="MyPolicy", role=self.model_role()) - policy.group.add(group) + policy.group.add(self.group) - self.assertNotIn(self.add_host_perm, group.permissions.all()) + self.assertNotIn(self.add_host_perm, self.group.permissions.all()) self.assertFalse(self.user.has_perm("cm.add_host")) self.clear_perm_cache(self.user) policy.apply() - self.assertIn(self.add_host_perm, group.permissions.all()) + self.assertIn(self.add_host_perm, self.group.permissions.all()) 
self.assertTrue(self.user.has_perm("cm.add_host")) self.clear_perm_cache(self.user) @@ -109,7 +91,7 @@ def test_model_policy4group(self): def test_object_policy(self): cluster2 = Cluster.objects.create(name="Cluster_2", prototype=self.clp) policy = Policy.objects.create(name="MyPolicy", role=self.object_role_view_perm_cluster()) - policy.user.add(self.user) + policy.group.add(self.group) self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) self.assertFalse(self.user.has_perm("cm.view_cluster", cluster2)) @@ -120,11 +102,11 @@ def test_object_policy(self): self.assertTrue(self.user.has_perm("cm.view_cluster", self.cluster)) self.assertFalse(self.user.has_perm("cm.view_cluster", cluster2)) - def test_object_policy_remove_user(self): + def test_object_policy_remove_group(self): cluster2 = Cluster.objects.create(name="Cluster_2", prototype=self.clp) policy = Policy.objects.create(name="MyPolicy", role=self.object_role()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.cluster) self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) @@ -134,32 +116,15 @@ def test_object_policy_remove_user(self): self.assertTrue(self.user.has_perm("cm.view_cluster", self.cluster)) self.assertFalse(self.user.has_perm("cm.view_cluster", cluster2)) - policy.user.remove(self.user) + policy.group.remove(self.group) policy.apply() self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) self.assertFalse(self.user.has_perm("cm.view_cluster", cluster2)) - def test_object_policy4group(self): - cluster2 = Cluster.objects.create(name="Cluster_2", prototype=self.clp) - group = Group.objects.create(name="group") - group.user_set.add(self.user) - - policy = Policy.objects.create(name="MyPolicy", role=self.object_role()) - policy.group.add(group) - - policy.add_object(self.cluster) - - self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) - - policy.apply() - - self.assertTrue(self.user.has_perm("cm.view_cluster", self.cluster)) - self.assertFalse(self.user.has_perm("cm.view_cluster", cluster2)) - def test_parent_policy4cluster(self): policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.cluster) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -178,7 +143,7 @@ def test_parent_policy4cluster(self): def test_parent_policy4service(self): policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.service_1) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -198,7 +163,7 @@ def test_parent_policy4service(self): def test_parent_policy4service2(self): policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.service_2) self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) @@ -219,7 +184,7 @@ def test_parent_policy4service2(self): def test_parent_policy4component(self): policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.component_11) self.assertFalse(self.user.has_perm("cm.view_cluster", self.cluster)) @@ -246,7 +211,7 @@ def test_parent_policy4host_in_cluster(self): 
add_host_to_cluster(self.cluster, host1) add_host_to_cluster(self.cluster, host2) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.cluster) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -281,7 +246,7 @@ def test_parent_policy4host_in_service(self): ], ) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.service_1) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -330,7 +295,7 @@ def test_parent_policy4host_in_component(self): ) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.component_21) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -356,7 +321,7 @@ def test_parent_policy4host_in_component(self): def test_parent_policy4provider(self): provider, host1, host2 = self.get_hosts_and_provider() policy = Policy.objects.create(role=self.object_role_custom_perm_provider_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(provider) self.assertFalse(self.user.has_perm("cm.change_config_of_hostprovider", provider)) @@ -371,7 +336,7 @@ def test_parent_policy4provider(self): def test_simple_parent_policy(self): policy = Policy.objects.create(role=self.model_role_view_cluster_service_component_perm()) - policy.user.add(self.user) + policy.group.add(self.group) self.assertFalse(self.user.has_perm("cm.view_cluster")) self.assertFalse(self.user.has_perm("cm.view_clusterobject")) @@ -388,7 +353,7 @@ def test_add_service(self): sp_3 = Prototype.obj.create(bundle=self.bundle_1, type="service", name="service_3") policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.cluster) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -420,7 +385,7 @@ def test_add_host(self): ) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.cluster) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) @@ -459,7 +424,7 @@ def test_add_hc(self): ], ) policy = Policy.objects.create(role=self.object_role_custom_perm_service_component_host()) - policy.user.add(self.user) + policy.group.add(self.group) policy.add_object(self.service_1) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) diff --git a/python/rbac/tests/test_policy/test_remove_user_from_policy.py b/python/rbac/tests/test_policy/test_remove_user_from_policy.py index a2d380f7b6..b524a0ca59 100644 --- a/python/rbac/tests/test_policy/test_remove_user_from_policy.py +++ b/python/rbac/tests/test_policy/test_remove_user_from_policy.py @@ -12,6 +12,7 @@ from cm.models import ClusterObject, ObjectType from django.urls import reverse +from rbac.models import Group from rbac.tests.test_policy.base import PolicyBaseTestCase from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -25,7 +26,11 @@ def setUp(self) -> None: super().setUp() self.service = ClusterObject.objects.first() - self.new_user_2 = 
self.get_new_user(username="new_user_2", password="new_user_2_password") + + self.new_user_2_group = Group.objects.create(name="new_user_2_group_name") + self.new_user_2 = self.get_new_user( + username="new_user_2", password="new_user_2_password", group_pk=self.new_user_2_group.pk + ) self.new_user_role_name = "new_user_role" self.create_role( @@ -44,27 +49,29 @@ def setUp(self) -> None: self.edit_cluster_config_policy_pk = self.create_policy( role_name=self.new_user_role_name, obj=self.cluster, - user_pk=self.new_user.pk, + group_pk=self.new_user_group.pk, ) self.edit_service_config_policy_pk = self.create_policy( role_name=self.new_user_2_role_name, obj=self.service, - user_pk=self.new_user_2.pk, + group_pk=self.new_user_2_group.pk, ) - self.new_user_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - self.new_user_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + self.new_user_group_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + self.new_user_group_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) - self.new_user_2_perms = {perm.codename for perm in self.new_user_2.user_permissions.all()} - self.new_user_2_perms.update( - {perm.permission.codename for perm in self.new_user_2.userobjectpermission_set.all()} + self.new_user_2_group_perms = {perm.codename for perm in self.new_user_2_group.permissions.all()} + self.new_user_2_group_perms.update( + {perm.permission.codename for perm in self.new_user_2_group.groupobjectpermission_set.all()} ) def test_remove_user_from_policy(self): response: Response = self.client.patch( path=reverse(viewname="v1:rbac:policy-detail", kwargs={"pk": self.edit_cluster_config_policy_pk}), data={ - "user": [{"id": self.new_user_2.pk}], + "group": [{"id": self.new_user_2_group.pk}], "object": [{"name": self.cluster.name, "type": ObjectType.CLUSTER, "id": self.cluster.pk}], }, content_type=APPLICATION_JSON, @@ -75,7 +82,7 @@ def test_remove_user_from_policy(self): response: Response = self.client.patch( path=reverse(viewname="v1:rbac:policy-detail", kwargs={"pk": self.edit_service_config_policy_pk}), data={ - "user": [{"id": self.new_user.pk}], + "group": [{"id": self.new_user_group.pk}], "object": [{"name": self.service.name, "type": ObjectType.SERVICE, "id": self.service.pk}], }, content_type=APPLICATION_JSON, @@ -83,13 +90,15 @@ def test_remove_user_from_policy(self): self.assertEqual(response.status_code, HTTP_200_OK) - new_new_user_perms = {perm.codename for perm in self.new_user.user_permissions.all()} - new_new_user_perms.update({perm.permission.codename for perm in self.new_user.userobjectpermission_set.all()}) + new_new_user_perms = {perm.codename for perm in self.new_user_group.permissions.all()} + new_new_user_perms.update( + {perm.permission.codename for perm in self.new_user_group.groupobjectpermission_set.all()} + ) - new_new_user_2_perms = {perm.codename for perm in self.new_user_2.user_permissions.all()} + new_new_user_2_perms = {perm.codename for perm in self.new_user_2_group.permissions.all()} new_new_user_2_perms.update( - {perm.permission.codename for perm in self.new_user_2.userobjectpermission_set.all()} + {perm.permission.codename for perm in self.new_user_2_group.groupobjectpermission_set.all()} ) - self.assertEqual(new_new_user_perms, self.new_user_2_perms) - self.assertEqual(new_new_user_2_perms, self.new_user_perms) + self.assertEqual(new_new_user_perms, 
self.new_user_2_group_perms) + self.assertEqual(new_new_user_2_perms, self.new_user_group_perms) diff --git a/python/rbac/tests/test_policy/test_service_admin_role.py b/python/rbac/tests/test_policy/test_service_admin_role.py index ec4d37fc7a..ed895f7422 100644 --- a/python/rbac/tests/test_policy/test_service_admin_role.py +++ b/python/rbac/tests/test_policy/test_service_admin_role.py @@ -17,8 +17,8 @@ Prototype, ServiceComponent, ) -from django.conf import settings from django.urls import reverse +from rbac.models import Group from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -30,24 +30,27 @@ def setUp(self) -> None: super().setUp() self.new_user_password = "new_user_password" - self.new_user = self.get_new_user(username="new_user", password=self.new_user_password) + self.new_user_group = Group.objects.create(name="new_group") + new_user = self.get_new_user( + username="new_user", password=self.new_user_password, group_pk=self.new_user_group.pk + ) self.cluster_bundle = self.upload_and_load_bundle( - path=settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "service_admin_cluster.tar" + path=self.base_dir / "python" / "rbac" / "tests" / "files" / "service_admin_cluster.tar" ) self.cluster_pk = self.get_cluster_pk() self.host_pk = self.get_host_pk() self.service = self.get_service() self.add_host_to_cluster(cluster_pk=self.cluster_pk, host_pk=self.host_pk) - self.create_policy(role_name="Service Administrator", obj=self.service, user_pk=self.new_user.pk) - self.another_user_log_in(username=self.new_user.username, password=self.new_user_password) + self.create_policy(role_name="Service Administrator", obj=self.service, group_pk=self.new_user_group.pk) + self.another_user_log_in(username=new_user.username, password=self.new_user_password) self.group_config_pk = self.get_group_config_pk() def get_provider_pk(self): provider_bundle = self.upload_and_load_bundle( - path=settings.BASE_DIR / "python" / "rbac" / "tests" / "files" / "service_admin_provider.tar" + path=self.base_dir / "python" / "rbac" / "tests" / "files" / "service_admin_provider.tar" ) response: Response = self.client.post( path=reverse(viewname="v1:provider"), diff --git a/python/rbac/tests/test_role.py b/python/rbac/tests/test_role.py index c2e6a6b076..d14ce51f67 100644 --- a/python/rbac/tests/test_role.py +++ b/python/rbac/tests/test_role.py @@ -23,7 +23,6 @@ ClusterObject, Host, HostProvider, - MaintenanceMode, ProductCategory, Prototype, ServiceComponent, @@ -575,7 +574,7 @@ def check_roles(self): self.assertEqual(sa_role_count, 6, "Roles missing from base roles") -# pylint: disable=too-many-instance-attributes, protected-access +# pylint: disable=protected-access class TestMMRoles(RBACBaseTestCase): def setUp(self) -> None: super().setUp() @@ -659,12 +658,12 @@ def test_change_service_maintenance_mode_failed(self): ) def test_mm_host_role(self): - policy_create(name="mm host policy", object=[self.host], role=self.mm_role_host, user=[self.test_user]) + policy_create(name="mm host policy", object=[self.host], role=self.mm_role_host, group=[self.test_user_group]) check_custom_perm(self.test_user, "change_maintenance_mode", self.host._meta.model_name, self.host) response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -675,7 +674,7 @@ def test_mm_cluster_role(self): 
name="mm cluster policy", object=[self.cluster], role=self.mm_role_cluster, - user=[self.test_user], + group=[self.test_user_group], ) check_custom_perm(self.test_user, "change_maintenance_mode", self.host._meta.model_name, self.host) check_custom_perm( @@ -688,7 +687,7 @@ def test_mm_cluster_role(self): response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -696,7 +695,7 @@ def test_mm_cluster_role(self): response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -704,7 +703,7 @@ def test_mm_cluster_role(self): response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -715,7 +714,7 @@ def test_mm_cl_adm_role(self): name="mm cluster policy", object=[self.cluster], role=Role.objects.get(name="Cluster Administrator"), - user=[self.test_user], + group=[self.test_user_group], ) check_custom_perm(self.test_user, "change_maintenance_mode", self.host._meta.model_name, self.host) check_custom_perm( @@ -728,7 +727,7 @@ def test_mm_cl_adm_role(self): response = self.client.post( path=reverse(viewname="v1:host-maintenance-mode", kwargs={"host_id": self.host.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -736,7 +735,7 @@ def test_mm_cl_adm_role(self): response = self.client.post( path=reverse(viewname="v1:component-maintenance-mode", kwargs={"component_id": self.component.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) @@ -744,7 +743,7 @@ def test_mm_cl_adm_role(self): response = self.client.post( path=reverse(viewname="v1:service-maintenance-mode", kwargs={"service_id": self.service.pk}), - data={"maintenance_mode": MaintenanceMode.ON}, + data={"maintenance_mode": "ON"}, format="json", content_type=APPLICATION_JSON, ) diff --git a/python/rbac/upgrade/role_spec.yaml b/python/rbac/upgrade/role_spec.yaml index b38bf03629..6648837b79 100644 --- a/python/rbac/upgrade/role_spec.yaml +++ b/python/rbac/upgrade/role_spec.yaml @@ -1,6 +1,6 @@ --- -version: 7 +version: 8 roles: - name: Add host diff --git a/python/task_runner.py b/python/task_runner.py index 59c0d96847..10aceef974 100755 --- a/python/task_runner.py +++ b/python/task_runner.py @@ -20,6 +20,7 @@ import adcm.init_django # pylint: disable=unused-import +from logging import getLogger from cm.errors import AdcmEx from cm.job import finish_task, re_prepare_job from cm.logger import logger @@ -29,6 +30,8 @@ from django.core.exceptions import ObjectDoesNotExist from django.utils import timezone + +error_logger = getLogger("task_runner_err") TASK_ID = 0 @@ -101,7 +104,7 @@ def set_log_body(job): LogStorage.objects.filter(job=job, name=log_storage.name, type=log_storage.type).update(body=body) -def run_task(task_id: int, args: str | None = None) -> None: # pylint: disable=too-many-statements +def run_task(task_id: int, args: str | None = None) -> None: # pylint: 
disable=too-many-statements,too-many-branches logger.debug("task_runner.py called as: %s", sys.argv) try: task = TaskLog.objects.get(id=task_id) @@ -133,33 +136,38 @@ def run_task(task_id: int, args: str | None = None) -> None: # pylint: disable= count = 0 res = 0 for job in jobs: - job.refresh_from_db() - if args == "restart" and job.status == JobStatus.SUCCESS: - logger.info('skip job #%s status "%s" of task #%s', job.id, job.status, task_id) - continue - - task.refresh_from_db() - re_prepare_job(task, job) - job.start_date = timezone.now() - job.save() - res = run_job(task.id, job.id, err_file) - set_log_body(job) - - # For multi jobs task object state and/or config can be changed by adcm plugins - if task.task_object is not None: - try: - task.task_object.refresh_from_db() - except ObjectDoesNotExist: - task.object_id = 0 - task.object_type = None - - job.refresh_from_db() - count += 1 - if res != 0: - task.refresh_from_db() - if job.status == JobStatus.ABORTED and task.status != JobStatus.ABORTED: + try: + job.refresh_from_db() + if args == "restart" and job.status == JobStatus.SUCCESS: + logger.info('skip job #%s status "%s" of task #%s', job.id, job.status, task_id) continue + task.refresh_from_db() + re_prepare_job(task, job) + job.start_date = timezone.now() + job.save() + res = run_job(task.id, job.id, err_file) + set_log_body(job) + + # For multi jobs task object state and/or config can be changed by adcm plugins + if task.task_object is not None: + try: + task.task_object.refresh_from_db() + except ObjectDoesNotExist: + task.object_id = 0 + task.object_type = None + + job.refresh_from_db() + count += 1 + if res != 0: + task.refresh_from_db() + if job.status == JobStatus.ABORTED and task.status != JobStatus.ABORTED: + continue + + break + except Exception: # pylint: disable=broad-exception-caught + error_logger.exception("Task #%s: Error processing job #%s", task_id, job.pk) + res = 1 break if job is not None: diff --git a/web/src/app/admin/policies/policies.component.ts b/web/src/app/admin/policies/policies.component.ts index 2feef6916c..503dc600c0 100644 --- a/web/src/app/admin/policies/policies.component.ts +++ b/web/src/app/admin/policies/policies.component.ts @@ -10,12 +10,11 @@ import { RbacEntityListDirective } from '@app/abstract-directives/rbac-entity-li import { ListService } from '@app/shared/components/list/list.service'; import { SocketState } from '@app/core/store'; import { RbacPolicyService } from '@app/services/rbac-policy.service'; -import { ADD_SERVICE_PROVIDER } from '../../shared/add-component/add-service-model'; -import { AddButtonComponent } from '../../shared/add-component'; -import { RbacPolicyFormComponent } from '../../components/rbac/policy-form/rbac-policy-form.component'; -import { RbacRoleModel } from '../../models/rbac/rbac-role.model'; -import { RbacUserModel } from '../../models/rbac/rbac-user.model'; -import { RbacGroupModel } from '../../models/rbac/rbac-group.model'; +import { ADD_SERVICE_PROVIDER } from '@app/shared/add-component/add-service-model'; +import { AddButtonComponent } from '@app/shared/add-component'; +import { RbacPolicyFormComponent } from '@app/components/rbac/policy-form/rbac-policy-form.component'; +import { RbacRoleModel } from '@app/models/rbac/rbac-role.model'; +import { RbacGroupModel } from '@app/models/rbac/rbac-group.model'; @Component({ selector: 'app-policies', @@ -49,11 +48,6 @@ export class PoliciesComponent extends RbacEntityListDirective label: 'Role', value: (row) => (row.role && (row.role as any as 
RbacRoleModel).display_name) ?? '', }, - { - label: 'Users', - value: (row) => row.user.map((user: RbacUserModel) => user.username).join(', '), - className: 'one-line-string', - }, { label: 'Groups', value: (row) => row.group.map((group: RbacGroupModel) => group.name).join(', '), diff --git a/web/src/app/adwp/lib/form-element/input-select/input-select.component.scss b/web/src/app/adwp/lib/form-element/input-select/input-select.component.scss index 8e5f72e4ff..1185759cdd 100644 --- a/web/src/app/adwp/lib/form-element/input-select/input-select.component.scss +++ b/web/src/app/adwp/lib/form-element/input-select/input-select.component.scss @@ -1 +1,14 @@ .row {display:flex;} + +::ng-deep .mat-form-field { + + &.mat-form-field-invalid { + + adwp-select { + + .mat-select-arrow { + color: #ff9800 !important; + } + } + } +} diff --git a/web/src/app/components/columns/signature-column/signature-column.component.html b/web/src/app/components/columns/signature-column/signature-column.component.html new file mode 100644 index 0000000000..df5db5e15a --- /dev/null +++ b/web/src/app/components/columns/signature-column/signature-column.component.html @@ -0,0 +1,5 @@ +
{{ firstCharToUpperCase(row['signature_status']) }}
diff --git a/web/src/app/components/columns/signature-column/signature-column.component.scss b/web/src/app/components/columns/signature-column/signature-column.component.scss new file mode 100644 index 0000000000..8741dd990e --- /dev/null +++ b/web/src/app/components/columns/signature-column/signature-column.component.scss @@ -0,0 +1,36 @@ +:host { + div { + display: inline-flex; + align-items: center; + + &:after { + content: '\A'; + width: 6px; + height: 6px; + border-radius: 50%; + display: inline-block; + margin: 0 10px; + } + + &.valid { + + &:after { + background: #00D2A0; + } + } + + &.invalid { + + &:after { + background: #C64A6F; + } + } + + &.absent { + + &:after { + background: #FCFCFD; + } + } + } +} diff --git a/web/src/app/components/columns/signature-column/signature-column.component.ts b/web/src/app/components/columns/signature-column/signature-column.component.ts new file mode 100644 index 0000000000..c426d3a1a4 --- /dev/null +++ b/web/src/app/components/columns/signature-column/signature-column.component.ts @@ -0,0 +1,22 @@ +import { Component, Input } from "@angular/core"; +import { AdwpCellComponent } from "@app/adwp"; + +export enum SignatureStatus { + Valid = 'valid', + Invalid = 'invalid', + Absent = 'absent', +} + +@Component({ + selector: 'app-signature-column', + templateUrl: './signature-column.component.html', + styleUrls: ['./signature-column.component.scss'] +}) +export class SignatureColumnComponent implements AdwpCellComponent { + @Input() row: T; + signatureStatus = SignatureStatus; + + firstCharToUpperCase(string){ + return string[0].toUpperCase() + string.slice(1).toLowerCase(); + } +} diff --git a/web/src/app/components/concern/concern-item/concern-item.component.html b/web/src/app/components/concern/concern-item/concern-item.component.html index c15554e1ec..f550f69f2d 100644 --- a/web/src/app/components/concern/concern-item/concern-item.component.html +++ b/web/src/app/components/concern/concern-item/concern-item.component.html @@ -12,32 +12,32 @@ {{placeholder.name}} {{placeholder.name}} {{placeholder.name}} {{placeholder.name}} {{placeholder.name}} {{placeholder.name}} {{placeholder.name}} diff --git a/web/src/app/components/popover/popover.component.scss b/web/src/app/components/popover/popover.component.scss index c7aa306081..d0b880db3f 100644 --- a/web/src/app/components/popover/popover.component.scss +++ b/web/src/app/components/popover/popover.component.scss @@ -4,8 +4,8 @@ font-family: Roboto, "Helvetica Neue", sans-serif; color: #fff; font-size: 14px; - position: absolute; - display: block; + position: fixed; + display: flex; border: solid 1px #455A64; padding: 0; background-color: #37474F; diff --git a/web/src/app/components/rbac/policy-form/rbac-policy-form-step-one/rbac-policy-form-step-one.component.html b/web/src/app/components/rbac/policy-form/rbac-policy-form-step-one/rbac-policy-form-step-one.component.html index bb25ad378a..66268d8226 100644 --- a/web/src/app/components/rbac/policy-form/rbac-policy-form-step-one/rbac-policy-form-step-one.component.html +++ b/web/src/app/components/rbac/policy-form/rbac-policy-form-step-one/rbac-policy-form-step-one.component.html @@ -5,34 +5,31 @@
- + - {{ role | adwpMapper:roles.label }} + {{ role | adwpMapper: roles.label }} + + Role is required. +
- - - - = ADWP_DEFAULT_MATCHER; @@ -29,7 +28,7 @@ export class RbacPolicyFormStepOneComponent extends BaseFormDirective { return f.invalid && (f.dirty || f.touched); } - hasError(error: string): boolean { - return this.form.hasError(error); + hasError(error: string, path): boolean { + return this.form.hasError(error, path); } } diff --git a/web/src/app/components/rbac/policy-form/rbac-policy-form.component.ts b/web/src/app/components/rbac/policy-form/rbac-policy-form.component.ts index 0bf1ec974d..ca9d4413fd 100644 --- a/web/src/app/components/rbac/policy-form/rbac-policy-form.component.ts +++ b/web/src/app/components/rbac/policy-form/rbac-policy-form.component.ts @@ -4,15 +4,14 @@ import { RbacFormDirective } from '@app/shared/add-component/rbac-form.directive import { RbacPolicyModel } from '@app/models/rbac/rbac-policy.model'; import { ADD_SERVICE_PROVIDER } from '@app/shared/add-component/add-service-model'; import { RbacPolicyService } from '@app/services/rbac-policy.service'; -import { atLeastOne } from '@app/components/rbac/policy-form/rbac-policy-form-step-one/validators/user-or-group-required'; import { IRbacObjectCandidateClusterModel, IRbacObjectCandidateHostModel, IRbacObjectCandidateProviderModel, IRbacObjectCandidateServiceModel -} from '../../../models/rbac/rbac-object-candidate'; +} from '@app/models/rbac/rbac-object-candidate'; import { rbacPolicyObjectValidator } from './validators/object-validator'; -import { CustomValidators } from '../../../shared/validators/custom-validators'; +import { CustomValidators } from '@app/shared/validators/custom-validators'; const INITIAL_OBJECT = { cluster: [], @@ -81,7 +80,6 @@ export class RbacPolicyFormComponent extends RbacFormDirective name: value.name, description: value.description || '', role: value.role, - user: value.user, group: value.group }, { @@ -112,10 +110,7 @@ export class RbacPolicyFormComponent extends RbacFormDirective ]), description: new FormControl(null), role: roleControl, - user: new FormControl([]), - group: new FormControl([]) - }, { - validators: [atLeastOne('user', 'group')] + group: new FormControl([], [CustomValidators.required]) }), new FormGroup({ object: new FormGroup({ diff --git a/web/src/app/entry/bundle/bundle-list.component.ts b/web/src/app/entry/bundle/bundle-list.component.ts index 802d6f4b56..e4f61e459a 100644 --- a/web/src/app/entry/bundle/bundle-list.component.ts +++ b/web/src/app/entry/bundle/bundle-list.component.ts @@ -70,6 +70,7 @@ export class BundleListComponent extends AdwpListDirective { } }, ListFactory.descriptionColumn(), + ListFactory.signatureStatusColumn(), ListFactory.deleteColumn(this), ] as IColumns; diff --git a/web/src/app/factories/list.factory.ts b/web/src/app/factories/list.factory.ts index fad624091e..1d3a848702 100644 --- a/web/src/app/factories/list.factory.ts +++ b/web/src/app/factories/list.factory.ts @@ -15,6 +15,7 @@ import { editColumnValues, NameEditColumnComponent } from "@app/components/columns/name-edit-column/name-edit-column.component"; +import { SignatureColumnComponent } from "@app/components/columns/signature-column/signature-column.component"; export type NameEditColumn = IComponentColumn & { column_rules: editColumnValues; }; @@ -92,6 +93,17 @@ export class ListFactory { }; } + static signatureStatusColumn(): IComponentColumn { + return { + label: 'Signature', + sort: 'signatureStatus', + type: 'component', + className: 'width100', + headerClassName: 'width100', + component: SignatureColumnComponent, + }; + } + static actionsButton(listDirective: 
ConcernListDirective): IComponentColumn { return { label: 'Actions', diff --git a/web/src/app/models/bundle.ts b/web/src/app/models/bundle.ts index 18d24fccf3..95c53942a7 100644 --- a/web/src/app/models/bundle.ts +++ b/web/src/app/models/bundle.ts @@ -1,4 +1,5 @@ import { AdcmEntity } from './entity'; +import { SignatureStatus } from '@app/components/columns/signature-column/signature-column.component'; export interface IBundle extends AdcmEntity { adcm_min_version: string; @@ -14,4 +15,6 @@ export interface IBundle extends AdcmEntity { update: string; url: string; version: string; + signature_status: SignatureStatus; + } diff --git a/web/src/app/models/concern/concern-reason.ts b/web/src/app/models/concern/concern-reason.ts index 098f2419f1..e6f46ecbfe 100644 --- a/web/src/app/models/concern/concern-reason.ts +++ b/web/src/app/models/concern/concern-reason.ts @@ -21,14 +21,17 @@ export enum IMPlaceholderItemType { Service = 'service', Component = 'component', HostProvider = 'provider', + Flag = 'flag', Host = 'host', Job = 'job', } export interface IMPlaceholderItem { - type?: IMPlaceholderItemType; name: string; - ids: { [id: string]: number } | number; + params: { + [id: string]: number; + }; + type?: IMPlaceholderItemType; } export interface IMPlaceholderActionRun extends IMPlaceholderItem { diff --git a/web/src/app/shared/configuration/scheme/item.component.spec.ts b/web/src/app/shared/configuration/scheme/item.component.spec.ts index 6822612ee8..7547353549 100644 --- a/web/src/app/shared/configuration/scheme/item.component.spec.ts +++ b/web/src/app/shared/configuration/scheme/item.component.spec.ts @@ -33,12 +33,13 @@ describe('ItemComponent', () => { const item: IControl = { name: 'test', type: 'string', - rules: { name: 'test', type: 'string', path: ['test'], validator: {}, controlType: 'textbox' }, + rules: { name: 'test', type: 'string', path: ['test'], validator: {}, controlType: 'textbox', isInvisible: false }, form: new FormGroup({ test: new FormControl() }), parent: 'dict', value: {}, }; component.item = item; + component.invisibleItems = ['field']; fixture.detectChanges(); }); diff --git a/web/src/app/shared/configuration/scheme/item.component.ts b/web/src/app/shared/configuration/scheme/item.component.ts index 1b9f9ff5a8..316929b2ff 100644 --- a/web/src/app/shared/configuration/scheme/item.component.ts +++ b/web/src/app/shared/configuration/scheme/item.component.ts @@ -30,13 +30,13 @@ import { IControl } from './scheme.service'; -
+
{{ item.name }} Field [{{ item.name }}] is required!
- + {{ item.name }} @@ -53,6 +53,7 @@ import { IControl } from './scheme.service'; }) export class ItemComponent implements OnInit { @Input() item: IControl; + @Input() invisibleItems: string[]; @Input() index: number; @Input() isReadOnly = false; @Output() remove = new EventEmitter(); diff --git a/web/src/app/shared/configuration/scheme/root.component.html b/web/src/app/shared/configuration/scheme/root.component.html index eb673adb16..9bf48492f0 100644 --- a/web/src/app/shared/configuration/scheme/root.component.html +++ b/web/src/app/shared/configuration/scheme/root.component.html @@ -9,12 +9,12 @@
- - + diff --git a/web/src/app/shared/configuration/scheme/root.component.spec.ts b/web/src/app/shared/configuration/scheme/root.component.spec.ts index b935367d53..aa0e7654d7 100644 --- a/web/src/app/shared/configuration/scheme/root.component.spec.ts +++ b/web/src/app/shared/configuration/scheme/root.component.spec.ts @@ -23,6 +23,7 @@ const item: IYField = { path: ['test'], controlType: 'textbox', validator: {}, + isInvisible: false }; describe('RootComponent', () => { diff --git a/web/src/app/shared/configuration/scheme/root.component.ts b/web/src/app/shared/configuration/scheme/root.component.ts index f544794535..be5d051e2d 100644 --- a/web/src/app/shared/configuration/scheme/root.component.ts +++ b/web/src/app/shared/configuration/scheme/root.component.ts @@ -26,6 +26,7 @@ export class RootComponent implements OnInit { @Input() options: IYContainer | IYField; @Input() value: TValue; @Input() isReadOnly = false; + @Input() invisibleItems: string[]; controls: IControl[] = []; diff --git a/web/src/app/shared/configuration/scheme/scheme.component.ts b/web/src/app/shared/configuration/scheme/scheme.component.ts index 1a2216301d..82db7c2448 100644 --- a/web/src/app/shared/configuration/scheme/scheme.component.ts +++ b/web/src/app/shared/configuration/scheme/scheme.component.ts @@ -32,13 +32,14 @@ import { SchemeService } from './scheme.service'; `, ], template: `
- + Field [{{ field.display_name }}] is required!
`, }) export class SchemeComponent extends FieldDirective implements OnInit, OnChanges { rules: IYField | IYContainer; current: AbstractControl; + invisibleItems: string[]; @ViewChild('root') root: RootComponent; @@ -59,6 +60,7 @@ export class SchemeComponent extends FieldDirective implements OnInit, OnChanges ngOnInit() { this.yspec.Root = this.field.limits.yspec; + this.invisibleItems = this.yspec.getInvisibleItems(); this.rules = this.yspec.build(); this.field.limits.rules = this.rules; this.rules.name = ''; diff --git a/web/src/app/shared/configuration/services/field.service.spec.ts b/web/src/app/shared/configuration/services/field.service.spec.ts index ecf056890a..a01c18dc03 100644 --- a/web/src/app/shared/configuration/services/field.service.spec.ts +++ b/web/src/app/shared/configuration/services/field.service.spec.ts @@ -392,6 +392,7 @@ describe('Configuration fields service', () => { pattern: null, required: true, }, + isInvisible: false }, { controlType: 'textbox', @@ -399,9 +400,10 @@ describe('Configuration fields service', () => { path: ['port', 'listener'], type: 'int', validator: { required: true, pattern: /^[-]?\d+$/ }, + isInvisible: false }, - { name: 'ssl enable', type: 'bool', path: [], controlType: 'boolean', validator: {} }, - { name: 'sasl protocol', type: 'string', path: [], controlType: 'textbox', validator: {} }, + { name: 'ssl enable', type: 'bool', path: [], controlType: 'boolean', validator: {}, isInvisible: false }, + { name: 'sasl protocol', type: 'string', path: [], controlType: 'textbox', validator: {}, isInvisible: false }, ], }, }; diff --git a/web/src/app/shared/configuration/services/field.service.ts b/web/src/app/shared/configuration/services/field.service.ts index 3e797000db..a49b61ec5f 100644 --- a/web/src/app/shared/configuration/services/field.service.ts +++ b/web/src/app/shared/configuration/services/field.service.ts @@ -369,6 +369,13 @@ export class FieldService { return Object.keys(value).reduce((p, c) => { const r = rules.options.find((b: any) => b.name === c); const v = r ? this.runYspec(value[c], r) : null; + + if (r.isInvisible) { + if (v === null) return { ...p, [c]: '' }; + + return { ...p, [c]: v }; + } + return v !== null ? 
diff --git a/web/src/app/shared/configuration/yspec/yspec.service.spec.ts b/web/src/app/shared/configuration/yspec/yspec.service.spec.ts
index 60630f3dca..a5ad050bb9 100644
--- a/web/src/app/shared/configuration/yspec/yspec.service.spec.ts
+++ b/web/src/app/shared/configuration/yspec/yspec.service.spec.ts
@@ -19,7 +19,8 @@ const simpleField: IYField = {
   validator: {
     required: false,
     pattern: null
-  }
+  },
+  isInvisible: false
 };
 
 const simpleStr: IYspec = {
@@ -121,14 +122,16 @@ describe('YspecService', () => {
            type: 'string',
            path: ['country', 'country_code'],
            controlType: 'textbox',
-           validator: { required: false, pattern: null }
+           validator: { required: false, pattern: null },
+           isInvisible: false
          },
          {
            name: 'code',
            type: 'integer',
            path: ['code', 'country_code'],
            controlType: 'textbox',
-           validator: { required: false, pattern: /^[-]?\d+$/ }
+           validator: { required: false, pattern: /^[-]?\d+$/ },
+           isInvisible: false
          }
        ]
      }
@@ -165,14 +168,16 @@ describe('YspecService', () => {
          type: 'string',
          path: ['name', 'policy'],
          controlType: 'textbox',
-         validator: { required: false, pattern: null }
+         validator: { required: false, pattern: null },
+         isInvisible: false
        },
        {
          name: 'move_factor',
          type: 'float',
          path: ['move_factor', 'policy'],
          controlType: 'textbox',
-         validator: { required: true, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ }
+         validator: { required: true, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ },
+         isInvisible: false
        },
        {
          type: 'list',
@@ -186,21 +191,24 @@ describe('YspecService', () => {
            type: 'string',
            path: ['name', 'volume', 'volumes_list', 'policy'],
            controlType: 'textbox',
-           validator: { required: true, pattern: null }
+           validator: { required: true, pattern: null },
+           isInvisible: false
          },
          {
            name: 'disk',
            type: 'string',
            path: ['disk', 'volume', 'volumes_list', 'policy'],
            controlType: 'textbox',
-           validator: { required: false, pattern: null }
+           validator: { required: false, pattern: null },
+           isInvisible: false
          },
          {
            name: 'max_data_part_size_bytes',
            type: 'int',
            path: ['max_data_part_size_bytes', 'volume', 'volumes_list', 'policy'],
            controlType: 'textbox',
-           validator: { required: false, pattern: /^[-]?\d+$/ }
+           validator: { required: false, pattern: /^[-]?\d+$/ },
+           isInvisible: false
          }
        ]
      }
@@ -236,28 +244,32 @@ describe('YspecService', () => {
        type: 'bool',
        path: ['key1'],
        controlType: 'boolean',
-       validator: { required: false, pattern: null }
+       validator: { required: false, pattern: null },
+       isInvisible: false
      },
      {
        name: 'key2',
        type: 'string',
        path: ['key2'],
        controlType: 'textbox',
-       validator: { required: false, pattern: null }
+       validator: { required: false, pattern: null },
+       isInvisible: false
      },
      {
        name: 'key3',
        type: 'int',
        path: ['key3'],
        controlType: 'textbox',
-       validator: { required: false, pattern: /^[-]?\d+$/ }
+       validator: { required: false, pattern: /^[-]?\d+$/ },
+       isInvisible: false
      },
      {
        name: 'key4',
        type: 'float',
        path: ['key4'],
        controlType: 'textbox',
-       validator: { required: false, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ }
+       validator: { required: false, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ },
+       isInvisible: false
      },
      {
        type: 'list',
@@ -270,7 +282,8 @@
          validator: {
            required: false,
            pattern: null
-         }
+         },
+         isInvisible: false
        }
      },
      {
@@ -288,7 +301,8 @@
          validator: {
            required: false,
            pattern: null
-         }
+         },
+         isInvisible: false
        }
      },
      {
@@ -300,28 +314,32 @@
        type: 'bool',
        path: ['key1', 'key2', 'key6'],
        controlType: 'boolean',
-       validator: { required: false, pattern: null }
+       validator: { required: false, pattern: null },
+       isInvisible: false
      },
      {
        name: 'key2',
        type: 'string',
        path: ['key2', 'key2', 'key6'],
        controlType: 'textbox',
-       validator: { required: false, pattern: null }
+       validator: { required: false, pattern: null },
+       isInvisible: false
      },
      {
        name: 'key3',
        type: 'int',
        path: ['key3', 'key2', 'key6'],
        controlType: 'textbox',
-       validator: { required: false, pattern: /^[-]?\d+$/ }
+       validator: { required: false, pattern: /^[-]?\d+$/ },
+       isInvisible: false
      },
      {
        name: 'key4',
        type: 'float',
        path: ['key4', 'key2', 'key6'],
        controlType: 'textbox',
-       validator: { required: false, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ }
+       validator: { required: false, pattern: /^[-]?[0-9]+(\.[0-9]+)?$/ },
+       isInvisible: false
      },
      {
        type: 'list',
@@ -334,7 +352,8 @@
          validator: {
            required: false,
            pattern: null
-         }
+         },
+         isInvisible: false
        }
      }
    ]
diff --git a/web/src/app/shared/configuration/yspec/yspec.service.ts b/web/src/app/shared/configuration/yspec/yspec.service.ts
index 9c3fd48bab..8437d51f8a 100644
--- a/web/src/app/shared/configuration/yspec/yspec.service.ts
+++ b/web/src/app/shared/configuration/yspec/yspec.service.ts
@@ -37,6 +37,7 @@ interface IYRoot {
   item?: string;
   items?: IRoot;
   required_items?: string[];
+  invisible_items?: string[];
   default_item?: string;
 }
@@ -68,6 +69,7 @@ export interface IYField {
   type: TNBase;
   controlType: controlType;
   validator: IValidator;
+  isInvisible: boolean;
 }
 
 /**
@@ -101,6 +103,18 @@ export class YspecService {
     return this.root;
   }
 
+  getInvisibleItems() {
+    const invisibleItems = [];
+    for (const item in this.Root) {
+      const { invisible_items } = this.Root[item];
+
+      if (invisible_items !== undefined) {
+        invisibleItems.push(...invisible_items);
+      }
+    }
+    return invisibleItems;
+  }
+
   build(rule = 'root', path: string[] = []): IYContainer | IYField {
     const { match, item, items } = this.Root[rule];
@@ -119,7 +133,9 @@
   }
 
   field(field: { type: TNBase; path: string[] }): IYField {
+    const invisibleItems = this.getInvisibleItems();
     const name = field.path.reverse()[0];
+    const isInvisible = invisibleItems.includes(name);
     return {
       name,
       type: field.type,
@@ -129,6 +145,7 @@
         required: this.findRule(field.path, 'required_items'),
         pattern: getPattern(field.type),
       },
+      isInvisible,
     };
   }
@@ -143,7 +160,7 @@
     return { type: 'list', name, options: this.build(item, [...path, item]) };
   }
 
-  dict(items: IRoot, path: string[]): IYContainer {
+  dict(items: IRoot, path: string[]): IYContainer {
     const name = [...path].reverse()[0] || 'root';
     return {
       type: 'dict',
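// Illustrative sketch only, not part of the diff: how the new `invisible_items` yspec key
// is expected to surface in the UI model, per the YspecService changes above. The spec
// literal below is hypothetical; the collection step mirrors getInvisibleItems(), which
// scans every rule in the root map and gathers the listed names.
type MiniYRoot = { match: string; items?: Record<string, string>; invisible_items?: string[] };

const spec: Record<string, MiniYRoot> = {
  root: { match: 'dict', items: { user: 'string', token: 'string' }, invisible_items: ['token'] },
  string: { match: 'string' },
};

// collect every invisible_items entry declared anywhere in the root map
const invisibleItems = Object.values(spec).flatMap((rule) => rule.invisible_items ?? []);
console.log(invisibleItems); // -> ['token']
// field() then builds the 'token' field with isInvisible: true, and SchemeComponent can
// hand the same list to RootComponent through its new invisibleItems input.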
diff --git a/web/src/app/shared/details/left-menu-items/concern-menu-item/concern-menu-item.component.ts b/web/src/app/shared/details/left-menu-items/concern-menu-item/concern-menu-item.component.ts
index fb8eb76455..06d5f6b606 100644
--- a/web/src/app/shared/details/left-menu-items/concern-menu-item/concern-menu-item.component.ts
+++ b/web/src/app/shared/details/left-menu-items/concern-menu-item/concern-menu-item.component.ts
@@ -15,9 +15,7 @@ import { environment } from '@env/environment';
     >
       {{ label }}
-
-        priority_hight
-
+
   `,
@@ -26,6 +24,7 @@ import { environment } from '@env/environment';
 export class ConcernMenuItemComponent extends MenuItemAbstractDirective implements OnInit {
   concernsPresent = false;
+  concerns = null;
 
   @Input() set entity(entity: BaseEntity) {
     this._entity = entity;
@@ -57,6 +56,9 @@ export class ConcernMenuItemComponent extends MenuItemAbstractDirective
-      .subscribe((concerns: any[]) => this.concernsPresent = !!concerns?.length);
+      .subscribe((concerns: any[]) => {
+        this.concerns = concerns;
+        this.concernsPresent = !!concerns?.length
+      });
   }
 }
diff --git a/web/src/app/shared/host-components-map/services2hosts/service-host.component.ts b/web/src/app/shared/host-components-map/services2hosts/service-host.component.ts
index 44bfe8e001..c457c51d51 100644
--- a/web/src/app/shared/host-components-map/services2hosts/service-host.component.ts
+++ b/web/src/app/shared/host-components-map/services2hosts/service-host.component.ts
@@ -21,7 +21,7 @@ import { getSelected, TakeService } from '../take.service';
 import { CompTile, HostTile, IRawHosComponent, Post, StatePost, Tile } from '../types';
 import { ApiService } from "@app/core/api";
 import { Observable } from "rxjs";
-import { IServiceComponent } from '@app/models/service-component';
+import { ClusterService } from '@app/core/services/cluster.service';
 
 @Component({
   selector: 'app-service-host',
@@ -87,7 +87,7 @@ export class ServiceHostComponent extends SocketListenerDirective implements OnI
     return this.Components.find((component) => component.isSelected);
   }
 
-  constructor(public service: TakeService, private channel: ChannelService, socket: Store, private api: ApiService) {
+  constructor(public service: TakeService, private channel: ChannelService, socket: Store, private api: ApiService, private clusterService: ClusterService) {
     super(socket);
   }
 
@@ -110,7 +110,7 @@ export class ServiceHostComponent extends SocketListenerDirective implements OnI
   }
 
   getAddedServices(): Observable {
-    return this.api.get(`api/v1/cluster/${this.cluster.id}/service/`);
+    return this.api.get(`api/v1/cluster/${this.clusterService.Cluster.id}/service/`);
   }
 
   socketListener(m: EventMessage) {
diff --git a/web/src/app/shared/shared.module.ts b/web/src/app/shared/shared.module.ts
index 2d8f0113ff..fd800411d9 100644
--- a/web/src/app/shared/shared.module.ts
+++ b/web/src/app/shared/shared.module.ts
@@ -70,6 +70,7 @@ import {
 } from "@app/components/columns/download-button-column/download-button-column.component";
 import { RbacAuditLoginService } from "@app/services/rbac-audit-login.service";
 import { ResetLoginAttemptsButtonComponent } from "./reset-login-attempts-button/reset-login-attempts-button.component";
+import { SignatureColumnComponent } from "@app/components/columns/signature-column/signature-column.component";
 
 @NgModule({
   imports: [
@@ -108,6 +109,7 @@ import { ResetLoginAttemptsButtonComponent } from "./reset-login-attempts-button
     UpgradeMasterComponent,
     UpgradeMasterConfigComponent,
     StatusColumnComponent,
+    SignatureColumnComponent,
     StateColumnComponent,
     EditionColumnComponent,
     ClusterColumnComponent,
@@ -147,6 +149,7 @@ import { ResetLoginAttemptsButtonComponent } from "./reset-login-attempts-button
     ImportComponent,
     ExportComponent,
     StatusColumnComponent,
+    SignatureColumnComponent,
     StateColumnComponent,
     EditionColumnComponent,
     ClusterColumnComponent,
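// Illustrative sketch only, not part of the diff: the pattern ServiceHostComponent switches
// to above, resolving the current cluster id from the injected ClusterService
// (clusterService.Cluster.id, as used in the hunk) rather than a component-local `cluster`
// object. MiniApi and MiniClusterService are stand-ins, not the real ADCM classes.
interface MiniApi { get(url: string): unknown; }
interface MiniClusterService { Cluster: { id: number }; }

function addedServicesUrl(clusterService: MiniClusterService): string {
  return `api/v1/cluster/${clusterService.Cluster.id}/service/`; // same endpoint as in the diff
}

function getAddedServices(api: MiniApi, clusterService: MiniClusterService) {
  return api.get(addedServicesUrl(clusterService));
}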