Add mypy & pylint check to the build scripts (demisto#15301)
* initial work

* fix regex pattern

* add pylint

* fix some pylint issues

* linters fix

* CR changes ph1

* CR changes ph2

* ignore mypy error and fix typo

* fix argument position typo

* add the logger argument

* add logging_module argument

* fix logging mock in test
ilappe authored Nov 17, 2021
1 parent 689307d commit cdc607d
Showing 65 changed files with 416 additions and 295 deletions.
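
The CI changes below swap the per-directory pyflake.sh invocations for a single ./Tests/scripts/linters_runner.sh, which per the commit title adds mypy and pylint to the build gate. The runner script itself is not among the rendered files, so the following Python sketch of an equivalent gate is an assumption (script name, target directories, and flags are illustrative, not taken from the commit):

    # lint_gate.py -- hypothetical stand-in for linters_runner.sh (the real
    # script is a shell script and is not shown in this diff).
    import subprocess
    import sys

    TARGETS = ["Tests", "Utils", "Documentation"]  # assumed target directories

    def run(cmd: list) -> int:
        """Run one linter and return its exit code."""
        print("running:", " ".join(cmd))
        return subprocess.run(cmd).returncode

    def main() -> int:
        status = 0
        # --ignore-missing-imports keeps untyped third-party packages from failing the gate
        status |= run(["mypy", "--ignore-missing-imports", *TARGETS])
        # disable the refactor (R) and convention (C) categories; keep errors and warnings
        status |= run(["pylint", "--disable=R,C", *TARGETS])
        return status

    if __name__ == "__main__":
        sys.exit(main())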
5 changes: 1 addition & 4 deletions .circleci/config.yml
@@ -348,10 +348,7 @@ references:
exit 0
fi
-# Run flake8 on all excluding Packs (Integraions and Scripts) - they will be handled in linting
-./Tests/scripts/pyflake.sh *.py
-find . -maxdepth 1 -type d -not \( -path . -o -path ./Packs -o -path ./venv \) | xargs ./Tests/scripts/pyflake.sh
+./Tests/scripts/linters_runner.sh
./Tests/scripts/validate.sh
run_unit_testing_and_lint: &run_unit_testing_and_lint
9 changes: 1 addition & 8 deletions .gitlab/ci/global.yml
@@ -244,14 +244,7 @@
if [[ -n $FORCE_BUCKET_UPLOAD || -n $BUCKET_UPLOAD ]] && [[ "$(echo "$GCS_MARKET_BUCKET" | tr '[:upper:]' '[:lower:]')" != "marketplace-dist" ]] && [[ $CI_COMMIT_BRANCH != "master" ]]; then
echo "Skipping the -Validate Files and Yaml- step when uploading to a test bucket."
else
echo "Run flake8 on all excluding Packs (Integrations and Scripts) - they will be handled in linting"
./Tests/scripts/pyflake.sh *.py
# do not run pyflake on venv or content-test-conf awsinstancetool
find . -maxdepth 1 -type d -not \( -path . -o -path ./Packs -o -path ./venv -o -path ./Tests \) | xargs ./Tests/scripts/pyflake.sh
./Tests/scripts/pyflake.sh ./Tests/*.py
find ./Tests -maxdepth 1 -type d -not \( -path ./Tests -o -path ./Tests/scripts \) | xargs ./Tests/scripts/pyflake.sh
./Tests/scripts/pyflake.sh ./Tests/scripts/*.py
find ./Tests/scripts -maxdepth 1 -type d -not \( -path ./Tests/scripts -o -path ./Tests/scripts/awsinstancetool \) | xargs ./Tests/scripts/pyflake.sh
./Tests/scripts/linters_runner.sh
./Tests/scripts/validate.sh
fi
- section_end "Validate Files and Yaml"
3 changes: 1 addition & 2 deletions .gitlab/ci/miscellaneous.yml
Original file line number Diff line number Diff line change
@@ -8,7 +8,6 @@ delete-mismatched-branches:
- if: '$DELETE_MISMATCHED_BRANCHES == "true"'
when: always
script:
-- cd Utils
-- python3 delete_mismatched_branches.py
+- python3 Utils/delete_mismatched_branches.py
retry:
max: 2
6 changes: 3 additions & 3 deletions Documentation/common_server_docs.py
@@ -144,9 +144,9 @@ def create_py_documentation(path, origin, language):

code = compile(py_script, '<string>', 'exec')
ns = {'demisto': demistomock}
-exec(code, ns)  # guardrails-disable-line
+exec(code, ns)  # guardrails-disable-line # pylint: disable=W0122

-x = []
+x: list = []

for a in ns:
a_object = ns.get(a)
@@ -216,7 +216,7 @@ def create_ps_documentation(path, origin, language):

for parameter in parameters[1:]:

-split_param = list(filter(None, parameter.split('\n')))
+split_param: list = list(filter(None, parameter.split('\n')))
required = False
param_name = split_param[0].strip()
if 'required' in param_name:
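The x: list and split_param: list annotations above (like the later empty_id_set: dict and content_items_result: dict changes) address mypy's inability to infer an element type for an empty container literal. A minimal illustration, with hypothetical function names:

    from typing import List

    def collect_names_unannotated() -> List[str]:
        names = []  # mypy typically reports: Need type annotation for "names"
        return names

    def collect_names_annotated() -> List[str]:
        names: List[str] = []  # explicit annotation closes the inference gap
        return names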
8 changes: 7 additions & 1 deletion Tests/Marketplace/Tests/marketplace_services_test.py
@@ -1,3 +1,6 @@
+
+# type: ignore[attr-defined]
+
import shutil
import pytest
import json
@@ -12,6 +15,9 @@
from datetime import datetime, timedelta
from typing import List, Dict, Optional, Tuple, Any

+# pylint: disable=no-member
+
+
from Tests.Marketplace.marketplace_services import Pack, input_to_list, get_valid_bool, convert_price, \
get_updated_server_version, load_json, \
store_successful_and_failed_packs_in_ci_artifacts, is_ignored_pack_file, \
@@ -936,7 +942,7 @@ def mock_os_path_join(path, *paths):
return TestChangelogCreation.dummy_pack_changelog(CHANGELOG_DATA_INITIAL_VERSION)
if path == 'changelog_new_exist':
return TestChangelogCreation.dummy_pack_changelog(CHANGELOG_DATA_MULTIPLE_VERSIONS)
-if path == 'changelog_not_exist' or path == 'metadata_not_exist':
+if path in ['changelog_not_exist', 'metadata_not_exist']:
return path_to_non_existing_changelog

@freeze_time("2020-11-04T13:34:14.75Z")
1 change: 1 addition & 0 deletions Tests/Marketplace/Tests/test_pack_dependencies.py
@@ -1,3 +1,4 @@
+# type: ignore[attr-defined]
from unittest.mock import patch
import networkx as nx

7 changes: 5 additions & 2 deletions Tests/Marketplace/Tests/upload_packs_test.py
@@ -1,3 +1,5 @@
+# type: ignore[attr-defined]
+# pylint: disable=no-member
import copy
import json
import os
@@ -8,6 +10,7 @@


# disable-secrets-detection-start
+
class TestModifiedPacks:
@pytest.mark.parametrize("packs_names_input, expected_result", [
("pack1,pack2,pack1", {"pack1", "pack2"}),
@@ -43,7 +46,7 @@ def is_dir(self):

@staticmethod
def isdir(path):
-return True if path == 'mock_path' else False
+return path == 'mock_path'


def scan_dir(dirs=None):
@@ -591,7 +594,7 @@ def test_is_private_packs_updated(self, mocker):
assert not is_private_packs_updated(public_index_json, index_file_path)

# private pack was deleted
-del (private_index_json.get("packs")[0])
+del private_index_json.get("packs")[0]
mocker.patch('Tests.Marketplace.upload_packs.load_json', return_value=private_index_json)
assert is_private_packs_updated(public_index_json, index_file_path)

3 changes: 3 additions & 0 deletions Tests/Marketplace/Tests/zip_packs_test.py
@@ -1,3 +1,6 @@
+# type: ignore[attr-defined]
+# pylint: disable=no-member
+
import pytest

from Tests.Marketplace.zip_packs import get_latest_pack_zip_from_pack_files, zip_packs,\
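The test modules above each gain a top-of-file # type: ignore[attr-defined] and a # pylint: disable=no-member. Placed at the top of a module, mypy treats the ignore as covering the whole file, and the pylint pragma disables the check module-wide — a pragmatic fit for test code whose mocks grow attributes at runtime. A small hypothetical illustration (class and test names are not from the commit):

    # type: ignore[attr-defined]
    # pylint: disable=no-member

    class FakeBlob:
        """Stand-in object that, like a mock, grows attributes at runtime."""

    def test_fake_blob_name():
        blob = FakeBlob()
        blob.name = "pack.zip"  # dynamic attribute: would otherwise trip the checkers
        assert blob.name == "pack.zip"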
4 changes: 2 additions & 2 deletions Tests/Marketplace/configure_and_install_packs.py
@@ -1,12 +1,12 @@
import argparse
-import logging
import sys

from Tests.configure_and_test_integration_instances import set_marketplace_url, MARKET_PLACE_CONFIGURATION, \
Build, Server
from Tests.test_content import get_json_file
from Tests.Marketplace.search_and_install_packs import install_all_content_packs_from_build_bucket
from Tests.scripts.utils.log_util import install_logging
+from Tests.scripts.utils import logging_wrapper as logging
from Tests.Marketplace.marketplace_constants import GCPConfig


@@ -30,7 +30,7 @@ def options_handler():


def main():
-install_logging('Install_Packs.log')
+install_logging('Install_Packs.log', logger=logging)
options = options_handler()

# Get the host by the ami env
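The call site now passes the wrapper module via logger=. The updated install_logging signature lives in Tests/scripts/utils/log_util.py and is not part of this excerpt, so the sketch below is only an assumption of its shape (per the commit bullets "add the logger argument" and "add logging_module argument"):

    # Hypothetical sketch of the extended helper in Tests/scripts/utils/log_util.py;
    # the real implementation is not shown in this diff.
    import logging as stdlib_logging

    def install_logging(log_file_name: str, logger=stdlib_logging) -> None:
        """Attach a file handler to the root logger of the given logging module.

        `logger` may be the stdlib logging module or the project's
        logging_wrapper, so each entry point decides which one is configured.
        """
        handler = stdlib_logging.FileHandler(log_file_name)
        handler.setFormatter(stdlib_logging.Formatter("[%(asctime)s] [%(levelname)s] %(message)s"))
        logger.getLogger().addHandler(handler)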
4 changes: 2 additions & 2 deletions Tests/Marketplace/copy_and_upload_packs.py
@@ -3,7 +3,6 @@
import sys
import argparse
import shutil
-import logging
import re
from zipfile import ZipFile
from google.cloud.storage import Blob, Bucket
@@ -15,6 +14,7 @@
from Tests.Marketplace.marketplace_constants import PackStatus, GCPConfig, BucketUploadFlow, PACKS_FOLDER, \
PACKS_FULL_PATH, IGNORED_FILES
from Tests.Marketplace.upload_packs import extract_packs_artifacts, print_packs_summary, get_packs_summary
+from Tests.scripts.utils import logging_wrapper as logging

LATEST_ZIP_REGEX = re.compile(fr'^{GCPConfig.GCS_PUBLIC_URL}/[\w./-]+/content/packs/([A-Za-z0-9-_.]+/\d+\.\d+\.\d+/'
r'[A-Za-z0-9-_.]+\.zip$)')
@@ -314,7 +314,7 @@ def options_handler():


def main():
-install_logging('Copy_and_Upload_Packs.log')
+install_logging('Copy_and_Upload_Packs.log', logger=logging)
options = options_handler()
packs_artifacts_path = options.artifacts_path
extract_destination_path = options.extract_path
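The recurring edit in this commit replaces import logging with from Tests.scripts.utils import logging_wrapper as logging, keeping the stdlib-style logging.info(...) call sites intact. The wrapper module is not among the rendered files; a plausible minimal sketch, assuming it simply re-exports the stdlib API bound to a named logger:

    # Hypothetical sketch of Tests/scripts/utils/logging_wrapper.py (not shown in
    # this diff): re-export the stdlib logging API bound to a named logger, giving
    # mypy and pylint a fixed set of members to resolve.
    import logging

    _logger = logging.getLogger("xsoar-build")  # assumed logger name

    debug = _logger.debug
    info = _logger.info
    warning = _logger.warning
    error = _logger.error
    critical = _logger.critical
    exception = _logger.exception
    getLogger = logging.getLogger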
2 changes: 1 addition & 1 deletion Tests/Marketplace/download_private_id_set.py
@@ -5,7 +5,7 @@


def create_empty_id_set_in_artifacts(private_id_set_path):
-empty_id_set = {
+empty_id_set: dict = {
"scripts": [],
"playbooks": [],
"integrations": [],
46 changes: 24 additions & 22 deletions Tests/Marketplace/marketplace_services.py
@@ -2,7 +2,6 @@
import fnmatch
import glob
import json
-import logging
import os
import re
import shutil
@@ -26,6 +25,7 @@
from Tests.Marketplace.marketplace_constants import PackFolders, Metadata, GCPConfig, BucketUploadFlow, PACKS_FOLDER, \
PackTags, PackIgnored, Changelog
from Utils.release_notes_generator import aggregate_release_notes_for_marketplace
+from Tests.scripts.utils import logging_wrapper as logging


class Pack(object):
@@ -151,7 +151,7 @@ def is_feed(self, is_feed):
"""
self._is_feed = is_feed

-@status.setter
+@status.setter  # type: ignore[attr-defined,no-redef]
def status(self, status_value):
""" setter of pack current status.
"""
@@ -229,7 +229,7 @@ def user_metadata(self):
"""
return self._user_metadata

-@display_name.setter
+@display_name.setter  # type: ignore[attr-defined,no-redef]
def display_name(self, display_name_value):
""" setter of display name property of the pack.
"""
Expand Down Expand Up @@ -398,7 +398,7 @@ def _get_all_pack_images(pack_integration_images, display_dependencies_images, d
list: collection of integration display name and it's path in gcs.
"""
-dependencies_integration_images_dict = {}
+dependencies_integration_images_dict: dict = {}
additional_dependencies_data = {k: v for k, v in dependencies_data.items() if k in display_dependencies_images}

for dependency_data in additional_dependencies_data.values():
@@ -587,7 +587,7 @@ def _parse_pack_metadata(self, build_number, commit_hash):
Metadata.VERSION_INFO: build_number,
Metadata.COMMIT: commit_hash,
Metadata.DOWNLOADS: self._downloads_count,
-Metadata.TAGS: list(self._tags),
+Metadata.TAGS: list(self._tags or []),
Metadata.CATEGORIES: self._categories,
Metadata.CONTENT_ITEMS: self._content_items,
Metadata.SEARCH_RANK: self._search_rank,
@@ -1195,7 +1195,8 @@ def get_same_block_versions(self, release_notes_dir: str, version: str, changelo
"""
lowest_version = [LooseVersion(Pack.PACK_INITIAL_VERSION)]
-lower_versions, higher_versions = [], []
+lower_versions: list = []
+higher_versions: list = []
same_block_versions_dict: dict = dict()
for item in changelog.keys(): # divide the versions into lists of lower and higher than given version
(lower_versions if LooseVersion(item) < version else higher_versions).append(LooseVersion(item))
@@ -1275,7 +1276,7 @@ def assert_upload_bucket_version_matches_release_notes_version(self,
changelog: The changelog from the production bucket.
latest_release_notes: The latest release notes version string in the current branch
"""
-changelog_latest_release_notes = max(changelog, key=lambda k: LooseVersion(k))
+changelog_latest_release_notes = max(changelog, key=lambda k: LooseVersion(k))  # pylint: disable=W0108
assert LooseVersion(latest_release_notes) >= LooseVersion(changelog_latest_release_notes), \
f'{self._pack_name}: Version mismatch detected between upload bucket and current branch\n' \
f'Upload bucket version: {changelog_latest_release_notes}\n' \
@@ -1465,7 +1466,7 @@ def collect_content_items(self):
.
"""
task_status = False
-content_items_result = {}
+content_items_result: dict = {}

try:
# the format is defined in issue #19786, may change in the future
@@ -1696,7 +1697,7 @@ def load_user_metadata(self):
self.current_version = user_metadata.get(Metadata.CURRENT_VERSION, '')
self.hidden = user_metadata.get(Metadata.HIDDEN, False)
self.description = user_metadata.get(Metadata.DESCRIPTION, False)
-self.display_name = user_metadata.get(Metadata.NAME, '')
+self.display_name = user_metadata.get(Metadata.NAME, '')  # type: ignore[misc]
self._user_metadata = user_metadata
self.eula_link = user_metadata.get(Metadata.EULA_LINK, Metadata.EULA_URL)

@@ -1826,7 +1827,7 @@ def format_metadata(self, index_folder_path, packs_dependencies_mapping, build_n

try:
self.set_pack_dependencies(packs_dependencies_mapping)
-if Metadata.DISPLAYED_IMAGES not in self.user_metadata:
+if Metadata.DISPLAYED_IMAGES not in self.user_metadata and self._user_metadata:
self._user_metadata[Metadata.DISPLAYED_IMAGES] = packs_dependencies_mapping.get(
self._pack_name, {}).get(Metadata.DISPLAYED_IMAGES, [])
logging.info(f"Adding auto generated display images for {self._pack_name} pack")
@@ -1882,7 +1883,7 @@ def _calculate_pack_creation_date(pack_name, index_folder_path):

if metadata:
if metadata.get(Metadata.CREATED):
-created_time = metadata.get(Metadata.CREATED)
+created_time = metadata.get(Metadata.CREATED, '')
else:
raise Exception(f'The metadata file of the {pack_name} pack does not contain "{Metadata.CREATED}" time')

@@ -1908,7 +1909,7 @@ def _get_pack_update_date(self, index_folder_path, pack_was_modified):

def set_pack_dependencies(self, packs_dependencies_mapping):
pack_dependencies = packs_dependencies_mapping.get(self._pack_name, {}).get(Metadata.DEPENDENCIES, {})
-if Metadata.DEPENDENCIES not in self.user_metadata:
+if Metadata.DEPENDENCIES not in self.user_metadata and self._user_metadata:
self._user_metadata[Metadata.DEPENDENCIES] = {}

# If it is a core pack, check that no new mandatory packs (that are not core packs) were added
@@ -1923,7 +1924,8 @@ def set_pack_dependencies(self, packs_dependencies_mapping):
f'found in the core pack {self._pack_name}')

pack_dependencies.update(self.user_metadata[Metadata.DEPENDENCIES])
-self._user_metadata[Metadata.DEPENDENCIES] = pack_dependencies
+if self._user_metadata:
+    self._user_metadata[Metadata.DEPENDENCIES] = pack_dependencies

def prepare_for_index_upload(self):
""" Removes and leaves only necessary files in pack folder.
@@ -1970,7 +1972,7 @@ def _get_spitted_yml_image_data(root, target_folder_files):
for pack_file in target_folder_files:
if pack_file.startswith('.'):
continue
-elif pack_file.endswith('_image.png'):
+if pack_file.endswith('_image.png'):
image_data['repo_image_path'] = os.path.join(root, pack_file)
elif pack_file.endswith('.yml'):
with open(os.path.join(root, pack_file), 'r') as integration_file:
@@ -2447,7 +2449,7 @@ def add_bc_entries_if_needed(self, release_notes_dir: str, changelog: Dict[str,
if not os.path.exists(release_notes_dir):
return
bc_version_to_text: Dict[str, Optional[str]] = self._breaking_changes_versions_to_text(release_notes_dir)
-loose_versions: List[LooseVersion] = [LooseVersion(bc_ver) for bc_ver in bc_version_to_text.keys()]
+loose_versions: List[LooseVersion] = [LooseVersion(bc_ver) for bc_ver in bc_version_to_text]
predecessor_version: LooseVersion = LooseVersion('0.0.0')
for changelog_entry in sorted(changelog.keys(), key=LooseVersion):
rn_loose_version: LooseVersion = LooseVersion(changelog_entry)
@@ -2489,7 +2491,7 @@ def _calculate_bc_text(self, release_notes_dir: str, bc_version_to_text: Dict[st
else:
# Important: Currently, implementation of aggregating BCs was decided to concat between them
# In the future this might be needed to re-thought.
-return '\n'.join(bc_version_to_text.values())
+return '\n'.join(bc_version_to_text.values())  # type: ignore[arg-type]

def _handle_many_bc_versions_some_with_text(self, release_notes_dir: str, text_of_bc_versions: List[str],
bc_versions_without_text: List[str], ) -> str:
@@ -2619,7 +2621,7 @@ def get_upload_data(packs_results_file_path: str, stage: str) -> Tuple[dict, dic
"""
if os.path.exists(packs_results_file_path):
packs_results_file = load_json(packs_results_file_path)
-stage = packs_results_file.get(stage, {})
-successful_packs_dict = stage.get(BucketUploadFlow.SUCCESSFUL_PACKS, {})
-failed_packs_dict = stage.get(BucketUploadFlow.FAILED_PACKS, {})
-successful_private_packs_dict = stage.get(BucketUploadFlow.SUCCESSFUL_PRIVATE_PACKS, {})
-images_data_dict = stage.get(BucketUploadFlow.IMAGES, {})
+stage_data: dict = packs_results_file.get(stage, {})
+successful_packs_dict = stage_data.get(BucketUploadFlow.SUCCESSFUL_PACKS, {})
+failed_packs_dict = stage_data.get(BucketUploadFlow.FAILED_PACKS, {})
+successful_private_packs_dict = stage_data.get(BucketUploadFlow.SUCCESSFUL_PRIVATE_PACKS, {})
+images_data_dict = stage_data.get(BucketUploadFlow.IMAGES, {})
return successful_packs_dict, failed_packs_dict, successful_private_packs_dict, images_data_dict
return {}, {}, {}, {}

@@ -2674,7 +2676,7 @@ def store_successful_and_failed_packs_in_ci_artifacts(packs_results_file_path: s
logging.debug(f"Successful packs {successful_packs_dict}")

if updated_private_packs:
-successful_private_packs_dict = {
+successful_private_packs_dict: dict = {
BucketUploadFlow.SUCCESSFUL_PRIVATE_PACKS: {pack_name: {} for pack_name in updated_private_packs}
}
packs_results[stage].update(successful_private_packs_dict)
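The stage → stage_data rename in get_upload_data avoids rebinding the stage: str parameter to a dict, which mypy rejects as an incompatible assignment. A minimal reproduction, with hypothetical names:

    from typing import Dict

    def get_stage_results(results: Dict[str, dict], stage: str) -> dict:
        # Rebinding the parameter would draw a mypy error:
        #   Incompatible types in assignment (expression has type "dict",
        #   variable has type "str")
        # stage = results.get(stage, {})
        stage_data: dict = results.get(stage, {})  # a fresh, annotated name sidesteps it
        return stage_data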
(The remaining changed files are not shown in this excerpt.)