configure_and_test_integration_instances.py
from __future__ import print_function
import argparse
import ast
import json
import os
import subprocess
import sys
import uuid
import zipfile
from abc import abstractmethod
from datetime import datetime
from packaging.version import Version
from enum import IntEnum
from pprint import pformat
from threading import Thread
from time import sleep
from typing import List, Tuple, Union
from urllib.parse import quote_plus
import demisto_client
from demisto_sdk.commands.common.constants import FileType
from demisto_sdk.commands.common.tools import run_threads_list, run_command, get_yaml, \
str2bool, format_version, find_type
from demisto_sdk.commands.test_content.constants import SSH_USER
from demisto_sdk.commands.test_content.mock_server import MITMProxy, run_with_mock, RESULT
from demisto_sdk.commands.test_content.tools import update_server_configuration, is_redhat_instance
from demisto_sdk.commands.test_content.TestContentClasses import BuildContext
from demisto_sdk.commands.validate.validate_manager import ValidateManager
from ruamel import yaml
from Tests.Marketplace.search_and_install_packs import search_and_install_packs_and_their_dependencies, \
upload_zipped_packs, install_all_content_packs_for_nightly
from Tests.scripts.utils.log_util import install_logging
from Tests.scripts.utils import logging_wrapper as logging
from Tests.test_content import get_server_numeric_version
from Tests.test_integration import __get_integration_config, test_integration_instance, disable_all_integrations
from Tests.tools import run_with_proxy_configured
from Tests.update_content_data import update_content
MARKET_PLACE_MACHINES = ('master',)
SKIPPED_PACKS = ['NonSupported', 'ApiModules']
NO_PROXY = ','.join([
'oproxy.demisto.ninja',
'oproxy-dev.demisto.ninja',
])
NO_PROXY_CONFIG = {'python.pass.extra.keys': f'--env##no_proxy={NO_PROXY}'} # noqa: E501
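# Docker hardening settings applied to XSOAR servers (CPU/memory/pids limits, run-as-user).
# In these server configuration values the '##' token appears to act as the separator
# between individual arguments passed to docker.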
DOCKER_HARDENING_CONFIGURATION = {
'docker.cpu.limit': '1.0',
'docker.run.internal.asuser': 'true',
'limit.docker.cpu': 'true',
'python.pass.extra.keys': f'--memory=1g##--memory-swap=-1##--pids-limit=256##--ulimit=nofile=1024:8192##--env##no_proxy={NO_PROXY}', # noqa: E501
'powershell.pass.extra.keys': f'--env##no_proxy={NO_PROXY}',
}
DOCKER_HARDENING_CONFIGURATION_FOR_PODMAN = {
'docker.run.internal.asuser': 'true'
}
MARKET_PLACE_CONFIGURATION = {
'content.pack.verify': 'false',
'marketplace.initial.sync.delay': '0',
'content.pack.ignore.missing.warnings.contentpack': 'true'
}
AVOID_DOCKER_IMAGE_VALIDATION = {
'content.validate.docker.images': 'false'
}
ID_SET_PATH = './artifacts/id_set.json'
XSOAR_BUILD_TYPE = "XSOAR"
XSIAM_BUILD_TYPE = "XSIAM"
MARKETPLACE_TEST_BUCKET = 'marketplace-ci-build/content/builds'
MARKETPLACE_XSIAM_BUCKETS = 'marketplace-v2-dist-dev/upload-flow/builds-xsiam'
ARTIFACTS_FOLDER_MPV2 = "/builds/xsoar/content/artifacts/marketplacev2"
SET_SERVER_KEYS = True
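# Module-level flag; toggled to False in XSIAMBuild.__init__ below.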
class Running(IntEnum):
CI_RUN = 0
WITH_OTHER_SERVER = 1
WITH_LOCAL_SERVER = 2
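# Server is a minimal holder for connection details; the XSIAMServer and XSOARServer
# subclasses below add lazily-created demisto_client instances.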
class Server:
def __init__(self):
self.internal_ip = None
self.ssh_tunnel_port = None
self.user_name = None
self.password = None
self.name = ''
class XSIAMServer(Server):
def __init__(self, api_key, server_numeric_version, base_url, xdr_auth_id, name):
super().__init__()
self.name = name
self.api_key = api_key
self.server_numeric_version = server_numeric_version
self.base_url = base_url
self.xdr_auth_id = xdr_auth_id
self.__client = None
# use the client without a demisto username
os.environ.pop('DEMISTO_USERNAME', None)
def __str__(self):
return self.name
@property
def client(self):
if self.__client is None:
self.__client = self.reconnect_client()
return self.__client
def reconnect_client(self):
self.__client = demisto_client.configure(base_url=self.base_url,
verify_ssl=False,
api_key=self.api_key,
auth_id=self.xdr_auth_id)
return self.__client
class XSOARServer(Server):
def __init__(self, internal_ip, port, user_name, password):
super().__init__()
self.__ssh_client = None
self.__client = None
self.internal_ip = internal_ip
self.ssh_tunnel_port = port
self.user_name = user_name
self.password = password
def __str__(self):
return self.internal_ip
@property
def client(self):
if self.__client is None:
self.__client = self.reconnect_client()
return self.__client
def reconnect_client(self):
self.__client = demisto_client.configure(f'https://localhost:{self.ssh_tunnel_port}',
verify_ssl=False,
username=self.user_name,
password=self.password)
return self.__client
def add_server_configuration(self, config_dict, error_msg, restart=False):
update_server_configuration(self.client, config_dict, error_msg)
if restart:
self.exec_command('sudo systemctl restart demisto')
def exec_command(self, command):
subprocess.check_output(f'ssh {SSH_USER}@{self.internal_ip} {command}'.split(),
stderr=subprocess.STDOUT)
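# Illustrative sketch (comment only, not executed): how an XSOARServer might be driven in a
# local run, assuming an SSH tunnel to the machine is already open on local port 4443 and
# the IP and credentials below are placeholders:
#   server = XSOARServer('10.0.0.1', 4443, 'admin', 'password')
#   server.add_server_configuration({'content.pack.verify': 'false'},
#                                   error_msg='failed to set configuration', restart=True)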
def get_id_set(id_set_path) -> Union[dict, None]:
"""
Used to collect the ID set so it can be passed to the Build class on init.
:return: ID set as a dict if it exists.
"""
if os.path.isfile(id_set_path):
return get_json_file(id_set_path)
return None
class Build:
# START CHANGE ON LOCAL RUN #
content_path = f'{os.getenv("HOME")}/project' if os.getenv('CIRCLECI') else os.getenv('CI_PROJECT_DIR')
test_pack_target = f'{os.getenv("HOME")}/project/Tests' if os.getenv(
'CIRCLECI') else f'{os.getenv("CI_PROJECT_DIR")}/Tests' # noqa
key_file_path = 'Use in case of running with non local server'
run_environment = Running.CI_RUN
env_results_path = f'{os.getenv("ARTIFACTS_FOLDER")}/env_results.json'
DEFAULT_SERVER_VERSION = '99.99.98'
# END CHANGE ON LOCAL RUN #
def __init__(self, options):
self._proxy = None
self.is_xsiam = False
self.xsiam_machine = None
self.servers = []
self.server_numeric_version = ''
self.git_sha1 = options.git_sha1
self.branch_name = options.branch
self.ci_build_number = options.build_number
self.is_nightly = options.is_nightly
self.secret_conf = get_json_file(options.secret)
self.username = options.user if options.user else self.secret_conf.get('username')
self.password = options.password if options.password else self.secret_conf.get('userPassword')
self.is_private = options.is_private
conf = get_json_file(options.conf)
self.tests = conf['tests']
self.skipped_integrations_conf = conf['skipped_integrations']
self.unmockable_integrations = conf['unmockable_integrations']
id_set_path = options.id_set_path if options.id_set_path else ID_SET_PATH
self.id_set = get_id_set(id_set_path)
self.test_pack_path = options.test_pack_path if options.test_pack_path else None
self.tests_to_run = self.fetch_tests_list(options.tests_to_run)
self.content_root = options.content_root
self.pack_ids_to_install = self.fetch_pack_ids_to_install(options.pack_ids_to_install)
self.service_account = options.service_account
@staticmethod
def fetch_tests_list(tests_to_run_path: str):
"""
Fetches the test list from the filter file (parses every line; all tests are written in the filter.txt file).
:param tests_to_run_path: Path to location of test filter.
:return: List of tests if there are any, otherwise empty list.
"""
tests_to_run = []
with open(tests_to_run_path, "r") as filter_file:
tests_from_file = filter_file.readlines()
for test_from_file in tests_from_file:
test_clean = test_from_file.rstrip()
tests_to_run.append(test_clean)
return tests_to_run
@staticmethod
def fetch_pack_ids_to_install(packs_to_install_path: str):
"""
Fetches the list of pack IDs to install from the given file.
:param packs_to_install_path: Path to location of pack IDs to install file.
:return: List of Pack IDs if there are any, otherwise empty list.
"""
pack_ids = []
with open(packs_to_install_path, "r") as packs_file:
packs_from_file = packs_file.readlines()
for pack_from_file in packs_from_file:
pack_id = pack_from_file.rstrip()
pack_ids.append(pack_id)
return pack_ids
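# The packs file is expected to hold one pack ID per line, e.g. (illustrative):
#   Base
#   CommonScripts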
@abstractmethod
def configure_servers_and_restart(self):
pass
@abstractmethod
def install_nightly_pack(self):
pass
@abstractmethod
def test_integrations_post_update(self, new_module_instances: list,
modified_module_instances: list) -> tuple:
pass
@abstractmethod
def configure_and_test_integrations_pre_update(self, new_integrations, modified_integrations) -> tuple:
pass
@abstractmethod
def test_integration_with_mock(self, instance: dict, pre_update: bool):
pass
@staticmethod
def set_marketplace_url(servers, branch_name, ci_build_number):
pass
def disable_instances(self):
for server in self.servers:
disable_all_integrations(server.client)
def get_changed_integrations(self) -> tuple:
"""
Return 2 lists - list of new integrations and list of modified integrations since the commit of the git_sha1.
Args:
self: the build object
Returns:
list of new integrations and list of modified integrations
"""
new_integrations_files, modified_integrations_files = get_new_and_modified_integration_files(
self.branch_name) if not self.is_private else ([], [])
new_integrations_names, modified_integrations_names = [], []
if new_integrations_files:
new_integrations_names = get_integration_names_from_files(new_integrations_files)
logging.debug(f'New Integrations Since Last Release:\n{new_integrations_names}')
if modified_integrations_files:
modified_integrations_names = get_integration_names_from_files(modified_integrations_files)
logging.debug(f'Updated Integrations Since Last Release:\n{modified_integrations_names}')
return new_integrations_names, modified_integrations_names
@abstractmethod
def concurrently_run_function_on_servers(self, function=None, pack_path=None, service_account=None):
pass
def install_packs(self, pack_ids=None):
"""
Install pack_ids or packs from "$ARTIFACTS_FOLDER/content_packs_to_install.txt" file, and packs dependencies.
Args:
pack_ids: Packs to install on the server. If no packs are provided, installs the packs that were provided
by the previous step of the build.
Returns:
installed_content_packs_successfully: Whether the packs were installed successfully
"""
pack_ids = self.pack_ids_to_install if pack_ids is None else pack_ids
installed_content_packs_successfully = True
for server in self.servers:
try:
hostname = self.xsiam_machine if self.is_xsiam else ''
_, flag = search_and_install_packs_and_their_dependencies(pack_ids, server.client, hostname)
if not flag:
raise Exception('Failed to search and install packs.')
except Exception:
logging.exception('Failed to search and install packs')
installed_content_packs_successfully = False
return installed_content_packs_successfully
def get_tests(self) -> List[dict]:
"""
Selects the tests that should be run in this execution and filters out those that cannot run on this server version
Args:
self: Build object
Returns:
Test configurations from conf.json that should be run in this execution
"""
server_numeric_version: str = self.server_numeric_version
tests: dict = self.tests
if Build.run_environment == Running.CI_RUN:
filtered_tests = BuildContext._extract_filtered_tests()
if self.is_nightly:
# skip test button testing
logging.debug('Not running instance tests in nightly flow')
tests_for_iteration = []
else:
# if there are no filtered tests in an XSIAM build, we do not run tests at all
if self.is_xsiam and not filtered_tests:
tests_for_iteration = []
else:
tests_for_iteration = [test for test in tests
if not filtered_tests or test.get('playbookID', '') in filtered_tests]
tests_for_iteration = filter_tests_with_incompatible_version(tests_for_iteration, server_numeric_version)
return tests_for_iteration
# START CHANGE ON LOCAL RUN #
return [
{
"playbookID": "Docker Hardening Test",
"fromversion": "5.0.0"
},
{
"integrations": "SplunkPy",
"playbookID": "SplunkPy-Test-V2",
"memory_threshold": 500,
"instance_names": "use_default_handler"
}
]
# END CHANGE ON LOCAL RUN #
def configure_server_instances(self, tests_for_iteration, all_new_integrations, modified_integrations):
"""
"""
modified_module_instances = []
new_module_instances = []
testing_client = self.servers[0].client
for test in tests_for_iteration:
integrations = get_integrations_for_test(test, self.skipped_integrations_conf)
playbook_id = test.get('playbookID')
new_integrations, modified_integrations, unchanged_integrations, integration_to_status = group_integrations(
integrations, self.skipped_integrations_conf, all_new_integrations, modified_integrations
)
integration_to_status_string = '\n\t\t\t\t\t\t'.join(
[f'"{key}" - {val}' for key, val in integration_to_status.items()])
if integration_to_status_string:
logging.info(f'All Integrations for test "{playbook_id}":\n\t\t\t\t\t\t{integration_to_status_string}')
else:
logging.info(f'No Integrations for test "{playbook_id}"')
instance_names_conf = test.get('instance_names', [])
if not isinstance(instance_names_conf, list):
instance_names_conf = [instance_names_conf]
integrations_to_configure = modified_integrations[:]
integrations_to_configure.extend(unchanged_integrations)
placeholders_map = {'%%SERVER_HOST%%': self.servers[0]}
new_ints_params_set = set_integration_params(self,
new_integrations,
self.secret_conf['integrations'],
instance_names_conf,
placeholders_map)
ints_to_configure_params_set = set_integration_params(self,
integrations_to_configure,
self.secret_conf['integrations'],
instance_names_conf, placeholders_map)
if not new_ints_params_set:
logging.error(f'failed setting parameters for integrations: {new_integrations}')
if not ints_to_configure_params_set:
logging.error(f'failed setting parameters for integrations: {integrations_to_configure}')
if not (new_ints_params_set and ints_to_configure_params_set):
continue
modified_module_instances_for_test, new_module_instances_for_test = self.configure_modified_and_new_integrations(
integrations_to_configure,
new_integrations,
testing_client)
modified_module_instances.extend(modified_module_instances_for_test)
new_module_instances.extend(new_module_instances_for_test)
return modified_module_instances, new_module_instances
def configure_modified_and_new_integrations(self,
modified_integrations_to_configure: list,
new_integrations_to_configure: list,
demisto_client_: demisto_client) -> tuple:
"""
Configures old and new integrations in the server configured in the demisto_client.
Args:
self: The build object
modified_integrations_to_configure: Integrations to configure that already exist
new_integrations_to_configure: Integrations to configure that were created in this build
demisto_client_: A demisto client
Returns:
A tuple with two lists:
1. List of configured instances of modified integrations
2. List of configured instances of new integrations
"""
modified_modules_instances = []
new_modules_instances = []
for integration in modified_integrations_to_configure:
placeholders_map = {'%%SERVER_HOST%%': self.servers[0]}
module_instance = configure_integration_instance(integration, demisto_client_, placeholders_map)
if module_instance:
modified_modules_instances.append(module_instance)
for integration in new_integrations_to_configure:
placeholders_map = {'%%SERVER_HOST%%': self.servers[0]}
module_instance = configure_integration_instance(integration, demisto_client_, placeholders_map)
if module_instance:
new_modules_instances.append(module_instance)
return modified_modules_instances, new_modules_instances
def instance_testing(self,
all_module_instances: list,
pre_update: bool,
use_mock: bool = True,
first_call: bool = True) -> Tuple[set, set]:
"""
Runs 'test-module' command for the instances detailed in `all_module_instances`
Args:
self: An object containing the current build info.
all_module_instances: The integration instances that should be tested
pre_update: Whether this instance testing is before or after the content update on the server.
use_mock: Whether to use a mock while testing mockable integrations. Relevant mainly for
private content builds, which don't use the mocks.
first_call: indicates whether this is the first time the function is called from the same place
Returns:
A set of the successful tests containing the instance name and the integration name
A set of the failed tests containing the instance name and the integration name
"""
update_status = 'Pre' if pre_update else 'Post'
failed_tests = set()
successful_tests = set()
# Test all module instances (of modified + unchanged integrations) pre-updating content
if all_module_instances:
# only print start message if there are instances to configure
logging.info(f'Start of Instance Testing ("Test" button) ({update_status}-update)')
else:
logging.info(f'No integrations to configure for the chosen tests. ({update_status}-update)')
failed_instances = []
for instance in all_module_instances:
integration_of_instance = instance.get('brand', '')
instance_name = instance.get('name', '')
# If there is a failure, test_integration_instance will print it
if integration_of_instance not in self.unmockable_integrations and use_mock:
success = self.test_integration_with_mock(instance, pre_update)
else:
testing_client = self.servers[0].reconnect_client()
success, _ = test_integration_instance(testing_client, instance)
if not success:
failed_tests.add((instance_name, integration_of_instance))
failed_instances.append(instance)
else:
successful_tests.add((instance_name, integration_of_instance))
# in case some tests failed post-update, wait 15 seconds and then run the failed tests again
if failed_instances and not pre_update and first_call:
logging.info("some post-update tests failed, sleeping for 15 seconds, then running the failed tests again")
sleep(15)
_, failed_tests = self.instance_testing(failed_instances, pre_update=False, first_call=False)
return successful_tests, failed_tests
def update_content_on_servers(self) -> bool:
"""
Changes marketplace bucket to new one that was created for current branch.
Updates content on the build's server according to the server version.
Args:
self: Build object
Returns:
A boolean that indicates whether the content installation was successful.
If the server version is lower than 5.9.9, returns the 'installed_content_packs_successfully' parameter as is.
If the server version is 6.0 or higher, returns True only if the packs installation was successful
both before and after the update.
"""
self.set_marketplace_url(self.servers, self.branch_name, self.ci_build_number)
installed_content_packs_successfully = self.install_packs()
return installed_content_packs_successfully
class XSOARBuild(Build):
def __init__(self, options):
super().__init__(options)
self.ami_env = options.ami_env
self.server_to_port_mapping, self.server_numeric_version = self.get_servers(options.ami_env)
self.servers = [XSOARServer(internal_ip,
port,
self.username,
self.password) for internal_ip, port in self.server_to_port_mapping.items()]
@property
def proxy(self) -> MITMProxy:
"""
A property method that creates and returns a single proxy instance used throughout the build.
Returns:
The single proxy instance that should be used in this build.
"""
if not self._proxy:
self._proxy = MITMProxy(self.servers[0].internal_ip,
logging_module=logging,
build_number=self.ci_build_number,
branch_name=self.branch_name)
return self._proxy
def configure_servers_and_restart(self):
manual_restart = Build.run_environment == Running.WITH_LOCAL_SERVER
for server in self.servers:
configurations = dict()
configure_types = []
if is_redhat_instance(server.internal_ip):
configurations.update(DOCKER_HARDENING_CONFIGURATION_FOR_PODMAN)
configurations.update(NO_PROXY_CONFIG)
configurations['python.pass.extra.keys'] += "##--network=slirp4netns:cidr=192.168.0.0/16"
else:
configurations.update(DOCKER_HARDENING_CONFIGURATION)
configure_types.append('docker hardening')
configure_types.append('marketplace')
configurations.update(MARKET_PLACE_CONFIGURATION)
error_msg = 'failed to set {} configurations'.format(' and '.join(configure_types))
server.add_server_configuration(configurations, error_msg=error_msg, restart=not manual_restart)
if manual_restart:
input('restart your server and then press enter.')
else:
logging.info('Done restarting servers. Sleeping for 1 minute')
sleep(60)
def install_nightly_pack(self):
"""
Installs all existing packs in master
Collects all existing test playbooks, saves them to test_pack.zip
Uploads test_pack.zip to server
Args:
self: A build object
"""
# Install all existing packs with latest version
self.concurrently_run_function_on_servers(function=install_all_content_packs_for_nightly,
service_account=self.service_account)
# creates the zip file test_pack.zip which contains all existing TestPlaybooks
create_nightly_test_pack()
# uploads test_pack.zip to all servers
self.concurrently_run_function_on_servers(function=upload_zipped_packs,
pack_path=f'{Build.test_pack_target}/test_pack.zip')
logging.info('Sleeping for 45 seconds while installing nightly packs')
sleep(45)
@run_with_proxy_configured
def test_integrations_post_update(self, new_module_instances: list,
modified_module_instances: list) -> tuple:
"""
Runs 'test-module' on all integrations for the post-update check
Args:
self: A build object
new_module_instances: A list containing new integration instances to run test-module on
modified_module_instances: A list containing old (existing) integration instances to run test-module on
Returns:
* A set of the successful tests, each containing the instance name and the integration name
* A set of the failed tests, each containing the instance name and the integration name
"""
modified_module_instances.extend(new_module_instances)
successful_tests_post, failed_tests_post = self.instance_testing(modified_module_instances, pre_update=False)
return successful_tests_post, failed_tests_post
@run_with_proxy_configured
def configure_and_test_integrations_pre_update(self, new_integrations, modified_integrations) -> tuple:
"""
Configures integration instances that exist in the current version and for each integration runs 'test-module'.
Args:
self: Build object
new_integrations: A list containing new integration names
modified_integrations: A list containing modified integration names
Returns:
A tuple consisting of:
* A list of modified module instances configured
* A list of new module instances configured
* A list of integrations that have failed the 'test-module' command execution
* A list of integrations that have succeeded the 'test-module' command execution
* A list of new integration names
"""
tests_for_iteration = self.get_tests()
modified_module_instances, new_module_instances = self.configure_server_instances(
tests_for_iteration,
new_integrations,
modified_integrations)
successful_tests_pre, failed_tests_pre = self.instance_testing(modified_module_instances, pre_update=True)
return modified_module_instances, new_module_instances, failed_tests_pre, successful_tests_pre
def test_integration_with_mock(self, instance: dict, pre_update: bool):
"""
Runs 'test-module' for given integration with mitmproxy
In case the playback mode fails and this is a post-update run - a record attempt will be executed.
Args:
self: An object containing the current build info.
instance: A dict containing the instance details
pre_update: Whether this instance testing is before or after the content update on the server.
Returns:
The result of running the 'test-module' command for the given integration.
If a record was executed - the result of the 'test-module' run in record mode only will be returned.
"""
testing_client = self.servers[0].reconnect_client()
integration_of_instance = instance.get('brand', '')
logging.debug(f'Integration "{integration_of_instance}" is mockable, running test-module with mitmproxy')
has_mock_file = self.proxy.has_mock_file(integration_of_instance)
success = False
if has_mock_file:
with run_with_mock(self.proxy, integration_of_instance) as result_holder:
success, _ = test_integration_instance(testing_client, instance)
result_holder[RESULT] = success
if not success:
logging.warning(f'Running test-module for "{integration_of_instance}" has failed in playback mode')
if not success and not pre_update:
logging.debug(f'Recording a mock file for integration "{integration_of_instance}".')
with run_with_mock(self.proxy, integration_of_instance, record=True) as result_holder:
success, _ = test_integration_instance(testing_client, instance)
result_holder[RESULT] = success
if not success:
logging.debug(f'Record mode for integration "{integration_of_instance}" has failed.')
return success
@staticmethod
def set_marketplace_url(servers, branch_name, ci_build_number):
url_suffix = quote_plus(f'{branch_name}/{ci_build_number}/xsoar')
config_path = 'marketplace.bootstrap.bypass.url'
config = {config_path: f'https://storage.googleapis.com/marketplace-ci-build/content/builds/{url_suffix}'}
for server in servers:
server.add_server_configuration(config, 'failed to configure marketplace custom url ', True)
logging.success('Updated marketplace url and restarted servers')
logging.info('sleeping for 120 seconds')
sleep(120)
@staticmethod
def get_servers(ami_env):
env_conf = get_env_conf()
server_to_port_mapping = map_server_to_port(env_conf, ami_env)
if Build.run_environment == Running.CI_RUN:
server_numeric_version = get_server_numeric_version(ami_env)
else:
server_numeric_version = Build.DEFAULT_SERVER_VERSION
return server_to_port_mapping, server_numeric_version
def concurrently_run_function_on_servers(self, function=None, pack_path=None, service_account=None):
threads_list = []
if not function:
raise Exception('Install method was not provided.')
# For each server URL we install the pack(s)
for server in self.servers:
kwargs = {'client': server.client, 'host': server.internal_ip}
if service_account:
kwargs['service_account'] = service_account
if pack_path:
kwargs['pack_path'] = pack_path
threads_list.append(Thread(target=function, kwargs=kwargs))
run_threads_list(threads_list)
class XSIAMBuild(Build):
def __init__(self, options):
global SET_SERVER_KEYS
SET_SERVER_KEYS = False
super().__init__(options)
self.is_xsiam = True
self.xsiam_machine = options.xsiam_machine
self.xsiam_servers = get_json_file(options.xsiam_servers_path)
self.api_key, self.server_numeric_version, self.base_url, self.xdr_auth_id =\
self.get_xsiam_configuration(options.xsiam_machine, self.xsiam_servers)
self.servers = [XSIAMServer(self.api_key, self.server_numeric_version, self.base_url, self.xdr_auth_id,
self.xsiam_machine)]
@staticmethod
def get_xsiam_configuration(xsiam_machine, xsiam_servers):
conf = xsiam_servers.get(xsiam_machine)
return conf.get('api_key'), conf.get('demisto_version'), conf.get('base_url'), conf.get('x-xdr-auth-id')
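# Expected shape of the xsiam_servers metadata file (illustrative, all values are placeholders):
#   {
#       "<machine-name>": {
#           "api_key": "...",
#           "demisto_version": "...",
#           "base_url": "https://...",
#           "x-xdr-auth-id": "..."
#       }
#   }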
def configure_servers_and_restart(self):
# This step is not needed in XSIAM.
pass
def test_integration_with_mock(self, instance: dict, pre_update: bool):
# This step is not needed in XSIAM.
pass
def install_nightly_pack(self):
"""
Installs packs from content_packs_to_install.txt file
Collects all existing test playbooks, saves them to test_pack.zip
Uploads test_pack.zip to server
"""
self.install_packs()
# creates the zip file test_pack.zip which contains all existing TestPlaybooks
create_nightly_test_pack()
# uploads test_pack.zip to all servers (we have only one xsiam server)
for server in self.servers:
upload_zipped_packs(client=server.client,
host=server.name,
pack_path=f'{Build.test_pack_target}/test_pack.zip')
logging.info('Sleeping for 45 seconds while installing nightly packs')
sleep(45)
def test_integrations_post_update(self, new_module_instances: list,
modified_module_instances: list) -> tuple:
"""
Runs 'test-module' on all integrations for the post-update check
Args:
self: A build object
new_module_instances: A list containing new integration instances to run test-module on
modified_module_instances: A list containing old (existing) integration instances to run test-module on
Returns:
* A set of the successful tests, each containing the instance name and the integration name
* A set of the failed tests, each containing the instance name and the integration name
"""
modified_module_instances.extend(new_module_instances)
successful_tests_post, failed_tests_post = self.instance_testing(modified_module_instances, pre_update=False,
use_mock=False)
return successful_tests_post, failed_tests_post
def configure_and_test_integrations_pre_update(self, new_integrations, modified_integrations) -> tuple:
"""
Configures integration instances that exist in the current version and for each integration runs 'test-module'.
Args:
self: Build object
new_integrations: A list containing new integration names
modified_integrations: A list containing modified integration names
Returns:
A tuple consisting of:
* A list of modified module instances configured
* A list of new module instances configured
* A list of integrations that have failed the 'test-module' command execution
* A list of integrations that have succeeded the 'test-module' command execution
* A list of new integration names
"""
tests_for_iteration = self.get_tests()
modified_module_instances, new_module_instances = self.configure_server_instances(
tests_for_iteration,
new_integrations,
modified_integrations)
successful_tests_pre, failed_tests_pre = self.instance_testing(modified_module_instances,
pre_update=True,
use_mock=False)
return modified_module_instances, new_module_instances, failed_tests_pre, successful_tests_pre
@staticmethod
def set_marketplace_url(servers, branch_name, ci_build_number):
logging.info('Copying custom build bucket to xsiam_instance_bucket.')
from_bucket = f'{MARKETPLACE_TEST_BUCKET}/{branch_name}/{ci_build_number}/marketplacev2/content'
output_file = f'{ARTIFACTS_FOLDER_MPV2}/Copy_custom_bucket_to_xsiam_machine.log'
for server in servers:
to_bucket = f'{MARKETPLACE_XSIAM_BUCKETS}/{server.name}'
cmd = f'gsutil -m cp -r gs://{from_bucket} gs://{to_bucket}/'
with open(output_file, "w") as outfile:
subprocess.run(cmd.split(), stdout=outfile, stderr=outfile)
logging.info('Finished copying successfully.')
def concurrently_run_function_on_servers(self, function=None, pack_path=None, service_account=None):
# no need to run this concurrently since we have only one server
pass
def options_handler(args=None):
parser = argparse.ArgumentParser(description='Utility for instantiating and testing integration instances')
parser.add_argument('-u', '--user', help='The username for the login', required=True)
parser.add_argument('-p', '--password', help='The password for the login', required=True)
parser.add_argument('--ami_env', help='The AMI environment for the current run. Options are '
'"Server Master", "Server 6.0". '
'The server url is determined by the AMI environment.')
parser.add_argument('-g', '--git_sha1', help='commit sha1 to compare changes with')
parser.add_argument('-c', '--conf', help='Path to conf file', required=True)
parser.add_argument('-s', '--secret', help='Path to secret conf file')
parser.add_argument('-n', '--is-nightly', type=str2bool, help='Is nightly build')
parser.add_argument('-pr', '--is_private', type=str2bool, help='Is private build')
parser.add_argument('--branch', help='GitHub branch name', required=True)
parser.add_argument('--build-number', help='CI job number where the instances were created', required=True)
parser.add_argument('--test_pack_path', help='Path to where the test pack will be saved.',
default='/home/runner/work/content-private/content-private/content/artifacts/packs')
parser.add_argument('--content_root', help='Path to the content root.',
default='/home/runner/work/content-private/content-private/content')
parser.add_argument('--id_set_path', help='Path to the ID set.')
parser.add_argument('-l', '--tests_to_run', help='Path to the Test Filter.',
default='./artifacts/filter_file.txt')
parser.add_argument('-pl', '--pack_ids_to_install', help='Path to the packs to install file.',
default='./artifacts/content_packs_to_install.txt')
parser.add_argument('--build_object_type', help='Build type running: XSOAR or XSIAM')
parser.add_argument('--xsiam_machine', help='XSIAM machine to use, if it is XSIAM build.')
parser.add_argument('--xsiam_servers_path', help='Path to secret xsiam server metadata file.')
# disable-secrets-detection-start
parser.add_argument('-sa', '--service_account',
help=("Path to gcloud service account, is for circleCI usage. "
"For local development use your personal account and "
"authenticate using Google Cloud SDK by running: "
"`gcloud auth application-default login` and leave this parameter blank. "
"For more information go to: "
"https://googleapis.dev/python/google-api-core/latest/auth.html"),
required=False)
# disable-secrets-detection-end
options = parser.parse_args(args)
return options
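# Illustrative invocation (values are placeholders; only flags defined above are used):
#   python configure_and_test_integration_instances.py -u admin -p '<password>' \
#       -c ./conf.json -s ./secret_conf.json --branch my-branch --build-number 1234 \
#       --ami_env "Server Master" --build_object_type XSOAR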
def check_test_version_compatible_with_server(test, server_version):
"""
Checks if a given test is compatible with the given server version.
Arguments:
test: (dict)
Test playbook object from content conf.json. May contain the following fields: "playbookID",
"integrations", "instance_names", "timeout", "nightly", "fromversion", "toversion.
server_version: (str)
The server numerical version.
Returns:
(bool) True if test is compatible with server version or False otherwise.
"""
test_from_version = format_version(test.get('fromversion', '0.0.0'))
test_to_version = format_version(test.get('toversion', '99.99.99'))
server_version = format_version(server_version)
if not Version(test_from_version) <= Version(server_version) <= Version(test_to_version):
playbook_id = test.get('playbookID')
logging.debug(
f'Test Playbook: {playbook_id} was ignored in the content installation test due to version mismatch '
f'(test versions: {test_from_version}-{test_to_version}, server version: {server_version})')
return False
return True
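# Worked example: a test with fromversion 6.0.0 and toversion 99.99.99 is considered
# compatible with a 6.5.0 server, but not with a 5.5.0 server.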
def filter_tests_with_incompatible_version(tests, server_version):
"""
Filter all tests with incompatible version to the given server.
Arguments:
tests: (list)
List of test objects.
server_version: (str)
The server numerical version.
Returns:
(list): List of filtered tests (those with a compatible version)
"""
filtered_tests = [test for test in tests if
check_test_version_compatible_with_server(test, server_version)]
return filtered_tests
def configure_integration_instance(integration, client, placeholders_map):
"""
Configure an instance for an integration
Arguments:
integration: (dict)
Integration object whose params key-values are set
client: (demisto_client)
The client to connect to
placeholders_map: (dict)
Dict that holds the real values to be replaced for each placeholder.
Returns:
(dict): Configured integration instance
"""
integration_name = integration.get('name')
logging.info(f'Configuring instance for integration "{integration_name}"')
integration_instance_name = integration.get('instance_name', '')
integration_params = change_placeholders_to_values(placeholders_map, integration.get('params'))
is_byoi = integration.get('byoi', True)
validate_test = integration.get('validate_test', True)
integration_configuration = __get_integration_config(client, integration_name)
if not integration_configuration:
return None
# In the integration configuration in content-test-conf conf.json, the test_validate flag was set to false
if not validate_test:
logging.debug(f'Skipping configuration for integration: {integration_name} (it has test_validate set to false)')
return None
module_instance = set_integration_instance_parameters(integration_configuration, integration_params,
integration_instance_name, is_byoi, client)
return module_instance
def filepath_to_integration_name(integration_file_path):
"""Load an integration file and return the integration name.
Args:
integration_file_path (str): The path to an integration yml file.
Returns:
(str): The name of the integration.
"""
integration_yaml = get_yaml(integration_file_path)
integration_name = integration_yaml.get('name')
return integration_name
def get_integration_names_from_files(integration_files_list):
integration_names_list = [filepath_to_integration_name(path) for path in integration_files_list]
return [name for name in integration_names_list if name] # remove empty values
def get_new_and_modified_integration_files(branch_name):
"""Return 2 lists - list of new integrations and list of modified integrations since the first commit of the branch.
Args:
branch_name: The branch name against which we will run the 'git diff' command.
Returns:
(tuple): Returns a tuple of two lists, the file paths of the new integrations and modified integrations.
"""
# get changed yaml files (filter only added and modified files)
file_validator = ValidateManager(skip_dependencies=True)
file_validator.branch_name = branch_name
modified_files, added_files, _, _, _ = file_validator.get_changed_files_from_git()
new_integration_files = [
file_path for file_path in added_files if
find_type(file_path) in [FileType.INTEGRATION, FileType.BETA_INTEGRATION]
]
modified_integration_files = [
file_path for file_path in modified_files if
isinstance(file_path, str) and find_type(file_path) in [FileType.INTEGRATION, FileType.BETA_INTEGRATION]
]
return new_integration_files, modified_integration_files
def is_content_update_in_progress(client):
"""Make request to check if content is updating.
Args:
client (demisto_client): The configured client to use.
Returns:
(str): Returns the request response data which is 'true' if updating and 'false' if not.
"""
host = client.api_client.configuration.host
logging.debug(f'Making "Get" request to server - "{host}" to check if content is installing.')
# make request to check if content is updating
response_data, status_code, _ = demisto_client.generic_request_func(self=client, path='/content/updating',
method='GET', accept='application/json')
if status_code >= 300 or status_code < 200:
result_object = ast.literal_eval(response_data)
message = result_object.get('message', '')
logging.error(f"Failed to check if content is installing - with status code {status_code}\n{message}")
return 'request unsuccessful'
return response_data
def get_content_version_details(client, ami_name):
"""Make request for details about the content installed on the demisto instance.
Args:
client (demisto_client): The configured client to use.