parallel_test_single_folder.py
#!/usr/bin/env python3
"""Analyse a set of Solidity contracts with Mythril using a thread pool."""
from multiprocessing.dummy import Pool as ThreadPool
import fnmatch
import json
import os
import sys
import threading

# Shell command template: analyse one contract with Mythril and redirect the
# JSON report (plus timing information and stderr) into the result file.
MYTHRIL = (
    "command time -v myth a {} --parallel-solving --solc-json {} "
    "--execution-timeout 60 -o json > {} 2>&1"
)

MAX_NO_OF_THREADS = 1
CONTRACTS_PER_THREAD = 10000  # batch size for analyse_contracts_in_chunk
result_path_root = os.path.expanduser("~/results")
solc_json = "./solc_json.json"
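
# A minimal sketch of what the solc configuration file might contain. This
# example is an assumption, not taken from this repository: the --solc-json
# option points at a JSON file with compiler settings such as import
# remappings, e.g.
#
#   {
#       "remappings": ["@openzeppelin/=node_modules/@openzeppelin/"]
#   }
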
def analyse_contract(contract_file_full_path, solc_json_file=None):
    """Run Mythril on a single .sol file, skipping contracts already analysed."""
    if solc_json_file is None:
        # Resolve the solc config at call time so a path given on the command
        # line (which rebinds the module-level solc_json) is honoured.
        solc_json_file = solc_json
    if not fnmatch.fnmatch(contract_file_full_path, "*.sol"):
        return
    contract_file = os.path.basename(contract_file_full_path)
    json_result_file = os.path.join(
        result_path_root, contract_file.replace(".sol", ".json")
    )
    if os.path.exists(json_result_file):
        print(
            "{}>> Skipping {}.....".format(
                threading.current_thread().name, contract_file
            )
        )
        return
    print(
        "{}>> Analysing {}.....".format(
            threading.current_thread().name, contract_file
        )
    )
    os.system(
        MYTHRIL.format(contract_file_full_path, solc_json_file, json_result_file)
    )
    print(
        "{}>> Finished analysing {}".format(
            threading.current_thread().name, contract_file
        )
    )

def analyse_contracts_in_chunk(chunk):
    """Analyse a list of contract paths sequentially (helper for chunked mapping)."""
    for contract in chunk:
        analyse_contract(contract)

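
# Illustrative sketch (an assumption, not part of the original workflow): the
# helper above could be combined with CONTRACTS_PER_THREAD to hand each worker
# a batch of contracts instead of a single file, e.g.
#
#   chunks = [files[i:i + CONTRACTS_PER_THREAD]
#             for i in range(0, len(files), CONTRACTS_PER_THREAD)]
#   pool.map(analyse_contracts_in_chunk, chunks)
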
if __name__ == "__main__":
    if len(sys.argv) < 3:
        print("+" * 60)
        print("No dataset file specified!")
        print("\t\t\t(or)")
        print("No solc configuration specified!")
        print(
            """
    Usage: ./parallel_test_single_folder.py <contracts_index> <solc_json_file> [<results_folder>] [<no_of_threads>]
        contracts_index: a JSON file mapping contract names to .sol paths, resolved relative to ../sourcecode
        solc_json_file: the solc config file defining compiler configuration such as import remappings
        results_folder: optional folder where the results will be stored, defaults to ~/results/
        no_of_threads: optional maximum number of threads running concurrently at any time, defaults to 1
    """
        )
        sys.exit(1)
    if not os.path.exists(sys.argv[1]):
        print("Dataset file not found:", sys.argv[1])
        sys.exit(1)
    dataset_abs_path = os.path.abspath(sys.argv[1])
    solc_json = os.path.abspath(sys.argv[2])
    if len(sys.argv) >= 4:
        result_path_root = os.path.expanduser(sys.argv[3])
    if not os.path.exists(result_path_root):
        os.makedirs(result_path_root)
    if len(sys.argv) >= 5:
        MAX_NO_OF_THREADS = int(sys.argv[4])
print("+" * 60, end="\n\n")
print(f"The results will be stored in:", result_path_root)
print("The maximum number of threads will be limited to:", MAX_NO_OF_THREADS, end="\n\n")
print("+" * 60, end="\n\n")
    with open(dataset_abs_path) as fp:
        to_analyse = json.load(fp)
    files = [os.path.join("../sourcecode", to_analyse[key]) for key in to_analyse]

    # Map every contract onto the thread pool; each worker shells out to Mythril.
    pool = ThreadPool(MAX_NO_OF_THREADS)
    pool.map(analyse_contract, files)
    pool.close()
    pool.join()
    print()
    print("+" * 60, end="\n\n")
    print("Finished analysis.")
    print("The results are in:", result_path_root, end="\n\n")
    print("+" * 60, end="\n\n")