malwaresearch.py (forked from gaijinctfx/malwaresearch)
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import print_function
from bs4 import BeautifulSoup
import urllib2
import sys
import time
import argparse
import re
import requests

# Command-line interface: search (-f) and download (-d) are mutually exclusive,
# and one of them is required.
parser = argparse.ArgumentParser(description='MalwareSearch 0.1\t[github.com/MalwareReverseBrasil/malwaresearch.git]',
                                 epilog='Telegram: https://t.me/MalwareReverseBR')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-f', '--find', help='Search by MD5, SHA1, SHA256 or a common signature name.', action='store', metavar='Sample')
group.add_argument('-d', '--download', help='Download the selected sample.', action='store', metavar='Hash')
parser.add_argument('-w', '--write', help='Save the output results to a file.', action='store', metavar='File')
parser.add_argument('-o', '--output', help='Number of results to show.', action='store', type=int, metavar='Int')
args = parser.parse_args()
# With -w, redirect all output to the given file instead of the terminal.
if args.write is not None:
    sys.stdout = open(args.write, 'w')

t1 = time.time()

# Both modes hit the same openmalware.org search endpoint; the hash or
# signature name is appended to the query string.
host = 'http://openmalware.org'
address = '/search.cgi?search='
if args.find:
    site = host + address + args.find
elif args.download:
    site = host + address + args.download
def download(url):
    """Fetch the search results page and return its raw HTML (None on error)."""
    print('Starting MalwareSearch ...')
    try:
        html = urllib2.urlopen(url).read()
    except urllib2.URLError as e:
        print('Sorry, download error: %s' % e.reason)
        html = None
    return html

try:
    html = download(site)
    soup = BeautifulSoup(html, 'lxml')
except (KeyboardInterrupt, SystemExit):
    print('Keyboard Interrupt')
    sys.exit(0)
if args.download:
    # Grab the first download link on the results page and the sample's
    # original filename (the fifth 'checksum' cell), then save it as a .zip.
    try:
        dow = soup(attrs={'href': re.compile(r'/download\.cgi')})[0]['href']
        name_dow = soup(attrs={'id': 'checksum'})[4].string.strip('\n')
        print('\nStarting download of the malware %s.zip' % name_dow)
        r = requests.get(host + dow)
        with open(name_dow + '.zip', 'wb') as code:
            code.write(r.content)
        print('\nDownload successful - Original Filename: %s' % name_dow)
    except (IndexError, AttributeError, IOError, requests.RequestException):
        print('Download failed!')
        sys.exit(1)
if args.find:
    # The page headline (<h3>) summarises how many samples matched.
    try:
        founds = soup.find('h3').string
    except AttributeError:
        print('Error: Search too short')
        sys.exit(0)
    if args.write is not None:
        print('\n\t\t' + founds + '\n')
    else:
        # Highlight the headline in red on the terminal.
        print('\033[1;31m\n\t\t' + founds + '\n\033[0m')

    # -o controls how many results are printed (capped at 20); default is 1.
    if args.output is not None:
        outnumber = min(args.output, 20)
        print('Result(s) for %s Sample(s)\n' % outnumber)
    else:
        outnumber = 1
        print('This is the first result!\n')
    # Each result occupies five 'checksum' cells. Walk through them, find the
    # SHA256 cell (64 hex characters, no spaces) and print the neighbouring
    # fields relative to its position.
    x = 0
    for i in range(0, 5 * outnumber):
        try:
            tr = soup(attrs={'id': 'checksum'})[i]
        except IndexError:
            break
        valid = tr.text.strip('\n')
        if len(valid) == 64 and ' ' not in valid:
            for j in range(-3, 3):
                try:
                    tr = soup(attrs={'id': 'checksum'})[i - j]
                except IndexError:
                    continue
                area = tr.text.strip('\n')
                if j == 2:
                    print('MD5:\t\t\t' + area)
                    break
                elif j == 1:
                    print('SHA1:\t\t\t' + area)
                elif j == 0:
                    print('SHA256:\t\t\t' + area)
                elif j == -1:
                    print('OCID:\t\t\t' + area)
                elif j == -2:
                    orifile = 'Original Filename:\t' + area
                    if args.write is not None:
                        print(orifile)
                    else:
                        # Highlight the filename in green on the terminal.
                        print('\033[1;32m' + orifile + '\033[0m')
                elif j == -3:
                    # The 'Added:' date lives in a separate table column.
                    td1 = soup(text='Added:')[x].findNext('td').string.strip('\n')
                    x += 1
                    print('\nAdded:\t\t\t' + td1)
t2 = time.time()
total_time = t2 - t1
print('\n\nTotal search time %.2f seconds' % total_time)
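
# Usage sketch (assumptions: Python 2 with bs4, lxml and requests installed,
# and openmalware.org still serving search.cgi; the hash arguments below are
# placeholders, not real samples):
#
#   python malwaresearch.py -f zeus -o 5            # list the first five matches
#   python malwaresearch.py -f <sha256> -w out.txt  # search a hash, save results to a file
#   python malwaresearch.py -d <md5>                # download a sample as <name>.zip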