# main.py

import logging
import socket
import ssl
from datetime import datetime, timezone
from urllib.parse import urlparse

import dill
import pandas as pd
import requests
import whois
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from OpenSSL import crypto
from pydantic import BaseModel

app = FastAPI()

# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Load the trained model
model_path = 'Model.pkl'


def load_model(filename):
    with open(filename, 'rb') as file:
        return dill.load(file)


try:
    model = load_model(model_path)
    logger.info(f"Model loaded from {model_path}")
except Exception as e:
    logger.error(f"Error loading model: {e}")
    model = None  # leave a clear sentinel instead of an undefined name
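
# A minimal sketch of how Model.pkl might have been produced (an assumption:
# the training code is not part of this file; all that matters here is that
# the object exposes predict_proba and was serialized with dill):
#
#     import dill
#     with open('Model.pkl', 'wb') as f:
#         dill.dump(trained_pipeline, f)  # 'trained_pipeline' is hypothetical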


class URLData(BaseModel):
    url: str


def calculate_url_depth(url):
    # Depth of the URL path; an empty or root path counts as 0.
    path = urlparse(url).path
    return path.count('/') - 1 if path not in ('', '/') else 0


def get_protocol(url):
    return urlparse(url).scheme


def get_domain(url):
    return urlparse(url).netloc
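
# Quick illustrative checks for the URL helpers above:
#     calculate_url_depth('https://example.com/a/b/c')  -> 2
#     get_protocol('https://example.com/login')         -> 'https'
#     get_domain('https://example.com/login')           -> 'example.com'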


def get_ssl_age(domain):
    # Age in days of the domain's TLS certificate, measured from its
    # notBefore (issue) date.
    try:
        context = ssl.create_default_context()
        conn = context.wrap_socket(
            socket.socket(socket.AF_INET),
            server_hostname=domain,
        )
        conn.settimeout(10)
        conn.connect((domain, 443))
        cert = conn.getpeercert(True)  # DER-encoded certificate bytes
        pem_cert = ssl.DER_cert_to_PEM_cert(cert)
        conn.close()
        x509 = crypto.load_certificate(crypto.FILETYPE_PEM, pem_cert)
        timestamp = x509.get_notBefore().decode('utf-8')  # e.g. '20240101000000Z'
        issue_date = datetime.strptime(timestamp, '%Y%m%d%H%M%S%z').replace(tzinfo=timezone.utc)
        now = datetime.now(timezone.utc)
        age = now - issue_date
        return age.days
    except Exception as e:
        logger.error(f"Error getting SSL age for domain {domain}: {e}")
        return None
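
# Example (requires network access; the value depends on when the site's
# certificate was last renewed): get_ssl_age('github.com') -> e.g. 120.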


def get_domain_info(domain):
    try:
        info = whois.whois(domain)
        created_date = info.creation_date
        if isinstance(created_date, list):
            created_date = created_date[0]
        # Some WHOIS servers return timezone-aware datetimes; drop the tzinfo
        # so the subtraction against the naive datetime.now() cannot raise.
        if created_date is not None and created_date.tzinfo is not None:
            created_date = created_date.replace(tzinfo=None)
        domain_age = (datetime.now() - created_date).days if created_date else None
        registrar = info.registrar
        return domain_age, registrar
    except Exception as e:
        logger.error(f"Error getting domain info for {domain}: {e}")
        return None, None
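
# Example (requires network access; values drift over time):
# get_domain_info('github.com') might return something like (6000, 'MarkMonitor, Inc.').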


def get_page_rank(domain):
    try:
        url = 'https://openpagerank.com/api/v1.0/getPageRank'
        query = {'domains[]': [domain]}
        # Note: the API key is hardcoded; reading it from an environment
        # variable would be safer.
        headers = {'API-OPR': 's8wkos0kkckcgos4sock0s448k4o8o8gko4g0ow4'}
        response = requests.get(url, headers=headers, params=query)
        response.raise_for_status()  # Check for HTTP errors
        data = response.json()
        if 'response' in data and data['response']:
            return data['response'][0]['rank']
        else:
            logger.error(f"Unexpected response structure: {data}")
            return None
    except Exception as e:
        logger.error(f"Error getting page rank for domain {domain}: {e}")
        return None
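
# For reference, a successful Open PageRank response is expected to look
# roughly like this (field names per the public API; values illustrative):
#
#     {"status_code": 200,
#      "response": [{"status_code": 200, "error": "",
#                    "page_rank_integer": 9, "page_rank_decimal": 9.0,
#                    "rank": "13", "domain": "example.com"}]}
#
# so the 'rank' extracted above is the domain's global rank, as a string.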


@app.post("/extract")
def extract_data(data: URLData):
    logger.info(f"Received data: {data}")
    url = data.url
    try:
        url_length = len(url)
        url_depth = calculate_url_depth(url)
        protocol = get_protocol(url)
        domain = get_domain(url)
        ssl_age = get_ssl_age(domain)
        domain_age, registrar = get_domain_info(domain)
        page_rank = get_page_rank(domain)
        result = {
            "urlLength": url_length,
            "urlDepth": url_depth,
            "protocol": protocol,
            "domain": domain,
            "domainAge": domain_age,
            "registrar": registrar,
            "sslAge": ssl_age,
            "pageRank": page_rank,
        }
        logger.info(f"Processed data: {result}")
        return result
    except Exception as e:
        logger.error(f"Error processing data: {e}")
        raise HTTPException(status_code=400, detail=str(e))
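
# Example request against a locally running server (host/port are assumptions;
# they depend on how uvicorn is started):
#
#     curl -X POST http://127.0.0.1:8000/extract \
#          -H "Content-Type: application/json" \
#          -d '{"url": "https://example.com/login"}'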


@app.post("/predict")
def predict_score(data: URLData):
    logger.info(f"Received data for prediction: {data}")
    url = data.url
    try:
        # Extract features
        url_length = len(url)
        url_depth = calculate_url_depth(url)
        protocol = get_protocol(url)
        domain = get_domain(url)
        ssl_age = get_ssl_age(domain)
        domain_age, registrar = get_domain_info(domain)
        rank = get_page_rank(domain)
        page_rank = int(rank) if rank is not None else None  # int(None) would raise
        # Create a dictionary of features
        features_dict = {
            "urlLength": [url_length],
            "urlDepth": [url_depth],
            "protocol": [protocol],
            "domain": [domain],
            "domainAge": [domain_age],
            "registrar": [registrar],
            "sslAge": [ssl_age],
            "PageRank": [page_rank]
        }
        # Convert to pandas DataFrame
        df = pd.DataFrame(features_dict)
        logger.info(f"Features extracted: {features_dict}")
        # Make prediction
        prediction = model.predict_proba(df)
        score = prediction[0][1]
        result = {
            "predictedScore": float(score)  # Convert to float for JSON serialization
        }
        logger.info(f"Prediction result: {result}")
        return result
    except Exception as e:
        logger.error(f"Error making prediction: {e}")
        raise HTTPException(status_code=400, detail=str(e))
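
# Example request (illustrative, same assumptions as above):
#
#     curl -X POST http://127.0.0.1:8000/predict \
#          -H "Content-Type: application/json" \
#          -d '{"url": "https://example.com/login"}'

# A minimal sketch for running the app directly; the repo may instead launch
# it with `uvicorn main:app`, so treat the host and port as assumptions:
if __name__ == "__main__":
    import uvicorn  # assumed available alongside FastAPI

    uvicorn.run(app, host="0.0.0.0", port=8000)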