
recaptcha commented
GiuseppeBocci authored Aug 30, 2023
1 parent 6a1a450 commit 0204c4a
Showing 4 changed files with 22 additions and 25 deletions.
25 changes: 10 additions & 15 deletions NMTF_link.py
@@ -11,7 +11,6 @@
from scripts.processNetwork import runNetworkRE, runNetworkMM

warnings.filterwarnings('ignore')

if __name__ == '__main__':
matplotlib.use('agg')

@@ -68,8 +67,11 @@ def predict(num_iterations, th, network=None, rng=np.random.default_rng()):
try:
max_iter_value = graph_topology["number.of.iterations"]
max_iter = int(max_iter_value)
if max_iter > MAX_ITER:
raise ValueError()
except ValueError:
print(f"Invalid number of iterations {max_iter_value}, set default value {max_iter}")
max_iter = MAX_ITER
print(f"Invalid number of iterations {max_iter_value}, set default value {MAX_ITER}")

try:
threshold = graph_topology["score.threshold"]
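Taken together, the added lines cap the configured iteration count at MAX_ITER. A minimal, self-contained sketch of the resulting validation (the MAX_ITER value and the graph_topology contents below are illustrative, not from the repository):

MAX_ITER = 200  # hypothetical cap; the real constant is defined in NMTF_link.py
graph_topology = {"number.of.iterations": "500"}  # hypothetical parsed setup file

try:
    # Fall back to MAX_ITER when the value is missing, non-numeric, or too large.
    max_iter = int(graph_topology["number.of.iterations"])
    if max_iter > MAX_ITER:
        raise ValueError()
except (KeyError, ValueError):
    max_iter = MAX_ITER
    print(f"Invalid number of iterations, set default value {MAX_ITER}")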
@@ -128,6 +130,12 @@ def predict(num_iterations, th, network=None, rng=np.random.default_rng()):
best_epsilon_arr = []
ss = np.random.SeedSequence()
# Spawn off N_ITERATIONS + 1 child SeedSequences to pass to the child processes.
# completely independent; since we use numpy 1.18, I will refer to
# https://albertcthomas.github.io/good-practices-random-number-generators/
# SeedSequence spawning is needed: https://numpy.org/doc/1.18/reference/random/parallel.html
# it implements an algorithm that guarantees a high probability that two seeds generated close together
# are very different.
# SeedSequence avoids these problems by using successions of integer hashes with good avalanche properties
child_seeds = ss.spawn(N_ITERATIONS + 1)
streams = [np.random.default_rng(s) for s in child_seeds]
processes = list()
@@ -144,19 +152,6 @@ def predict(num_iterations, th, network=None, rng=np.random.default_rng()):

print("best_epsilon_arr: "+str(best_epsilon_arr))

# completely independent; since we use numpy 1.18, I will refer to
# https://albertcthomas.github.io/good-practices-random-number-generators/
# SeedSequence spawning is needed: https://numpy.org/doc/1.18/reference/random/parallel.html
# it implements an algorithm that guarantees a high probability that two seeds generated close together
# are very different.
# SeedSequence avoids these problems by using successions of integer hashes with good avalanche properties
"""
ss = SeedSequence(12345)
# Spawn off 10 child SeedSequences to pass to child processes.
child_seeds = ss.spawn(10)
streams = [default_rng(s) for s in child_seeds]
stream[x].random() # generate np.float64 random number between 0. and 1.
"""
complete_plot(metric)

res_best_epsilon = statistics.median(best_epsilon_arr)
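As a reference for the comment block above, a minimal sketch of the SeedSequence spawning pattern used in this file (numpy >= 1.17 assumed; N_ITERATIONS stands in for the constant defined in NMTF_link.py):

import numpy as np

N_ITERATIONS = 10  # stand-in for the constant used in NMTF_link.py

# One parent SeedSequence; each spawned child seeds an independent Generator.
ss = np.random.SeedSequence()
child_seeds = ss.spawn(N_ITERATIONS + 1)
streams = [np.random.default_rng(s) for s in child_seeds]

print(streams[0].random())  # np.float64 in [0.0, 1.0), independent of the other streams

Each child process can then be handed one Generator from streams instead of sharing global numpy random state.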
18 changes: 11 additions & 7 deletions app.py
@@ -11,8 +11,8 @@
app.config['MAX_CONTENT_LENGTH'] = 8 * 1024 * 1024 # 8 MB max file size / error 413
app.config['SOCK_SERVER_OPTIONS'] = {'ping_interval': 25} # 'max_message_size': 10240} # websocket options
app.config['MAX_NUM_FILES'] = 10 # Max number of files that can be elaborated
app.config['RECAPTCHA_SITE_KEY'] = "6LcmdFgmAAAAAG7NkCZUq71j1Kh9rUENWa44R7Fm" # Site key for g-recaptcha v3
app.config['RECAPTCHA_SECRET_KEY'] = "6LcmdFgmAAAAAOTksc1WGWIGExkPWjVa0LfYd5qn" # Secret key for g-recaptcha v3
app.config['RECAPTCHA_SITE_KEY'] = "" # Site key for g-recaptcha v3
app.config['RECAPTCHA_SECRET_KEY'] = "" # Secret key for g-recaptcha v3
app.config['RECAPTCHA_VERIFY_URL'] = "https://www.google.com/recaptcha/api/siteverify" # g-recaptcha verify url
sock = Sock(app) # websocket initialization

@@ -164,6 +164,7 @@ def create_set_file(req, files):
leftNodes = req.getlist("nodes.left")
rightNodes = req.getlist("nodes.right")
main = req["main"]
print("main", main)
fileString += " graph.datasets:\n"
for i in range(len(files)):
fileString += " - nodes.left: " + leftNodes[i] + "\n"
@@ -175,6 +176,7 @@ def create_set_file(req, files):
else:
fileString += "0"
fileString += "\n"
print(fileString)
unique = new_unique_name()
create_dir(unique)
try:
@@ -204,10 +206,12 @@ def loader():
If the form is not consistent, an error page 400 is returned. If the recaptcha test fails, an error page 401 is
returned.
"""
recaptcha_test = requests.post(url=f"{app.config['RECAPTCHA_VERIFY_URL']}" +
f"?secret={app.config['RECAPTCHA_SECRET_KEY']}" +
f"&response={request.form['g-recaptcha-response']}").json()
if recaptcha_test["success"]:
# Uncomment this part and configure the app.config['RECAPTCHA_SECRET_KEY'] and app.config['RECAPTCHA_SITE_KEY']
# to activate the recaptcha test
# recaptcha_test = requests.post(url=f"{app.config['RECAPTCHA_VERIFY_URL']}" +
# f"?secret={app.config['RECAPTCHA_SECRET_KEY']}" +
# f"&response={request.form['g-recaptcha-response']}").json()
if True: # recaptcha_test["success"]:
unique = ""
set_file = request.files["sfile"]
files = request.files.getlist("afiles")
@@ -232,7 +236,7 @@ def loader():
else:
filesNames = list(map(lambda f: f.filename, files))
unique = create_set_file(request.form, filesNames)
if unique is not None:
if unique is not None and check_files(files):
save_files(files, unique)
add_to_unique_not_used(unique)
else:
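For reference, the verification step commented out above could be restored along these lines once the reCAPTCHA keys are configured again (a sketch only: recaptcha_passed is a hypothetical helper, and it assumes the same app.config keys plus the standard siteverify JSON response with a "success" field):

import requests

def recaptcha_passed(app, token):
    # Hypothetical helper: ask Google's siteverify endpoint whether the
    # submitted g-recaptcha-response token is valid.
    result = requests.post(
        url=app.config['RECAPTCHA_VERIFY_URL'],
        data={'secret': app.config['RECAPTCHA_SECRET_KEY'], 'response': token},
    ).json()
    return result.get('success', False)

Inside loader(), the guard would then read if recaptcha_passed(app, request.form['g-recaptcha-response']): instead of if True:.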
2 changes: 1 addition & 1 deletion files_management.py
@@ -8,7 +8,7 @@
from werkzeug.utils import secure_filename

ROUTINE_S = 30 # 30 seconds
FILE_LIFE_S = 60 * 1 # 20 minutes
FILE_LIFE_S = 60 * 2 # 2 minutes
UNIQUE_NOT_USED_LIFE = 30 # 30 seconds
ROUTINE_UNIQUE_NOT_USED = 30 # 30 seconds
Uniques = set() # set containing unique dir names
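The constants above drive the periodic cleanup of uploaded working directories; the routine itself is not shown in this diff, so the helper below is only an illustrative sketch of how FILE_LIFE_S could be applied:

import os
import time

FILE_LIFE_S = 60 * 2  # 2 minutes, as set by this commit

def is_expired(path, now=None):
    # Illustrative only: a path counts as expired once its last modification
    # is older than FILE_LIFE_S seconds.
    now = time.time() if now is None else now
    return now - os.path.getmtime(path) > FILE_LIFE_S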
2 changes: 0 additions & 2 deletions networkx-yer-or-no.txt
@@ -1,5 +1,3 @@
# graph_topology.yaml

Websocket connection opened!
nmtf app lunched
metric : APS
