From e8f07a03e182ec69615773377c5c6467fd917c7f Mon Sep 17 00:00:00 2001
From: Matthias Tafelmeier
Date: Sun, 19 Nov 2023 16:50:48 +0100
Subject: [PATCH] breeder - correct archive db query templating

Let's not use sqlalchemy to generate the SQL queries interacting with
the archive db, because that has led to complications: bindparams was
not working as expected for the String/Text type.
---
 breeder/linux_network_stack/effectuation.py |  9 ++-------
 breeder/linux_network_stack/objective.py    | 11 ++++-------
 breeder/linux_network_stack/optimization.py |  2 +-
 3 files changed, 7 insertions(+), 15 deletions(-)

diff --git a/breeder/linux_network_stack/effectuation.py b/breeder/linux_network_stack/effectuation.py
index c1ee489d..80811502 100644
--- a/breeder/linux_network_stack/effectuation.py
+++ b/breeder/linux_network_stack/effectuation.py
@@ -72,14 +72,9 @@ def run_push_optimization(ti=None):
 
     metric_data = dict(metric=metric_value)
 
     msg = asyncio.run(send_msg_via_nats(subject=f'recon_{identifier}', data_dict=metric_data))
+    breeder_table_name = config.get("name")
 
-    breeder_table_name = f"from_dag_name" # TBD local dag id based name
-
-    query = text("INSERT INTO :table_name VALUES (:setting_id, :setting_full, :setting_result )")
-    query = query.bindparams(bindparam("table_name", breeder_table_name, type_=String),
-                             bindparam("setting_id", setting_id, type_=String),
-                             bindparam("setting_full", settings_full, type_=String),
-                             bindparam("setting_result", metric_data, type_=String))
+    query = f"INSERT INTO {breeder_table_name} VALUES ('{setting_id}', '{settings_full}', '{metric_data}');"
 
     archive_db_engine.execute(query)
diff --git a/breeder/linux_network_stack/objective.py b/breeder/linux_network_stack/objective.py
index 23dab924..eed62cc2 100644
--- a/breeder/linux_network_stack/objective.py
+++ b/breeder/linux_network_stack/objective.py
@@ -1,6 +1,6 @@
 
 
-def objective(trial, identifier, archive_db_url):
+def objective(trial, identifier, archive_db_url, breeder_name):
 
     ###--- definition coroutines ---###
     ### We have to keep to coroutines in the objective function,
@@ -31,13 +31,10 @@ def objective(trial, identifier, archive_db_url):
 
     settings = '\n'.join(settings)
 
     is_setting_explored = False
-    setting_id = hashlib.sha256(str.encode(settings_full)).hexdigest()[0:6]
+    setting_id = hashlib.sha256(str.encode(settings)).hexdigest()[0:6]
 
-    breeder_table_name = f"from_breeder_name" # TBD global knowledge db table nam
-    query = text("SELECT * FROM :table_name WHERE :table_name.setting_id == :setting_id")
-
-    query = query.bindparams(bindparam("table_name", breeder_table_name, type_=String),
-                             bindparam("setting_id", setting_id, type_=String))
+    breeder_table_name = breeder_name
+    query = f"SELECT * FROM {breeder_table_name} WHERE {breeder_table_name}.setting_id = '{setting_id}';"
 
     archive_db_data = archive_db_engine.execute(query).fetchall()
diff --git a/breeder/linux_network_stack/optimization.py b/breeder/linux_network_stack/optimization.py
index 44b949e0..d877e0c0 100644
--- a/breeder/linux_network_stack/optimization.py
+++ b/breeder/linux_network_stack/optimization.py
@@ -37,7 +37,7 @@ def run_optimization():
     # Create a study using Dask-compatible storage
     storage = DaskStorage(InMemoryStorage())
     study = optuna.create_study(directions=__directions, storage=storage)
-    objective_wrapped = lambda trial: objective(trial,identifier, archive_db_url)
+    objective_wrapped = lambda trial: objective(trial, identifier, archive_db_url, config.get('name'))
    # Optimize in parallel on your Dask cluster
     futures = [
         client.submit(study.optimize, objective_wrapped, n_trials=10, pure=False)
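
Note (editor's sketch, not part of the commit): bound parameters can only
stand in for SQL values, never for identifiers such as table names; that
is why bindparam("table_name", ...) could not behave as expected for the
String/Text type: the driver renders the table name as a quoted string
literal, yielding invalid SQL. Should the plain f-string templating above
ever need hardening, a middle ground is to keep the values bound and
interpolate only a validated table name. A minimal sketch, assuming the
legacy SQLAlchemy 1.x engine.execute() API these modules already use
(insert_setting is a hypothetical helper, not present in the tree):

    import re

    from sqlalchemy import text

    def insert_setting(archive_db_engine, table_name,
                       setting_id, settings_full, metric_data):
        # Identifiers (table names) cannot be bound parameters, so
        # validate the name before interpolating it into the statement.
        if not re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", table_name):
            raise ValueError(f"unexpected table name: {table_name!r}")
        # The values themselves can and should remain bound parameters.
        query = text(
            f"INSERT INTO {table_name} "
            "VALUES (:setting_id, :settings_full, :metric_data)"
        )
        archive_db_engine.execute(
            query,
            setting_id=setting_id,
            settings_full=settings_full,
            metric_data=str(metric_data),
        )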