Skip to content

Commit

Permalink
Merge pull request #26 from RENCI/Remove-ASGS
Browse files Browse the repository at this point in the history
updating to use the new unified database (apsviz + asgs_dashboard -> …
  • Loading branch information
PhillipsOwen authored Feb 2, 2024
2 parents 19735ec + d6bcdcd commit e5dddad
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 15 deletions.
10 changes: 5 additions & 5 deletions src/common/pg_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def get_job_defs(self):
sql: str = 'SELECT public.get_supervisor_job_defs_json()'

# get the data
ret_val = self.exec_sql('asgs', sql)
ret_val = self.exec_sql('apsviz', sql)

# return the data
return ret_val
Expand All @@ -75,7 +75,7 @@ def get_new_runs(self):
sql: str = 'SELECT public.get_supervisor_config_items_json()'

# get the data
ret_val = self.exec_sql('asgs', sql)
ret_val = self.exec_sql('apsviz', sql)

# if there were no runs return None
if ret_val == -1:
Expand Down Expand Up @@ -103,11 +103,11 @@ def update_job_status(self, run_id, value):
sql = f"SELECT public.set_config_item({int(run[0])}, '{uid}', 'supervisor_job_status', '{value[:1024]}')"

# run the SQL
ret_val = self.exec_sql('asgs', sql)
ret_val = self.exec_sql('apsviz', sql)

# if there were no errors, commit the updates
if ret_val > -1:
self.commit('asgs')
self.commit('apsviz')

def get_first_job(self, workflow_type: str):
"""
Expand All @@ -119,7 +119,7 @@ def get_first_job(self, workflow_type: str):
sql: str = f"SELECT public.get_supervisor_job_order('{workflow_type}')"

# get the order of jobs for this workflow type
jobs_in_order = self.exec_sql('asgs', sql)
jobs_in_order = self.exec_sql('apsviz', sql)

# if we got a list get the first one
if isinstance(jobs_in_order, list):
Expand Down
2 changes: 1 addition & 1 deletion src/common/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def send_slack_msg(self, run_id: str, msg: str, channel: str, debug_mode: bool =
:param msg: the msg to be sent
:param channel: the Slack channel to post the message to
:param debug_mode: mode to indicate that this is a no-op
:param instance_name: the name of the ASGS instance
:param instance_name: the name of the APSVIZ instance
:param emoticon: an emoticon if set
:return: nothing
"""
Expand Down
10 changes: 5 additions & 5 deletions src/supervisor/job_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,11 +54,11 @@ def __init__(self):
# declare the secret environment variables
self.secret_env_params: list = [{'name': 'LOG_LEVEL', 'key': 'log-level'},
{'name': 'LOG_PATH', 'key': 'log-path'},
{'name': 'ASGS_DB_HOST', 'key': 'asgs-host'},
{'name': 'ASGS_DB_PORT', 'key': 'asgs-port'},
{'name': 'ASGS_DB_USERNAME', 'key': 'asgs-username'},
{'name': 'ASGS_DB_PASSWORD', 'key': 'asgs-password'},
{'name': 'ASGS_DB_DATABASE', 'key': 'asgs-database'},
{'name': 'ASGS_DB_HOST', 'key': 'apsviz-host'},
{'name': 'ASGS_DB_PORT', 'key': 'apsviz-port'},
{'name': 'ASGS_DB_USERNAME', 'key': 'apsviz-username'},
{'name': 'ASGS_DB_PASSWORD', 'key': 'apsviz-password'},
{'name': 'ASGS_DB_DATABASE', 'key': 'apsviz-database'},
{'name': 'APSVIZ_DB_HOST', 'key': 'apsviz-host'},
{'name': 'APSVIZ_DB_PORT', 'key': 'apsviz-port'},
{'name': 'APSVIZ_DB_USERNAME', 'key': 'apsviz-username'},
Expand Down
8 changes: 4 additions & 4 deletions src/supervisor/job_supervisor.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def __init__(self):

# specify the DB to get a connection
# note the extra comma makes this single item a singleton tuple
db_names: tuple = ('asgs',)
db_names: tuple = ('apsviz',)

# assign utility objects
self.util_objs: dict = {'create': JobCreate(), 'k8s_find': JobFind(), 'pg_db': PGImplementation(db_names, _logger=self.logger),
Expand Down Expand Up @@ -304,7 +304,7 @@ def get_base_command_line(self, run: dict, job_type: JobType) -> (list, bool):
been set to '[""]' in the DB also note that the supervisor should be terminated prior to
killing the job to avoid data directory removal (if that matters)
command_line_params = ['/bin/sh', '-c', 'while true; do date; sleep 3600; done']
update public."ASGS_Mon_supervisor_config" set command_line='[""]', command_matrix='[""]'
update public."supervisor_config" set command_line='[""]', command_matrix='[""]'
where id=;
:param run: the run parameters
Expand Down Expand Up @@ -585,9 +585,9 @@ def check_input_params(self, run_info: dict) -> (str, str, bool):
# get the workflow type
if 'workflow_type' in run_info:
workflow_type = run_info['workflow_type']
# if there is no workflow type default to ASGS legacy runs
# if there is no workflow type default to ECFLOW runs
else:
workflow_type = 'ASGS'
workflow_type = 'ECFLOW'

# get the physical location of the cluster that initiated the run
if 'physical_location' in run_info:
Expand Down

0 comments on commit e5dddad

Please sign in to comment.