
Ditch remaining references to "main" task pool.
hjoliver committed Sep 26, 2023
1 parent 866aa0d commit 6b0446e
Showing 1 changed file with 11 additions and 11 deletions.
22 changes: 11 additions & 11 deletions cylc/flow/task_pool.py
@@ -243,7 +243,7 @@ def release_runahead_tasks(self):
         Call when RH limit changes.
         """
         if not self.active_tasks or not self.runahead_limit_point:
-            # (At start-up main pool might not exist yet)
+            # (At start-up task pool might not exist yet)
             return False

         released = False
@@ -630,14 +630,14 @@ def load_db_task_action_timers(self, row_idx, row):
             return
         LOG.info("+ %s/%s %s" % (cycle, name, ctx_key))
         if ctx_key == "poll_timer":
-            itask = self._get_main_task_by_id(id_)
+            itask = self._get_task_by_id(id_)
             if itask is None:
                 LOG.warning("%(id)s: task not found, skip" % {"id": id_})
                 return
             itask.poll_timer = TaskActionTimer(
                 ctx, delays, num, delay, timeout)
         elif ctx_key[0] == "try_timers":
-            itask = self._get_main_task_by_id(id_)
+            itask = self._get_task_by_id(id_)
             if itask is None:
                 LOG.warning("%(id)s: task not found, skip" % {"id": id_})
                 return
@@ -709,7 +709,7 @@ def get_or_spawn_task(
         It does not add a spawned task proxy to the pool.
         """
-        ntask = self._get_main_task_by_id(
+        ntask = self._get_task_by_id(
             Tokens(cycle=str(point), task=name).relative_id
         )
         if ntask is None:
@@ -775,7 +775,7 @@ def remove(self, itask, reason=""):
         del itask

     def get_tasks(self) -> List[TaskProxy]:
-        """Return a list of task proxies in the main pool."""
+        """Return a list of task proxies in the task pool."""
         # Cached list only for use internally in this method.
         if self.active_tasks_changed:
             self.active_tasks_changed = False
@@ -799,8 +799,8 @@ def get_task(self, point, name):
         if tasks and rel_id in tasks:
             return tasks[rel_id]

-    def _get_main_task_by_id(self, id_: str) -> Optional[TaskProxy]:
-        """Return main pool task by ID if it exists, or None."""
+    def _get_task_by_id(self, id_: str) -> Optional[TaskProxy]:
+        """Return pool task by ID if it exists, or None."""
         for itask_ids in list(self.active_tasks.values()):
             with suppress(KeyError):
                 return itask_ids[id_]
@@ -973,7 +973,7 @@ def reload_taskdefs(self, config: 'WorkflowConfig') -> None:

         # Now queue all tasks that are ready to run
         for itask in self.get_tasks():
-            # Recreate data store elements from main pool.
+            # Recreate data store elements from task pool.
             self.create_data_store_elements(itask)
             if itask.state.is_queued:
                 # Already queued
@@ -1267,7 +1267,7 @@ def spawn_on_output(self, itask, output, forced=False):
                 cycle=str(c_point),
                 task=c_name,
             ).relative_id
-            c_task = self._get_main_task_by_id(c_taskid)
+            c_task = self._get_task_by_id(c_taskid)
             if c_task is not None and c_task != itask:
                 # (Avoid self-suicide: A => !A)
                 self.merge_flows(c_task, itask.flow_nums)
@@ -1404,7 +1404,7 @@ def spawn_on_all_outputs(
                 cycle=str(c_point),
                 task=c_name,
             ).relative_id
-            c_task = self._get_main_task_by_id(c_taskid)
+            c_task = self._get_task_by_id(c_taskid)
             if c_task is not None:
                 # already spawned
                 continue
@@ -1622,7 +1622,7 @@ def _set_outputs(self, point, taskdef, outputs, flow_nums, flow_wait):
         Do not spawn the target task if it is not already in the pool, but
         update the DB to reflect the set outputs, and spawn the children.
         """
-        itask = self._get_main_task_by_id(
+        itask = self._get_task_by_id(
             Tokens(
                 cycle=str(point),
                 task=taskdef.name
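For orientation, a minimal sketch (not part of the commit) of the lookup pattern the call sites above use: build a relative task ID with Tokens and pass it to the renamed _get_task_by_id. The find_active_task wrapper and its pool argument are hypothetical; Tokens and TaskProxy are the cylc.flow classes already used in the diff.

# Sketch only: illustrates the ID-based pool lookup shown in this diff.
# "pool" is assumed to be a TaskPool instance; find_active_task is a
# hypothetical helper, not part of cylc/flow/task_pool.py.
from typing import Optional

from cylc.flow.id import Tokens
from cylc.flow.task_proxy import TaskProxy


def find_active_task(pool, name: str, point) -> Optional[TaskProxy]:
    """Return the active task proxy for name at point, or None."""
    # Same ID construction as the call sites in the diff.
    rel_id = Tokens(cycle=str(point), task=name).relative_id
    return pool._get_task_by_id(rel_id)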
