Skip to content

Commit

Permalink
Merge pull request #18068 from davelopez/24.0_fix_history_update_time_after_bulk_op
Browse files Browse the repository at this point in the history

[24.0] Fix history update time after bulk operation
  • Loading branch information
mvdbeek authored Apr 30, 2024
2 parents 6ed6843 + 51d8743 commit 49ca43c
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 6 deletions.
13 changes: 8 additions & 5 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2927,7 +2927,7 @@ def prune(cls, sa_session):
session.execute(q)


class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable):
class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable, UsesCreateAndUpdateTime):
__tablename__ = "history"
__table_args__ = (Index("ix_history_slug", "slug", mysql_length=200),)

Expand Down Expand Up @@ -3094,6 +3094,9 @@ def username(self):
def count(self):
    # Presumably hid_counter holds the next hid to be assigned — TODO confirm
    # against the full model; the number of items created so far is then
    # hid_counter - 1.
    # NOTE(review): in the full source this is likely decorated with
    # @property; the decorator line is not visible in this diff excerpt.
    return self.hid_counter - 1

def update(self):
    # Explicitly bump the mapped update time to "now". Callers use this to
    # force an update-time change when an operation would not otherwise
    # modify the row (the diff's other hunks call it after bulk undelete and
    # after setting tags for exactly this reason).
    self._update_time = now()

def add_pending_items(self, set_output_hid=True):
# These are assumed to be either copies of existing datasets or new, empty datasets,
# so we don't need to set the quota.
Expand Down Expand Up @@ -7362,7 +7365,7 @@ def __init__(self):
self.user = None


class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById):
class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
"""
StoredWorkflow represents the root node of a tree of objects that compose a workflow, including workflow revisions, steps, and subworkflows.
It is responsible for the metadata associated with a workflow including owner, name, published, and create/update time.
Expand Down Expand Up @@ -7740,7 +7743,7 @@ def log_str(self):
InputConnDictType = Dict[str, Union[Dict[str, Any], List[Dict[str, Any]]]]


class WorkflowStep(Base, RepresentById):
class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime):
"""
WorkflowStep represents a tool or subworkflow, its inputs, annotations, and any outputs that are flagged as workflow outputs.
Expand Down Expand Up @@ -10061,7 +10064,7 @@ def equals(self, user_id, provider, authn_id, config):
)


class Page(Base, HasTags, Dictifiable, RepresentById):
class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
__tablename__ = "page"
__table_args__ = (Index("ix_page_slug", "slug", mysql_length=200),)

Expand Down Expand Up @@ -10175,7 +10178,7 @@ class PageUserShareAssociation(Base, UserShareAssociation):
page = relationship("Page", back_populates="users_shared_with")


class Visualization(Base, HasTags, Dictifiable, RepresentById):
class Visualization(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
__tablename__ = "visualization"
__table_args__ = (
Index("ix_visualization_dbkey", "dbkey", mysql_length=200),
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/model/tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ def set_tags_from_list(
if flush:
with transaction(self.sa_session):
self.sa_session.commit()
item.update()
return item.tags

def get_tag_assoc_class(self, item_class):
Expand Down
3 changes: 3 additions & 0 deletions lib/galaxy/webapps/galaxy/services/history_contents.py
Original file line number Diff line number Diff line change
Expand Up @@ -1445,6 +1445,9 @@ def _undelete(self, item: HistoryItemModel):
raise exceptions.ItemDeletionException("This item has been permanently deleted and cannot be recovered.")
manager = self._get_item_manager(item)
manager.undelete(item, flush=self.flush)
# Again, we need to force an update in the edge case where all selected items are already undeleted
# or when the item was purged as undelete will not trigger an update
item.update()

def _purge(self, item: HistoryItemModel, trans: ProvidesHistoryContext):
if getattr(item, "purged", False):
Expand Down
17 changes: 16 additions & 1 deletion lib/galaxy_test/api/test_history_contents.py
Original file line number Diff line number Diff line change
Expand Up @@ -1625,6 +1625,7 @@ def _get_item_with_id_from_history_contents(
return None

def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expected_status_code: int = 200):
    # NOTE(review): this span is a scraped diff view — the "Expand All @@ …"
    # hunk header below (which swallows one argument line of self._put) and
    # the old, now-deleted "return response.json()" line are artifacts of the
    # diff, not part of the final code.
    #
    # Snapshot the history's update_time before the operation so we can
    # assert afterwards that a successful bulk operation bumps it.
    original_history_update_time = self._get_history_update_time(history_id)
    if query:
        # Turn a raw query string into a URL query suffix.
        query = f"?{query}"
    response = self._put(
Expand All @@ -1633,8 +1634,22 @@ def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expec
        json=True,
    )
    self._assert_status_code_is(response, expected_status_code)
return response.json()
    result = response.json()

    if "err_msg" in result or result.get("success_count", 0) == 0:
        # We don't need to check the history update time if there was an error or no items were updated
        return result

    # After a successful operation, history update time should be updated so the changes can be detected by the frontend
    after_bulk_operation_history_update_time = self._get_history_update_time(history_id)
    assert after_bulk_operation_history_update_time > original_history_update_time

    return result

def _assert_bulk_success(self, bulk_operation_result, expected_success_count: int):
    """Assert a bulk-operation result reports the expected successes and no errors.

    The full result payload is attached to the first assertion message so a
    failure shows exactly what the API returned.
    """
    actual_success_count = bulk_operation_result["success_count"]
    assert actual_success_count == expected_success_count, bulk_operation_result
    assert not bulk_operation_result["errors"]

def _get_history_update_time(self, history_id: str):
    """Fetch the history via the API and return its current ``update_time``.

    Returns ``None`` when the response carries no ``update_time`` key.
    """
    response = self._get(f"histories/{history_id}")
    history_details = response.json()
    return history_details.get("update_time")

0 comments on commit 49ca43c

Please sign in to comment.