Skip to content

Commit

Permalink
Merge branch 'release_24.0' into release_24.1
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Aug 13, 2024
2 parents 8d13d70 + cd2f1f8 commit 0bc1a46
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,9 @@ export default {
// logic from legacy code
return !!(this.contains_file_or_folder && Galaxy.user);
},
totalRows: function () {
return this.metadata?.total_rows ?? 0;
},
},
created() {
const Galaxy = getGalaxyInstance();
Expand Down Expand Up @@ -241,7 +244,8 @@ export default {
const selected = await this.services.getFilteredFolderContents(
this.folder_id,
this.unselected,
this.$parent.searchText
this.$parent.searchText,
this.totalRows
);
this.$emit("setBusy", false);
return selected;
Expand Down
7 changes: 4 additions & 3 deletions client/src/components/Libraries/LibraryFolder/services.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,13 @@ export class Services {
}
}

async getFilteredFolderContents(id, excluded, searchText) {
async getFilteredFolderContents(id, excluded, searchText, limit) {
// The intent of this method is to get folder contents applying
// searchText filters only; we explicitly set limit to 0
// searchText filters only; limit should match the total number of
// items in the folder, so that all items are returned.
const config = {
params: {
limit: 0,
limit,
},
};
searchText = searchText?.trim();
Expand Down
10 changes: 5 additions & 5 deletions lib/galaxy/jobs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2005,13 +2005,13 @@ def fail(message=job.info, exception=None):
# Once datasets are collected, set the total dataset size (includes extra files)
for dataset_assoc in job.output_datasets:
dataset = dataset_assoc.dataset.dataset
if not dataset.purged:
# assume all datasets in a job get written to the same objectstore
quota_source_info = dataset.quota_source_info
collected_bytes += dataset.set_total_size()
else:
# assume all datasets in a job get written to the same objectstore
quota_source_info = dataset.quota_source_info
collected_bytes += dataset.set_total_size()
if dataset.purged:
# Purge, in case job wrote directly to object store
dataset.full_delete()
collected_bytes = 0

user = job.user
if user and collected_bytes > 0 and quota_source_info is not None and quota_source_info.use:
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/model/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -668,9 +668,9 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs):
assert file_source_root
dataset_extra_files_path = os.path.join(file_source_root, dataset_extra_files_path)
persist_extra_files(self.object_store, dataset_extra_files_path, dataset_instance)
# Don't trust serialized file size
dataset_instance.dataset.file_size = None
dataset_instance.dataset.set_total_size() # update the filesize record in the database
# Only trust file size if the dataset is purged. If we keep the data we should check the file size.
dataset_instance.dataset.file_size = None
dataset_instance.dataset.set_total_size() # update the filesize record in the database

if dataset_instance.deleted:
dataset_instance.dataset.deleted = True
Expand Down

0 comments on commit 0bc1a46

Please sign in to comment.