Skip to content

Commit

Permalink
Merge branch 'release_24.0' into release_24.1
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Sep 20, 2024
2 parents a9160dd + 0c48cf6 commit 2bbae08
Show file tree
Hide file tree
Showing 6 changed files with 42 additions and 16 deletions.
2 changes: 1 addition & 1 deletion config/plugins/visualizations/editor/templates/editor.mako
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,11 @@
const ajax_url = "${h.url_for( controller='/datasets', action='index')}/" + hda_id + "/display";
const data = httpGet(ajax_url);
document.getElementById("editor").innerHTML = data;
var editor = ace.edit("editor", {
mode: "ace/mode/powershell",
theme: "ace/theme/textmate"
});
editor.setValue(data, -1);
</script>
</body>
</html>
2 changes: 1 addition & 1 deletion lib/galaxy/config/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -597,7 +597,7 @@ def admin_users(self):
@admin_users.setter
def admin_users(self, value):
self._admin_users = value
self.admin_users_list = listify(value)
self.admin_users_list = listify(value, do_strip=True)

def is_admin_user(self, user: Optional["User"]) -> bool:
"""Determine if the provided user is listed in `admin_users`."""
Expand Down
9 changes: 5 additions & 4 deletions lib/galaxy/model/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -578,10 +578,11 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs):
self._attach_raw_id_if_editing(dataset_instance, dataset_attrs)

# Older style...
if "uuid" in dataset_attrs:
dataset_instance.dataset.uuid = dataset_attrs["uuid"]
if "dataset_uuid" in dataset_attrs:
dataset_instance.dataset.uuid = dataset_attrs["dataset_uuid"]
if self.import_options.allow_edit:
if "uuid" in dataset_attrs:
dataset_instance.dataset.uuid = dataset_attrs["uuid"]
if "dataset_uuid" in dataset_attrs:
dataset_instance.dataset.uuid = dataset_attrs["dataset_uuid"]

self._session_add(dataset_instance)

Expand Down
18 changes: 12 additions & 6 deletions lib/galaxy/model/store/discover.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,11 +132,6 @@ def create_dataset(
)
self.persist_object(primary_data)

if init_from:
self.permission_provider.copy_dataset_permissions(init_from, primary_data)
primary_data.state = init_from.state
else:
self.permission_provider.set_default_hda_permissions(primary_data)
else:
ld = galaxy.model.LibraryDataset(folder=library_folder, name=name)
ldda = galaxy.model.LibraryDatasetDatasetAssociation(
Expand Down Expand Up @@ -208,6 +203,7 @@ def create_dataset(
filename=filename,
link_data=link_data,
output_name=output_name,
init_from=init_from,
)
else:
storage_callbacks.append(
Expand All @@ -218,11 +214,14 @@ def create_dataset(
filename=filename,
link_data=link_data,
output_name=output_name,
init_from=init_from,
)
)
return primary_data

def finalize_storage(self, primary_data, dataset_attributes, extra_files, filename, link_data, output_name):
def finalize_storage(
self, primary_data, dataset_attributes, extra_files, filename, link_data, output_name, init_from
):
if primary_data.dataset.purged:
# metadata won't be set, maybe we should do that, then purge ?
primary_data.dataset.file_size = 0
Expand All @@ -243,6 +242,13 @@ def finalize_storage(self, primary_data, dataset_attributes, extra_files, filena
else:
# We are sure there are no extra files, so optimize things that follow by setting total size also.
primary_data.set_size(no_extra_files=True)

if init_from:
self.permission_provider.copy_dataset_permissions(init_from, primary_data)
primary_data.state = init_from.state
else:
self.permission_provider.set_default_hda_permissions(primary_data)

# TODO: this might run set_meta after copying the file to the object store, which could be inefficient if job working directory is closer to the node.
self.set_datasets_metadata(datasets=[primary_data], datasets_attributes=[dataset_attributes])

Expand Down
26 changes: 22 additions & 4 deletions test/integration/objectstore/test_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,17 @@
import os
import string

from galaxy_test.driver.integration_util import (
integration_module_instance,
integration_tool_runner,
)
from ._base import (
BaseObjectStoreIntegrationTestCase,
files_count,
)
from .test_selection_with_resource_parameters import DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE

DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE = string.Template(
HIERARCHICAL_OBJECT_STORE_CONFIG_TEMPLATE = string.Template(
"""<?xml version="1.0"?>
<object_store type="hierarchical">
<backends>
Expand Down Expand Up @@ -39,7 +44,20 @@
TEST_INPUT_FILES_CONTENT = "1 2 3"


class TestObjectStoreJobsIntegration(BaseObjectStoreIntegrationTestCase):
class TestDistributedObjectStore(BaseObjectStoreIntegrationTestCase):
@classmethod
def handle_galaxy_config_kwds(cls, config):
super().handle_galaxy_config_kwds(config)
config["metadata_strategy"] = "directory"
config["object_store_store_by"] = "uuid"
cls._configure_object_store(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE, config)


instance = integration_module_instance(TestDistributedObjectStore)
test_tools = integration_tool_runner(["all_output_types"])


class TestObjectStoreJobsIntegration(TestDistributedObjectStore):
# setup by _configure_object_store
files1_path: str
files2_path: str
Expand All @@ -48,7 +66,7 @@ class TestObjectStoreJobsIntegration(BaseObjectStoreIntegrationTestCase):
@classmethod
def handle_galaxy_config_kwds(cls, config):
super().handle_galaxy_config_kwds(config)
cls._configure_object_store(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE, config)
cls._configure_object_store(HIERARCHICAL_OBJECT_STORE_CONFIG_TEMPLATE, config)

def setUp(self):
super().setUp()
Expand All @@ -68,7 +86,7 @@ def setUp(self):
def test_files_count_and_content_in_each_objectstore_backend(self):
"""
According to the ObjectStore configuration given in the
`DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE` variable, datasets
`HIERARCHICAL_OBJECT_STORE_CONFIG_TEMPLATE` variable, datasets
can be stored on three backends, named:
- primary/files1;
- primary/files2;
Expand Down
1 change: 1 addition & 0 deletions test/integration/test_extended_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
"collection_creates_dynamic_nested_from_json_elements",
"implicit_conversion",
"environment_variables",
"all_output_types",
]


Expand Down

0 comments on commit 2bbae08

Please sign in to comment.