
Commit 1d64120
Merge branch 'release_24.0' into dev
nsoranzo committed Apr 25, 2024
2 parents d02ac22 + 21a57d2 commit 1d64120
Showing 34 changed files with 192 additions and 107 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/integration.yaml
@@ -62,7 +62,7 @@ jobs:
        id: minikube
        uses: CodingNagger/[email protected]
        with:
-         k8s-version: '1.19.16'
+         k8s-version: '1.23.0'
      - name: Launch Minikube
        run: eval ${{ steps.minikube.outputs.launcher }}
      - name: Check pods
2 changes: 1 addition & 1 deletion .github/workflows/osx_startup.yaml
@@ -50,7 +50,7 @@ jobs:
          path: .tox
          key: tox-cache-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-osx
      - name: Install miniconda # use this job to test using Python from a conda environment
-       uses: conda-incubator/setup-miniconda@v2
+       uses: conda-incubator/setup-miniconda@v3
        with:
          activate-environment: ''
      - name: Restore client cache
7 changes: 4 additions & 3 deletions client/src/api/schema/schema.ts
@@ -3738,7 +3738,7 @@ export interface components {
         * Element Type
         * @description The type of the element. Used to interpret the `object` field.
         */
-        element_type: components["schemas"]["DCEType"];
+        element_type?: components["schemas"]["DCEType"] | null;
        /**
         * Dataset Collection Element ID
         * @example 0123456789ABCDEF
@@ -3755,10 +3755,11 @@
         * Object
         * @description The element's specific data depending on the value of `element_type`.
         */
-        object:
+        object?:
            | components["schemas"]["HDAObject"]
            | components["schemas"]["HDADetailed"]
-           | components["schemas"]["DCObject"];
+           | components["schemas"]["DCObject"]
+           | null;
    };
/**
* DCEType
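A note on the schema change above: with `element_type` and `object` now optional and nullable, API consumers should guard against missing values, presumably to accommodate collection elements that have been precreated but not yet initialized (see the `UNINITIALIZED_ELEMENT` usage in lib/galaxy/managers/collections.py below). A minimal, untested Python sketch of such a guard; the endpoint path and element ID here are illustrative assumptions, not confirmed by this commit:

    import requests

    GALAXY_URL = "http://localhost:8080"  # assumed local Galaxy instance
    ELEMENT_ID = "0123456789ABCDEF"       # hypothetical ID, taken from the schema's @example

    # Fetch a single dataset collection element; the path is an assumption
    # based on Galaxy's general API layout.
    response = requests.get(f"{GALAXY_URL}/api/dataset_collection_element/{ELEMENT_ID}")
    response.raise_for_status()
    element = response.json()

    # Both fields may now be absent or null, e.g. for uninitialized elements.
    element_type = element.get("element_type")
    obj = element.get("object")
    if element_type is None or obj is None:
        print("Element is not yet initialized.")
    else:
        print(f"Element holds a {element_type}.")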
14 changes: 6 additions & 8 deletions client/src/components/Dataset/DatasetStorage/DatasetStorage.vue
@@ -59,14 +59,7 @@ watch(props, fetch, { immediate: true });

 <template>
     <div>
-        <h2 v-if="includeTitle" class="h-md">
-            Dataset Storage
-            <RelocateLink
-                v-if="storageInfo"
-                :dataset-id="datasetId"
-                :dataset-storage-details="storageInfo"
-                @relocated="fetch" />
-        </h2>
+        <h2 v-if="includeTitle" class="h-md">Dataset Storage</h2>
         <div v-if="errorMessage" class="error">{{ errorMessage }}</div>
         <LoadingSpan v-else-if="storageInfo == null"> </LoadingSpan>
         <div v-else-if="discarded">
@@ -84,5 +77,10 @@ watch(props, fetch, { immediate: true });
         <div v-else>
             <DescribeObjectStore what="This dataset is stored in" :storage-info="storageInfo" />
         </div>
+        <RelocateLink
+            v-if="storageInfo"
+            :dataset-id="datasetId"
+            :dataset-storage-details="storageInfo"
+            @relocated="fetch" />
     </div>
 </template>
13 changes: 9 additions & 4 deletions client/src/components/Dataset/DatasetStorage/RelocateDialog.vue
@@ -13,24 +13,26 @@ defineProps<RelocateProps>();
 const emit = defineEmits<{
     (e: "relocate", value: string): void;
+    (e: "closeModal"): void;
 }>();

-const fromWhat = "This dataset location in a";
+const fromWhat = "This dataset location is";
 const toWhat = "This dataset will be relocated to";
 </script>

 <template>
     <div>
-        <p>Relocate the dataset's current object store of:</p>
+        <p>Currently the dataset is located in:</p>
         <b-button-group vertical size="lg" class="select-button-group">
             <ObjectStoreSelectButton
                 :key="fromObjectStore.object_store_id"
                 id-prefix="swap-target"
                 class="swap-target-object-store-select-button"
                 variant="info"
-                :object-store="fromObjectStore" />
+                :object-store="fromObjectStore"
+                @click="emit('closeModal')" />
         </b-button-group>
-        <p>Select a new object store below to relocate the dataset</p>
+        <p class="relocate-to">Select new storage location for the dataset:</p>
         <b-button-group vertical size="lg" class="select-button-group">
             <ObjectStoreSelectButton
                 v-for="objectStore in targetObjectStores"
@@ -60,4 +62,7 @@ const toWhat = "This dataset will be relocated to";
     margin: auto;
     width: 400px;
 }
+.relocate-to {
+    margin-top: 2em;
+}
 </style>
12 changes: 9 additions & 3 deletions client/src/components/Dataset/DatasetStorage/RelocateLink.vue
@@ -1,4 +1,5 @@
 <script setup lang="ts">
+import { BButton } from "bootstrap-vue";
 import { storeToRefs } from "pinia";
 import { computed, ref } from "vue";
@@ -70,6 +71,10 @@ const emit = defineEmits<{
(e: "relocated"): void;
}>();
function closeModal() {
showModal.value = false;
}
async function relocate(objectStoreId: string) {
try {
await updateObjectStore(props.datasetId, objectStoreId);
@@ -84,12 +89,13 @@ async function relocate(objectStoreId: string) {

 <template>
     <span class="storage-relocate-link">
-        <SelectModal v-if="currentObjectStore" v-model="showModal" title="Relocate Dataset Storage">
+        <SelectModal v-if="currentObjectStore" v-model="showModal" title="Relocate Dataset">
             <RelocateDialog
                 :from-object-store="currentObjectStore"
                 :target-object-stores="validTargets"
-                @relocate="relocate" />
+                @relocate="relocate"
+                @closeModal="closeModal" />
         </SelectModal>
-        <b-link v-if="relocatable" href="#" @click="showModal = true">(relocate)</b-link>
+        <BButton v-if="relocatable" @click="showModal = true">Relocate Dataset</BButton>
     </span>
 </template>
14 changes: 11 additions & 3 deletions doc/source/admin/galaxy_options.rst
@@ -3030,6 +3030,17 @@
 :Type: bool


+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``use_access_logging_middleware``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+:Description:
+    Log request start as well as request end. Disables uvicorn access
+    log handler.
+:Default: ``false``
+:Type: bool
+
+
 ~~~~~~~~~~~~
 ``use_lint``
 ~~~~~~~~~~~~
@@ -5495,6 +5506,3 @@
     This requires the help_forum_api_url to be set.
 :Default: ``false``
 :Type: bool
-
-
-
4 changes: 4 additions & 0 deletions lib/galaxy/config/sample/galaxy.yml.sample
@@ -1743,6 +1743,10 @@ galaxy:
   # job is complete.
   #debug: false

+  # Log request start as well as request end. Disables uvicorn access
+  # log handler.
+  #use_access_logging_middleware: false
+
   # Check for WSGI compliance.
   #use_lint: false

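For illustration, middleware that "logs request start as well as request end" for an ASGI app might look roughly like the sketch below. This is a hedged approximation under the ASGI protocol, not Galaxy's actual implementation; the class and logger names are invented:

    import logging
    import time

    log = logging.getLogger(__name__)

    class AccessLoggingMiddleware:
        """Log the start and the end of each HTTP request (illustrative sketch)."""

        def __init__(self, app):
            self.app = app  # the wrapped ASGI application

        async def __call__(self, scope, receive, send):
            if scope["type"] != "http":
                await self.app(scope, receive, send)
                return
            method, path = scope.get("method", "-"), scope.get("path", "-")
            log.info("request started: %s %s", method, path)
            start = time.monotonic()
            try:
                await self.app(scope, receive, send)
            finally:
                # Logged even if the wrapped app raises, so the request end is never lost.
                log.info("request ended: %s %s (%.3fs)", method, path, time.monotonic() - start)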
12 changes: 9 additions & 3 deletions lib/galaxy/config/sample/object_store_conf.sample.yml
@@ -61,7 +61,7 @@ backends:
 # Sample Distributed Object Store with disk backends configuration
 #

-# In the distributed object store, existing dataests will be located by the `object_store_id` column in the `dataset`
+# In the distributed object store, existing datasets will be located by the `object_store_id` column in the `dataset`
 # table of the Galaxy database, which corresponds to the `id` option on the backend. New datasets are created based on
 # the `weight` option: a backend with weight "2" has twice the chance of being (randomly) selected for new datasets as a
 # backend with weight "1". A weight of "0" will still allow datasets in that backend to be read, but no new datasets
@@ -299,20 +299,25 @@ extra_dirs:
 # The admin is responsible for routinely cleaning that storage using Galaxy's admin scripts - this object store
 # configuration just allows the user selection and communicates expectations to the user. Training related to Galaxy
 # cleanup scripts can be found in the Galaxy Training Network:
-#
+#
 # Slides: https://gxy.io/GTN:S00103
 # Tutorial: https://gxy.io/GTN:T00324
 #
 # In this example, the scratch storage is marked as user-private by setting the `private` option to "true" on the
-# backend definition. This means it cannot be used in public datasets, shared between users, etc.. This is more example
+# backend definition. This means it cannot be used in public datasets, shared between users, etc.. This is for example
 # purposes - you may very well not want scratch storage to be defined as private as it prevents a lot of regular
 # functionality and Galaxy handles regularly cleaned datasets fairly gracefully when the appropriate admin scripts are
 # used.
+#
+# It is safe to just relabel the object store that a dataset belongs to if the underlying paths mapped to by the object
+# stores are the same and the dataset has not been copied. To enable users to relocate datasets this way set the
+# backends' `device` property to the same value.

 type: distributed
 backends:
   - id: default
     type: disk
+    device: device1
     weight: 1
     allow_selection: true
     name: Default Galaxy Storage
@@ -329,6 +334,7 @@
       found on our [Archive Tier Storage](https://www.msi.umn.edu/content/archive-tier-storage) page.
   - id: scratch
     type: disk
+    device: device2
     weight: 0
     allow_selection: true
     private: true
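The relocation rule introduced in the comments above boils down to a device-equality check between backends. A hypothetical helper illustrating that rule; the dataclass and function names here are invented for the sketch and do not reflect Galaxy's internal API:

    from dataclasses import dataclass
    from typing import List, Optional

    @dataclass
    class Backend:
        id: str
        device: Optional[str] = None
        allow_selection: bool = False

    def valid_relocation_targets(source: Backend, backends: List[Backend]) -> List[Backend]:
        """Relocation is only a safe relabel when both backends share a `device`."""
        if source.device is None:
            return []
        return [
            b
            for b in backends
            if b.id != source.id and b.allow_selection and b.device == source.device
        ]

    # With the sample configuration above, `default` (device1) and `scratch`
    # (device2) declare different devices, so neither is a valid relocation
    # target for the other.
    stores = [
        Backend("default", device="device1", allow_selection=True),
        Backend("scratch", device="device2", allow_selection=True),
    ]
    print([b.id for b in valid_relocation_targets(stores[0], stores)])  # -> []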
11 changes: 8 additions & 3 deletions lib/galaxy/config/sample/object_store_conf.xml.sample
@@ -62,7 +62,7 @@
 <!--
     Sample Distributed Object Store with disk backends

-    In the distributed object store, existing dataests will be located by the
+    In the distributed object store, existing datasets will be located by the
     `object_store_id` column in the `dataset` table of the Galaxy database,
     which corresponds to the `id` attribute on the backend tag. New datasets are
     created based on the "weight" attribute: a backend with weight "2" has a
@@ -272,11 +272,16 @@
     of regular functionality and Galaxy handles regularly cleaned
     datasets fairly gracefully when the appropriate admin scripts
     are used.
+
+    It is safe to just relabel the object store that a dataset belongs
+    to if the underlying paths mapped to by the object stores are the
+    same and the dataset has not been copied. To enable users to relocate
+    datasets this way set the backends' `device` property to the same value.
 -->
 <!--
 <object_store type="distributed">
     <backends>
-        <backend id="default" allow_selection="true" type="disk" weight="1" name="Default Galaxy Storage">
+        <backend id="default" allow_selection="true" type="disk" device="device1" weight="1" name="Default Galaxy Storage">
             <description>This is Galaxy's default object store - this disk is regularly backed up and all user's have a default quota of 200 GB.
             </description>
             <files_dir path="database/objects/deafult"/>
@@ -286,7 +291,7 @@
             <backed_up>Backed up to Galaxy's institutional long term tape drive nightly. More information about our tape drive can be found on our [Archive Tier Storage](https://www.msi.umn.edu/content/archive-tier-storage) page.</backed_up>
             </badges>
         </backend>
-        <backend id="scratch" allow_selection="true" type="disk" weight="0" name="Scratch Storage" private="true">
+        <backend id="scratch" allow_selection="true" type="disk" device="device2" weight="0" name="Scratch Storage" private="true">
             <quota source="second_tier" />
             <description>This object store is connected to institutional scratch storage. This disk is not backed up and private to your user and datasets belonging to this storage will be automatically deleted after one month.
             </description>
7 changes: 7 additions & 0 deletions lib/galaxy/config/schemas/config_schema.yml
@@ -2193,6 +2193,13 @@ mapping:
       causes the files used by PBS/SGE (submission script, output, and error)
       to remain on disk after the job is complete.

+  use_access_logging_middleware:
+    type: bool
+    default: false
+    required: false
+    desc: |
+      Log request start as well as request end. Disables uvicorn access log handler.
+
   use_lint:
     type: bool
     default: false
6 changes: 5 additions & 1 deletion lib/galaxy/datatypes/data.py
@@ -842,7 +842,11 @@ def convert_dataset(
     # Make the target datatype available to the converter
     params["__target_datatype__"] = target_type
     # Run converter, job is dispatched through Queue
-    job, converted_datasets, *_ = converter.execute(trans, incoming=params, set_output_hid=visible, history=history)
+    job, converted_datasets, *_ = converter.execute(
+        trans, incoming=params, set_output_hid=visible, history=history, flush_job=False
+    )
+    for converted_dataset in converted_datasets.values():
+        original_dataset.attach_implicitly_converted_dataset(trans.sa_session, converted_dataset, target_type)
     trans.app.job_manager.enqueue(job, tool=converter)
     if len(params) > 0:
         trans.log_event(f"Converter params: {str(params)}", tool_id=converter.id)
18 changes: 1 addition & 17 deletions lib/galaxy/datatypes/display_applications/parameters.py
@@ -11,7 +11,6 @@

 from galaxy.datatypes.data import Data
 from galaxy.model import DatasetInstance
-from galaxy.model.base import transaction
 from galaxy.schema.schema import DatasetState
 from galaxy.util import string_as_bool
 from galaxy.util.template import fill_template
@@ -182,22 +181,7 @@ def prepare(self, other_values, dataset_hash, user_hash, trans):
             if target_ext and not converted_dataset:
                 if isinstance(data, DisplayDataValueWrapper):
                     data = data.value
-                new_data = next(
-                    iter(
-                        data.datatype.convert_dataset(
-                            trans, data, target_ext, return_output=True, visible=False
-                        ).values()
-                    )
-                )
-                new_data.hid = data.hid
-                new_data.name = data.name
-                trans.sa_session.add(new_data)
-                assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation(
-                    parent=data, file_type=target_ext, dataset=new_data, metadata_safe=False
-                )
-                trans.sa_session.add(assoc)
-                with transaction(trans.sa_session):
-                    trans.sa_session.commit()
+                data.datatype.convert_dataset(trans, data, target_ext, return_output=True, visible=False)
             elif converted_dataset and converted_dataset.state == DatasetState.ERROR:
                 raise Exception(f"Dataset conversion failed for data parameter: {self.name}")
             return self.get_value(other_values, dataset_hash, user_hash, trans)
2 changes: 2 additions & 0 deletions lib/galaxy/datatypes/protocols.py
@@ -85,5 +85,7 @@ def has_data(self) -> bool: ...

def set_peek(self) -> None: ...

def attach_implicitly_converted_dataset(self, session, new_dataset, target_ext: str) -> None: ...


class DatasetHasHidProtocol(DatasetProtocol, HasHid, Protocol): ...
8 changes: 4 additions & 4 deletions lib/galaxy/managers/collections.py
@@ -95,7 +95,7 @@ def precreate_dataset_collection_instance(
         # TODO: prebuild all required HIDs and send them in so no need to flush in between.
         dataset_collection = self.precreate_dataset_collection(
             structure,
-            allow_unitialized_element=implicit_output_name is not None,
+            allow_uninitialized_element=implicit_output_name is not None,
             completed_collection=completed_collection,
             implicit_output_name=implicit_output_name,
         )
@@ -112,10 +112,10 @@
         return instance

     def precreate_dataset_collection(
-        self, structure, allow_unitialized_element=True, completed_collection=None, implicit_output_name=None
+        self, structure, allow_uninitialized_element=True, completed_collection=None, implicit_output_name=None
     ):
         has_structure = not structure.is_leaf and structure.children_known
-        if not has_structure and allow_unitialized_element:
+        if not has_structure and allow_uninitialized_element:
             dataset_collection = model.DatasetCollectionElement.UNINITIALIZED_ELEMENT
         elif not has_structure:
             collection_type_description = structure.collection_type_description
@@ -143,7 +143,7 @@ def precreate_dataset_collection(
                 element = model.DatasetCollectionElement.UNINITIALIZED_ELEMENT
             else:
                 element = self.precreate_dataset_collection(
-                    substructure, allow_unitialized_element=allow_unitialized_element
+                    substructure, allow_uninitialized_element=allow_uninitialized_element
                 )

             element = model.DatasetCollectionElement(
9 changes: 2 additions & 7 deletions lib/galaxy/managers/hdas.py
@@ -206,7 +206,7 @@ def copy(
             parent_id=kwargs.get("parent_id"),
             copy_hid=False,
             copy_tags=hda.tags,  # type:ignore[attr-defined]
-            flush=flush,
+            flush=False,
         )
         if hide_copy:
             copy.visible = False
@@ -227,12 +227,6 @@

         return copy

-    def copy_ldda(self, history, ldda, **kwargs):
-        """
-        Copy this HDA as a LDDA and return.
-        """
-        return ldda.to_history_dataset_association(history, add_to_history=True)
-
     # .... deletion and purging
     def purge(self, hda, flush=True, **kwargs):
         if self.app.config.enable_celery_tasks:
@@ -569,6 +563,7 @@ def add_serializers(self):
         annotatable.AnnotatableSerializerMixin.add_serializers(self)

         serializers: Dict[str, base.Serializer] = {
+            "hid": lambda item, key, **context: item.hid if item.hid is not None else -1,
             "model_class": lambda item, key, **context: "HistoryDatasetAssociation",
             "history_content_type": lambda item, key, **context: "dataset",
             "hda_ldda": lambda item, key, **context: "hda",
2 changes: 1 addition & 1 deletion lib/galaxy/managers/history_contents.py
@@ -590,7 +590,7 @@ def get_filter(attr, op, val):
if val == "__null__":
val = None
if val not in ids:
raise KeyError(f"Could not find key {val} in object store keys {list(ids.keys())}")
raise ValueError(f"Could not find key {val} in object store keys {list(ids.keys())}")
object_store_ids = ids[val]
return sql.column("object_store_id").in_(object_store_ids)

[Diff truncated: the remaining changed files are not shown here.]
