diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts
index eef3762d5172..01a0046d0a32 100644
--- a/client/src/api/schema/schema.ts
+++ b/client/src/api/schema/schema.ts
@@ -12942,49 +12942,64 @@ export interface components {
             /** the type of collection to create */
             collection_type: string;
             /**
-             * if True, copy the elements into the collection
+             * Copy Elements
+             * @description if True, copy the elements into the collection
              * @default false
              */
             copy_elements: boolean;
-            /** the type of item to create */
+            /** @description the type of item to create */
             create_type: components["schemas"]["CreateType"];
             /** list of dictionaries containing the element identifiers for the collection */
             element_identifiers: Record<string, never>[];
-            /** sub-dictionary containing any extended metadata to associate with the item */
+            /**
+             * Extended Metadata
+             * @description sub-dictionary containing any extended metadata to associate with the item
+             */
             extended_metadata?: Record<string, never> | null;
             /**
-             * the encoded id of the parent folder of the new item
+             * Folder Id
+             * @description the encoded id of the parent folder of the new item
              * @example 0123456789ABCDEF
              */
             folder_id: string;
-            /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */
+            /**
+             * From Hda Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library
+             */
             from_hda_id?: string | null;
-            /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */
+            /**
+             * From Hdca Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library
+             */
             from_hdca_id?: string | null;
             /**
-             * if True, hide the source items in the collection
+             * Hide Source Items
+             * @description if True, hide the source items in the collection
              * @default false
              */
             hide_source_items: boolean;
             /**
-             * the new message attribute of the LDDA created
+             * Ldda Message
+             * @description the new message attribute of the LDDA created
              * @default
              */
             ldda_message: string;
             /** the name of the collection */
             name?: string | null;
             /**
-             * create tags on datasets using the file's original name
+             * Tag Using Filenames
+             * @description create tags on datasets using the file's original name
              * @default false
              */
             tag_using_filenames: boolean;
             /**
-             * create the given list of tags on datasets
+             * Tags
+             * @description create the given list of tags on datasets
              * @default []
              */
             tags: string[];
             /**
-             * the method to use for uploading files
+             * @description the method to use for uploading files
              * @default upload_file
              */
             upload_option: components["schemas"]["UploadOption"];
@@ -13070,7 +13085,8 @@ export interface components {
         /** LibraryContentsDeletePayload */
         LibraryContentsDeletePayload: {
             /**
-             * if True, purge the library dataset
+             * Purge
+             * @description if True, purge the library dataset
              * @default false
              */
             purge: boolean;
@@ -13087,38 +13103,50 @@ export interface components {
         };
         /** LibraryContentsFileCreatePayload */
         LibraryContentsFileCreatePayload: {
-            /** the type of item to create */
+            /** @description the type of item to create */
             create_type: components["schemas"]["CreateType"];
             /**
              * database key
              * @default ?
              */
             dbkey: string | unknown[];
-            /** sub-dictionary containing any extended metadata to associate with the item */
+            /**
+             * Extended Metadata
+             * @description sub-dictionary containing any extended metadata to associate with the item
+             */
             extended_metadata?: Record<string, never> | null;
             /** file type */
             file_type?: string | null;
             /**
-             * (only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line
+             * Filesystem Paths
+             * @description (only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line
              * @default
              */
             filesystem_paths: string;
             /**
-             * the encoded id of the parent folder of the new item
+             * Folder Id
+             * @description the encoded id of the parent folder of the new item
              * @example 0123456789ABCDEF
              */
             folder_id: string;
-            /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */
+            /**
+             * From Hda Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library
+             */
             from_hda_id?: string | null;
-            /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */
+            /**
+             * From Hdca Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library
+             */
             from_hdca_id?: string | null;
             /**
-             * the new message attribute of the LDDA created
+             * Ldda Message
+             * @description the new message attribute of the LDDA created
              * @default
              */
             ldda_message: string;
             /**
-             * (only when upload_option is 'upload_directory' or 'upload_paths').Setting to 'link_to_files' symlinks instead of copying the files
+             * @description (only when upload_option is 'upload_directory' or 'upload_paths').Setting to 'link_to_files' symlinks instead of copying the files
              * @default copy_files
              */
             link_data_only: components["schemas"]["LinkDataOnly"];
@@ -13128,27 +13156,30 @@ export interface components {
              */
             roles: string;
             /**
-             * (only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` (if admin) or ``user_library_import_dir`` (if non-admin) to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded.
+             * Server Dir
+             * @description (only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` (if admin) or ``user_library_import_dir`` (if non-admin) to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded.
              * @default
              */
             server_dir: string;
             /**
-             * create tags on datasets using the file's original name
+             * Tag Using Filenames
+             * @description create tags on datasets using the file's original name
              * @default false
              */
             tag_using_filenames: boolean;
             /**
-             * create the given list of tags on datasets
+             * Tags
+             * @description create the given list of tags on datasets
              * @default []
              */
             tags: string[];
             /**
-             * list of dictionaries containing the uploaded file fields
+             * list of the uploaded files
              * @default []
              */
             upload_files: Record<string, never>[];
             /**
-             * the method to use for uploading files
+             * @description the method to use for uploading files
              * @default upload_file
              */
             upload_option: components["schemas"]["UploadOption"];
@@ -13159,26 +13190,37 @@ export interface components {
         };
         /** LibraryContentsFolderCreatePayload */
         LibraryContentsFolderCreatePayload: {
-            /** the type of item to create */
+            /** @description the type of item to create */
             create_type: components["schemas"]["CreateType"];
             /**
              * description of the folder to create
              * @default
              */
             description: string;
-            /** sub-dictionary containing any extended metadata to associate with the item */
+            /**
+             * Extended Metadata
+             * @description sub-dictionary containing any extended metadata to associate with the item
+             */
             extended_metadata?: Record<string, never> | null;
             /**
-             * the encoded id of the parent folder of the new item
+             * Folder Id
+             * @description the encoded id of the parent folder of the new item
              * @example 0123456789ABCDEF
              */
             folder_id: string;
-            /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */
+            /**
+             * From Hda Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library
+             */
             from_hda_id?: string | null;
-            /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */
+            /**
+             * From Hdca Id
+             * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library
+             */
             from_hdca_id?: string | null;
             /**
-             * the new message attribute of the LDDA created
+             * Ldda Message
+             * @description the new message attribute of the LDDA created
              * @default
              */
             ldda_message: string;
@@ -13188,17 +13230,19 @@ export interface components {
              */
             name: string;
             /**
-             * create tags on datasets using the file's original name
+             * Tag Using Filenames
+             * @description create tags on datasets using the file's original name
              * @default false
              */
             tag_using_filenames: boolean;
             /**
-             * create the given list of tags on datasets
+             * Tags
+             * @description create the given list of tags on datasets
              * @default []
              */
             tags: string[];
             /**
-             * the method to use for uploading files
+             * @description the method to use for uploading files
              * @default upload_file
              */
             upload_option: components["schemas"]["UploadOption"];
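The regenerated client types above follow mechanically from the `title=` to `description=` switch in the Pydantic models below: once the long sentence moves to `description`, Pydantic derives the property title from the field name (hence the new "Copy Elements", "Folder Id", "Ldda Message" lines) and openapi-typescript renders the sentence as a `@description` JSDoc tag. A minimal standalone Pydantic v2 sketch of that behaviour (the model name is made up; this is not Galaxy code):

```python
# Minimal sketch, assuming Pydantic v2; ExamplePayload is a made-up model, not a Galaxy class.
# Field(description=...) lands in the JSON/OpenAPI schema as "description", while the schema
# "title" is auto-derived from the field name, which openapi-typescript turns into the JSDoc
# title line plus an @description tag in schema.ts.
from pydantic import BaseModel, Field


class ExamplePayload(BaseModel):
    copy_elements: bool = Field(
        False,
        description="if True, copy the elements into the collection",
    )


prop = ExamplePayload.model_json_schema()["properties"]["copy_elements"]
print(prop["title"])        # "Copy Elements"  (derived from the field name)
print(prop["description"])  # "if True, copy the elements into the collection"
print(prop["default"])      # False
```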
description="the encoded id of the parent folder of the new item", ) tag_using_filenames: bool = Field( False, - title="create tags on datasets using the file's original name", + description="create tags on datasets using the file's original name", ) tags: List[str] = Field( [], - title="create the given list of tags on datasets", + description="create the given list of tags on datasets", ) from_hda_id: Optional[DecodedDatabaseIdField] = Field( None, - title="(only if create_type is 'file') the encoded id of an accessible HDA to copy into the library", + description="(only if create_type is 'file') the encoded id of an accessible HDA to copy into the library", ) from_hdca_id: Optional[DecodedDatabaseIdField] = Field( None, - title="(only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library", + description="(only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library", ) ldda_message: str = Field( "", - title="the new message attribute of the LDDA created", + description="the new message attribute of the LDDA created", ) extended_metadata: Optional[Dict[str, Any]] = Field( None, - title="sub-dictionary containing any extended metadata to associate with the item", + description="sub-dictionary containing any extended metadata to associate with the item", ) @field_validator("tags", mode="before", check_fields=False) @@ -110,7 +110,7 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): ) server_dir: str = Field( "", - title="(only if upload_option is 'upload_directory') relative path of the " + description="(only if upload_option is 'upload_directory') relative path of the " "subdirectory of Galaxy ``library_import_dir`` (if admin) or " "``user_library_import_dir`` (if non-admin) to upload. " "All and only the files (i.e. no subdirectories) contained " @@ -118,12 +118,12 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): ) filesystem_paths: str = Field( "", - title="(only if upload_option is 'upload_paths' and the user is an admin) " + description="(only if upload_option is 'upload_paths' and the user is an admin) " "file paths on the Galaxy server to upload to the library, one file per line", ) link_data_only: LinkDataOnly = Field( LinkDataOnly.copy_files, - title="(only when upload_option is 'upload_directory' or 'upload_paths')." + description="(only when upload_option is 'upload_directory' or 'upload_paths')." 
"Setting to 'link_to_files' symlinks instead of copying the files", ) uuid: Optional[str] = Field( @@ -132,7 +132,7 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): ) upload_files: List[Dict[str, Any]] = Field( [], - title="list of dictionaries containing the uploaded file fields", + title="list of the uploaded files", ) # uploaded file fields @@ -165,25 +165,25 @@ class LibraryContentsCollectionCreatePayload(LibraryContentsCreatePayload): ) hide_source_items: bool = Field( False, - title="if True, hide the source items in the collection", + description="if True, hide the source items in the collection", ) copy_elements: bool = Field( False, - title="if True, copy the elements into the collection", + description="if True, copy the elements into the collection", ) class LibraryContentsUpdatePayload(Model): converted_dataset_id: Optional[DecodedDatabaseIdField] = Field( None, - title="the decoded id of the dataset that was created from the file", + title="the decoded id of the dataset", ) class LibraryContentsDeletePayload(Model): purge: bool = Field( False, - title="if True, purge the library dataset", + description="if True, purge the library dataset", ) @@ -310,18 +310,18 @@ class LibraryContentsPurgedResponse(LibraryContentsDeleteResponse): purged: bool -LIBRARY_ID = Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField] +AnyLibraryId = Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField] -SHOW_RESPONSE = Union[ +AnyLibraryContentsShowResponse = Union[ LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse, ] -CREATE_PAYLOAD = Union[ +AnyLibraryContentsCreatePayload = Union[ LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload ] -CREATE_RESPOSNSE = Union[ +AnyLibraryContentsCreateResponse = Union[ LibraryContentsCreateFolderListResponse, LibraryContentsCreateFileListResponse, LibraryContentsCreateDatasetCollectionResponse, diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index ccde81116ee5..9a0e539d12b0 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -24,14 +24,14 @@ ) from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( - CREATE_PAYLOAD, - CREATE_RESPOSNSE, - LIBRARY_ID, + AnyLibraryContentsCreatePayload, + AnyLibraryContentsCreateResponse, + AnyLibraryContentsShowResponse, + AnyLibraryId, LibraryContentsDeletePayload, LibraryContentsDeleteResponse, LibraryContentsFileCreatePayload, LibraryContentsIndexListResponse, - SHOW_RESPONSE, ) from galaxy.webapps.galaxy.api import ( depends, @@ -74,7 +74,7 @@ class FastAPILibraryContents: ) def index( self, - library_id: LIBRARY_ID, + library_id: AnyLibraryId, trans: ProvidesUserContext = DependsOnTrans, ) -> LibraryContentsIndexListResponse: """This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead.""" @@ -88,10 +88,10 @@ def index( ) def show( self, - library_id: LIBRARY_ID, + library_id: AnyLibraryId, id: MaybeLibraryFolderOrDatasetID, trans: ProvidesUserContext = DependsOnTrans, - ) -> SHOW_RESPONSE: + ) -> AnyLibraryContentsShowResponse: """This endpoint is deprecated. 
diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py
index ccde81116ee5..9a0e539d12b0 100644
--- a/lib/galaxy/webapps/galaxy/api/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -24,14 +24,14 @@
 )
 from galaxy.schema.fields import DecodedDatabaseIdField
 from galaxy.schema.library_contents import (
-    CREATE_PAYLOAD,
-    CREATE_RESPOSNSE,
-    LIBRARY_ID,
+    AnyLibraryContentsCreatePayload,
+    AnyLibraryContentsCreateResponse,
+    AnyLibraryContentsShowResponse,
+    AnyLibraryId,
     LibraryContentsDeletePayload,
     LibraryContentsDeleteResponse,
     LibraryContentsFileCreatePayload,
     LibraryContentsIndexListResponse,
-    SHOW_RESPONSE,
 )
 from galaxy.webapps.galaxy.api import (
     depends,
@@ -74,7 +74,7 @@ class FastAPILibraryContents:
     )
     def index(
         self,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
         trans: ProvidesUserContext = DependsOnTrans,
     ) -> LibraryContentsIndexListResponse:
         """This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead."""
@@ -88,10 +88,10 @@ def index(
     )
     def show(
         self,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
         id: MaybeLibraryFolderOrDatasetID,
         trans: ProvidesUserContext = DependsOnTrans,
-    ) -> SHOW_RESPONSE:
+    ) -> AnyLibraryContentsShowResponse:
         """This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead."""
         return self.service.show(trans, id)
 
@@ -103,10 +103,10 @@ def show(
     )
     def create_json(
         self,
-        library_id: LIBRARY_ID,
-        payload: CREATE_PAYLOAD,
+        library_id: AnyLibraryId,
+        payload: AnyLibraryContentsCreatePayload,
         trans: ProvidesHistoryContext = DependsOnTrans,
-    ) -> CREATE_RESPOSNSE:
+    ) -> AnyLibraryContentsCreateResponse:
         """This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead."""
         return self.service.create(trans, library_id, payload)
 
@@ -119,11 +119,11 @@ def create_json(
     async def create_form(
         self,
         request: Request,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
         payload: LibraryContentsFileCreatePayload = Depends(LibraryContentsCreateForm.as_form),
         files: Optional[List[UploadFile]] = None,
         trans: ProvidesHistoryContext = DependsOnTrans,
-    ) -> CREATE_RESPOSNSE:
+    ) -> AnyLibraryContentsCreateResponse:
         """This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead."""
         # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile
         if not files:
@@ -148,7 +148,7 @@ async def create_form(
     )
     def update(
         self,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
         id: DecodedDatabaseIdField,
         payload,
         trans: ProvidesUserContext = DependsOnTrans,
@@ -163,7 +163,7 @@ def update(
     )
     def delete(
         self,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
         id: DecodedDatabaseIdField,
         payload: Optional[LibraryContentsDeletePayload] = Body(None),
         trans: ProvidesHistoryContext = DependsOnTrans,
diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py
index d5fdf0d9d1cd..f70574ea35e3 100644
--- a/lib/galaxy/webapps/galaxy/services/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/services/library_contents.py
@@ -26,9 +26,10 @@
 from galaxy.model.base import transaction
 from galaxy.schema.fields import DecodedDatabaseIdField
 from galaxy.schema.library_contents import (
-    CREATE_PAYLOAD,
-    CREATE_RESPOSNSE,
-    LIBRARY_ID,
+    AnyLibraryContentsCreatePayload,
+    AnyLibraryContentsCreateResponse,
+    AnyLibraryContentsShowResponse,
+    AnyLibraryId,
     LibraryContentsCreateDatasetCollectionResponse,
     LibraryContentsCreateDatasetResponse,
     LibraryContentsCreateFileListResponse,
@@ -41,7 +42,6 @@
     LibraryContentsShowDatasetResponse,
     LibraryContentsShowFolderResponse,
     LibraryContentsUpdatePayload,
-    SHOW_RESPONSE,
 )
 from galaxy.security.idencoding import IdEncodingHelper
 from galaxy.webapps.base.controller import (
@@ -81,7 +81,7 @@ def __init__(
     def index(
         self,
         trans: ProvidesUserContext,
-        library_id: LIBRARY_ID,
+        library_id: AnyLibraryId,
     ) -> LibraryContentsIndexListResponse:
         """Return a list of library files and folders."""
         rval: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = []
@@ -111,7 +111,7 @@ def show(
         self,
         trans: ProvidesUserContext,
         id: MaybeLibraryFolderOrDatasetID,
-    ) -> SHOW_RESPONSE:
+    ) -> AnyLibraryContentsShowResponse:
         """Returns information about library file or folder."""
         class_name, content_id = self._decode_library_content_id(id)
         if class_name == "LibraryFolder":
@@ -127,9 +127,9 @@ def show(
     def create(
         self,
         trans: ProvidesHistoryContext,
-        library_id: LIBRARY_ID,
-        payload: CREATE_PAYLOAD,
-    ) -> CREATE_RESPOSNSE:
+        library_id: AnyLibraryId,
+        payload: AnyLibraryContentsCreatePayload,
+    ) -> AnyLibraryContentsCreateResponse:
         """Create a new library file or folder."""
         if trans.user_is_bootstrap_admin:
             raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.")
@@ -194,45 +194,39 @@ def delete(
     ) -> LibraryContentsDeleteResponse:
         """Delete the LibraryDataset with the given ``id``."""
         rval = {"id": id}
-        try:
-            ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True)
-            user_is_admin = trans.user_is_admin
-            can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld)
-            log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify)
-            if not (user_is_admin or can_modify):
-                raise exceptions.InsufficientPermissionsException(
-                    "Unauthorized to delete or purge this library dataset"
-                )
-
-            ld.deleted = True
-            if payload.purge:
-                ld.purged = True
-                trans.sa_session.add(ld)
-                with transaction(trans.sa_session):
-                    trans.sa_session.commit()
-
-                # TODO: had to change this up a bit from Dataset.user_can_purge
-                dataset = ld.library_dataset_dataset_association.dataset
-                no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations)
-                no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association]
-                can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc
+        ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True)
+        user_is_admin = trans.user_is_admin
+        can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld)
+        log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify)
+        if not (user_is_admin or can_modify):
+            raise exceptions.InsufficientPermissionsException("Unauthorized to delete or purge this library dataset")
 
-                if can_purge_dataset:
-                    try:
-                        ld.library_dataset_dataset_association.dataset.full_delete()
-                        trans.sa_session.add(ld.dataset)
-                    except Exception:
-                        pass
-                    # flush now to preserve deleted state in case of later interruption
-                    with transaction(trans.sa_session):
-                        trans.sa_session.commit()
-                    rval["purged"] = True
+        ld.deleted = True
+        if payload.purge:
+            ld.purged = True
+            trans.sa_session.add(ld)
             with transaction(trans.sa_session):
                 trans.sa_session.commit()
-            rval["deleted"] = True
-        except Exception as exc:
-            log.exception(f"library_contents API, delete: uncaught exception: {id}, {payload}")
-            raise exceptions.InternalServerError(util.unicodify(exc))
+
+            # TODO: had to change this up a bit from Dataset.user_can_purge
+            dataset = ld.library_dataset_dataset_association.dataset
+            no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations)
+            no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association]
+            can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc
+
+            if can_purge_dataset:
+                try:
+                    ld.library_dataset_dataset_association.dataset.full_delete()
+                    trans.sa_session.add(ld.dataset)
+                except Exception:
+                    pass
+                # flush now to preserve deleted state in case of later interruption
+                with transaction(trans.sa_session):
+                    trans.sa_session.commit()
+                rval["purged"] = True
+        with transaction(trans.sa_session):
+            trans.sa_session.commit()
+        rval["deleted"] = True
         return LibraryContentsDeleteResponse(**rval)
 
     def _decode_library_content_id(
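All of these routes remain functional but are flagged deprecated in favour of the folders API named in their docstrings. A rough client-side sketch of the migration for the listing call follows; the base URL, API key and both ids are placeholders, the replacement path is the one quoted in the docstrings above, and the legacy path is assumed to be the route this controller serves.

```python
# Rough migration sketch; GALAXY_URL, API_KEY and both ids are placeholders.
# The replacement path is quoted in the deprecation docstrings above; the legacy path
# (/api/libraries/{library_id}/contents) is the route assumed for this controller.
import requests

GALAXY_URL = "https://galaxy.example.org"
API_KEY = "YOUR_API_KEY"
HEADERS = {"x-api-key": API_KEY}

library_id = "ENCODED_LIBRARY_ID"  # placeholder encoded library id
folder_id = "FENCODED_FOLDER_ID"  # placeholder encoded folder id (e.g. the library's root folder)

# Deprecated listing endpoint handled by FastAPILibraryContents.index above.
legacy = requests.get(f"{GALAXY_URL}/api/libraries/{library_id}/contents", headers=HEADERS)

# Replacement named in the docstring: GET /api/folders/{folder_id}/contents.
replacement = requests.get(f"{GALAXY_URL}/api/folders/{folder_id}/contents", headers=HEADERS)

print(legacy.status_code, replacement.status_code)
```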