diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 19954176a24f..aa766a46a9fc 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -3028,6 +3028,63 @@ export interface paths { patch?: never; trace?: never; }; + "/api/libraries/{library_id}/contents": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Return a list of library files and folders. + * @deprecated + * @description This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead. + */ + get: operations["index_api_libraries__library_id__contents_get"]; + put?: never; + /** + * Create a new library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead. + */ + post: operations["create_form_api_libraries__library_id__contents_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/libraries/{library_id}/contents/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Return a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead. + */ + get: operations["library_content_api_libraries__library_id__contents__id__get"]; + /** + * Update a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use PATCH /api/libraries/datasets/{library_id} instead. + */ + put: operations["update_api_libraries__library_id__contents__id__put"]; + post?: never; + /** + * Delete a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use DELETE /api/libraries/datasets/{library_id} instead. + */ + delete: operations["delete_api_libraries__library_id__contents__id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/licenses": { parameters: { query?: never; @@ -6073,6 +6130,75 @@ export interface components { /** Name */ name?: unknown; }; + /** Body_create_form_api_libraries__library_id__contents_post */ + Body_create_form_api_libraries__library_id__contents_post: { + /** Create Type */ + create_type: unknown; + /** + * Dbkey + * @default ? 
+ */ + dbkey: unknown; + /** Extended Metadata */ + extended_metadata?: unknown; + /** File Type */ + file_type?: unknown; + /** Files */ + files?: string[] | null; + /** + * Filesystem Paths + * @default + */ + filesystem_paths: unknown; + /** Folder Id */ + folder_id: unknown; + /** From Hda Id */ + from_hda_id?: unknown; + /** From Hdca Id */ + from_hdca_id?: unknown; + /** + * Ldda Message + * @default + */ + ldda_message: unknown; + /** + * Link Data Only + * @default copy_files + */ + link_data_only: unknown; + /** + * Roles + * @default + */ + roles: unknown; + /** + * Server Dir + * @default + */ + server_dir: unknown; + /** + * Tag Using Filenames + * @default false + */ + tag_using_filenames: unknown; + /** + * Tags + * @default [] + */ + tags: unknown; + /** + * Upload Files + * @default [] + */ + upload_files: unknown; + /** + * Upload Option + * @default upload_file + */ + upload_option: unknown; + /** Uuid */ + uuid?: unknown; + }; /** Body_fetch_form_api_tools_fetch_post */ Body_fetch_form_api_tools_fetch_post: { /** Files */ @@ -6971,6 +7097,11 @@ export interface components { */ url: string; }; + /** + * CreateType + * @enum {string} + */ + CreateType: "file" | "folder" | "collection"; /** CreateWorkflowLandingRequestPayload */ CreateWorkflowLandingRequestPayload: { /** Client Secret */ @@ -12907,20 +13038,464 @@ export interface components { */ page: number; /** - * Page Limit - * @description Maximum number of items per page. + * Page Limit + * @description Maximum number of items per page. + */ + page_limit: number; + /** + * Roles + * @description A list available roles that can be assigned to a particular permission. + */ + roles: components["schemas"]["BasicRoleModel"][]; + /** + * Total + * @description Total number of items + */ + total: number; + }; + /** LibraryContentsCollectionCreatePayload */ + LibraryContentsCollectionCreatePayload: { + /** the type of collection to create */ + collection_type: string; + /** + * Copy Elements + * @description if True, copy the elements into the collection + * @default false + */ + copy_elements: boolean; + /** @description the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** list of dictionaries containing the element identifiers for the collection */ + element_identifiers: Record[]; + /** + * Extended Metadata + * @description sub-dictionary containing any extended metadata to associate with the item + */ + extended_metadata?: Record | null; + /** + * Folder Id + * @description the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** + * From Hda Id + * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library + */ + from_hda_id?: string | null; + /** + * From Hdca Id + * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library + */ + from_hdca_id?: string | null; + /** + * Hide Source Items + * @description if True, hide the source items in the collection + * @default false + */ + hide_source_items: boolean; + /** + * Ldda Message + * @description the new message attribute of the LDDA created + * @default + */ + ldda_message: string; + /** the name of the collection */ + name?: string | null; + /** + * Tag Using Filenames + * @description create tags on datasets using the file's original name + * @default false + */ + tag_using_filenames: boolean; + /** + * Tags + * @description create the given list of tags on datasets + * @default [] + 
*/ + tags: string[]; + /** + * @description the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"]; + }; + /** LibraryContentsCreateDatasetCollectionResponse */ + LibraryContentsCreateDatasetCollectionResponse: components["schemas"]["LibraryContentsCreateDatasetResponse"][]; + /** LibraryContentsCreateDatasetResponse */ + LibraryContentsCreateDatasetResponse: { + /** Created From Basename */ + created_from_basename: string | null; + /** Data Type */ + data_type: string; + /** Deleted */ + deleted: boolean; + /** File Ext */ + file_ext: string; + /** File Name */ + file_name: string; + /** File Size */ + file_size: number; + /** Genome Build */ + genome_build: string; + /** Hda Ldda */ + hda_ldda: string; + /** Id */ + id: string; + /** Library Dataset Id */ + library_dataset_id: string; + /** Misc Blurb */ + misc_blurb: string | null; + /** Misc Info */ + misc_info: string | null; + /** + * Model class + * @description The name of the database model class. + * @constant + * @enum {string} + */ + model_class: "LibraryDatasetDatasetAssociation"; + /** Name */ + name: string; + /** Parent Library Id */ + parent_library_id: string; + /** State */ + state: string; + /** Update Time */ + update_time: string; + /** Uuid */ + uuid: string; + /** Visible */ + visible: boolean; + } & { + [key: string]: unknown; + }; + /** LibraryContentsCreateFileListResponse */ + LibraryContentsCreateFileListResponse: components["schemas"]["LibraryContentsCreateFileResponse"][]; + /** LibraryContentsCreateFileResponse */ + LibraryContentsCreateFileResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Url */ + url: string; + }; + /** LibraryContentsCreateFolderListResponse */ + LibraryContentsCreateFolderListResponse: components["schemas"]["LibraryContentsCreateFolderResponse"][]; + /** LibraryContentsCreateFolderResponse */ + LibraryContentsCreateFolderResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Url */ + url: string; + }; + /** LibraryContentsDeletePayload */ + LibraryContentsDeletePayload: { + /** + * Purge + * @description if True, purge the library dataset + * @default false + */ + purge: boolean; + }; + /** LibraryContentsDeleteResponse */ + LibraryContentsDeleteResponse: { + /** Deleted */ + deleted: boolean; + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + }; + /** LibraryContentsFileCreatePayload */ + LibraryContentsFileCreatePayload: { + /** @description the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** + * database key + * @default ? 
+ */ + dbkey: string | unknown[]; + /** + * Extended Metadata + * @description sub-dictionary containing any extended metadata to associate with the item + */ + extended_metadata?: Record | null; + /** file type */ + file_type?: string | null; + /** + * Filesystem Paths + * @description (only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line + * @default + */ + filesystem_paths: string; + /** + * Folder Id + * @description the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** + * From Hda Id + * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library + */ + from_hda_id?: string | null; + /** + * From Hdca Id + * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library + */ + from_hdca_id?: string | null; + /** + * Ldda Message + * @description the new message attribute of the LDDA created + * @default + */ + ldda_message: string; + /** + * @description (only when upload_option is 'upload_directory' or 'upload_paths').Setting to 'link_to_files' symlinks instead of copying the files + * @default copy_files + */ + link_data_only: components["schemas"]["LinkDataOnly"]; + /** + * user selected roles + * @default + */ + roles: string; + /** + * Server Dir + * @description (only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` (if admin) or ``user_library_import_dir`` (if non-admin) to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded. + * @default + */ + server_dir: string; + /** + * Tag Using Filenames + * @description create tags on datasets using the file's original name + * @default false + */ + tag_using_filenames: boolean; + /** + * Tags + * @description create the given list of tags on datasets + * @default [] + */ + tags: string[]; + /** + * list of the uploaded files + * @default [] + */ + upload_files: Record[]; + /** + * @description the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"]; + /** UUID of the dataset to upload */ + uuid?: string | null; + } & { + [key: string]: unknown; + }; + /** LibraryContentsFolderCreatePayload */ + LibraryContentsFolderCreatePayload: { + /** @description the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** + * description of the folder to create + * @default + */ + description: string; + /** + * Extended Metadata + * @description sub-dictionary containing any extended metadata to associate with the item + */ + extended_metadata?: Record | null; + /** + * Folder Id + * @description the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** + * From Hda Id + * @description (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library + */ + from_hda_id?: string | null; + /** + * From Hdca Id + * @description (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library + */ + from_hdca_id?: string | null; + /** + * Ldda Message + * @description the new message attribute of the LDDA created + * @default + */ + ldda_message: string; + /** + * name of the folder to create + * @default + */ + name: string; + /** + * Tag Using Filenames + * @description create tags on datasets 
using the file's original name + * @default false + */ + tag_using_filenames: boolean; + /** + * Tags + * @description create the given list of tags on datasets + * @default [] + */ + tags: string[]; + /** + * @description the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"]; + }; + /** LibraryContentsIndexDatasetResponse */ + LibraryContentsIndexDatasetResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Type */ + type: string; + /** Url */ + url: string; + }; + /** LibraryContentsIndexFolderResponse */ + LibraryContentsIndexFolderResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Type */ + type: string; + /** Url */ + url: string; + }; + /** LibraryContentsIndexListResponse */ + LibraryContentsIndexListResponse: ( + | components["schemas"]["LibraryContentsIndexFolderResponse"] + | components["schemas"]["LibraryContentsIndexDatasetResponse"] + )[]; + /** LibraryContentsShowDatasetResponse */ + LibraryContentsShowDatasetResponse: { + /** Created From Basename */ + created_from_basename: string | null; + /** Data Type */ + data_type: string; + /** Date Uploaded */ + date_uploaded: string; + /** File Ext */ + file_ext: string; + /** File Name */ + file_name: string; + /** File Size */ + file_size: number; + /** + * Folder Id + * @example 0123456789ABCDEF + */ + folder_id: string; + /** Genome Build */ + genome_build: string | null; + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** + * Ldda Id + * @example 0123456789ABCDEF + */ + ldda_id: string; + /** Message */ + message: string | null; + /** Misc Blurb */ + misc_blurb: string | null; + /** Misc Info */ + misc_info: string | null; + /** + * Model class + * @description The name of the database model class. + * @constant + * @enum {string} + */ + model_class: "LibraryDataset"; + /** Name */ + name: string; + /** + * Parent Library Id + * @example 0123456789ABCDEF + */ + parent_library_id: string; + /** Peek */ + peek: string | null; + /** State */ + state: string; + tags: components["schemas"]["TagCollection"]; + /** Update Time */ + update_time: string; + /** Uploaded By */ + uploaded_by: string | null; + /** Uuid */ + uuid: string; + } & { + [key: string]: unknown; + }; + /** LibraryContentsShowFolderResponse */ + LibraryContentsShowFolderResponse: { + /** Deleted */ + deleted: boolean; + /** Description */ + description: string; + /** Genome Build */ + genome_build: string | null; + /** + * Id + * @example 0123456789ABCDEF */ - page_limit: number; + id: string; + /** Item Count */ + item_count: number; + /** Library Path */ + library_path: string[]; /** - * Roles - * @description A list available roles that can be assigned to a particular permission. + * Model class + * @description The name of the database model class. 
+ * @constant + * @enum {string} */ - roles: components["schemas"]["BasicRoleModel"][]; + model_class: "LibraryFolder"; + /** Name */ + name: string; + /** Parent Id */ + parent_id: string | null; /** - * Total - * @description Total number of items + * Parent Library Id + * @example 0123456789ABCDEF */ - total: number; + parent_library_id: string; + /** Update Time */ + update_time: string; }; /** LibraryCurrentPermissions */ LibraryCurrentPermissions: { @@ -13377,6 +13952,11 @@ export interface components { /** Name */ name: string; }; + /** + * LinkDataOnly + * @enum {string} + */ + LinkDataOnly: "copy_files" | "link_to_files"; /** * ListJstreeResponse * @deprecated @@ -16602,6 +17182,11 @@ export interface components { /** Tool Version */ tool_version?: string | null; }; + /** + * UploadOption + * @enum {string} + */ + UploadOption: "upload_file" | "upload_paths" | "upload_directory"; /** UrlDataElement */ UrlDataElement: { /** Md5 */ @@ -27615,6 +28200,241 @@ export interface operations { }; }; }; + index_api_libraries__library_id__contents_get: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["LibraryContentsIndexListResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + create_form_api_libraries__library_id__contents_post: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "multipart/form-data": components["schemas"]["Body_create_form_api_libraries__library_id__contents_post"]; + }; + }; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": + | components["schemas"]["LibraryContentsCreateFolderListResponse"] + | components["schemas"]["LibraryContentsCreateFileListResponse"] + | components["schemas"]["LibraryContentsCreateDatasetCollectionResponse"] + | components["schemas"]["LibraryContentsCreateDatasetResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + library_content_api_libraries__library_id__contents__id__get: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. 
Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + /** @example F0123456789ABCDEF */ + id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": + | components["schemas"]["LibraryContentsShowFolderResponse"] + | components["schemas"]["LibraryContentsShowDatasetResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + update_api_libraries__library_id__contents__id__put: { + parameters: { + query: { + payload: unknown; + }; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": unknown; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + delete_api_libraries__library_id__contents__id__delete: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. 
*/ + "run-as"?: string | null; + }; + path: { + library_id: string; + id: string; + }; + cookie?: never; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["LibraryContentsDeletePayload"] | null; + }; + }; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["LibraryContentsDeleteResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; index_api_licenses_get: { parameters: { query?: never; diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index 72b651e1869a..8a62c2470691 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -5,22 +5,18 @@ import json import logging import os.path -from typing import Optional from markupsafe import escape -from galaxy import util -from galaxy.exceptions import ( - AdminRequiredException, - ConfigDoesNotAllowException, - ItemAccessibilityException, - ObjectNotFound, - RequestParameterInvalidException, +from galaxy import ( + exceptions, + util, ) -from galaxy.model import ( - LibraryDataset, - LibraryFolder, +from galaxy.managers.collections_util import ( + api_payload_to_create_params, + dictify_dataset_collection_instance, ) +from galaxy.model import LibraryFolder from galaxy.model.base import transaction from galaxy.tools.actions import upload_common from galaxy.tools.parameters import populate_state @@ -35,17 +31,19 @@ def validate_server_directory_upload(trans, server_dir): if server_dir in [None, "None", ""]: - raise RequestParameterInvalidException("Invalid or unspecified server_dir parameter") + raise exceptions.RequestParameterInvalidException("Invalid or unspecified server_dir parameter") if trans.user_is_admin: import_dir = trans.app.config.library_import_dir import_dir_desc = "library_import_dir" if not import_dir: - raise ConfigDoesNotAllowException('"library_import_dir" is not set in the Galaxy configuration') + raise exceptions.ConfigDoesNotAllowException('"library_import_dir" is not set in the Galaxy configuration') else: import_dir = trans.app.config.user_library_import_dir if not import_dir: - raise ConfigDoesNotAllowException('"user_library_import_dir" is not set in the Galaxy configuration') + raise exceptions.ConfigDoesNotAllowException( + '"user_library_import_dir" is not set in the Galaxy configuration' + ) if server_dir != trans.user.email: import_dir = os.path.join(import_dir, trans.user.email) import_dir_desc = "user_library_import_dir" @@ -75,17 +73,21 @@ def validate_server_directory_upload(trans, server_dir): ) unsafe = True if unsafe: - raise RequestParameterInvalidException("Invalid server_dir specified") + raise exceptions.RequestParameterInvalidException("Invalid server_dir specified") return full_dir, import_dir_desc def validate_path_upload(trans): if not trans.app.config.allow_library_path_paste: - raise ConfigDoesNotAllowException('"allow_path_paste" is not set to True in the Galaxy configuration file') + raise exceptions.ConfigDoesNotAllowException( + '"allow_path_paste" is not set to True in the Galaxy configuration file' + ) if not trans.user_is_admin: - raise 
AdminRequiredException("Uploading files via filesystem paths can only be performed by administrators") + raise exceptions.AdminRequiredException( + "Uploading files via filesystem paths can only be performed by administrators" + ) class LibraryActions: @@ -93,96 +95,86 @@ class LibraryActions: Mixin for controllers that provide library functionality. """ - def _upload_dataset(self, trans, folder_id: int, replace_dataset: Optional[LibraryDataset] = None, **kwd): + def _upload_dataset(self, trans, folder_id: int, payload): # Set up the traditional tool state/params cntrller = "api" tool_id = "upload1" - message = None - file_type = kwd.get("file_type") - try: - upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type) - except RequestParameterInvalidException as e: - return (400, util.unicodify(e)) + upload_common.validate_datatype_extension( + datatypes_registry=trans.app.datatypes_registry, ext=payload.file_type + ) tool = trans.app.toolbox.get_tool(tool_id) state = tool.new_state(trans) - populate_state(trans, tool.inputs, kwd, state.inputs) + populate_state(trans, tool.inputs, payload.model_dump(), state.inputs) tool_params = state.inputs dataset_upload_inputs = [] for input in tool.inputs.values(): if input.type == "upload_dataset": dataset_upload_inputs.append(input) # Library-specific params - server_dir = kwd.get("server_dir", "") - upload_option = kwd.get("upload_option", "upload_file") - response_code = 200 - if upload_option == "upload_directory": - full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir) - message = "Select a directory" - elif upload_option == "upload_paths": + if payload.upload_option == "upload_directory": + full_dir, import_dir_desc = validate_server_directory_upload(trans, payload.server_dir) + elif payload.upload_option == "upload_paths": # Library API already checked this - following check isn't actually needed. validate_path_upload(trans) # Some error handling should be added to this method. try: - # FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd + # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload # should be passed so that complex objects that may have been included in the initial request remain. - library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset) + library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, None) except Exception: - response_code = 500 - message = "Unable to parse upload parameters, please report this error." 
+            raise exceptions.InvalidFileFormatError("Invalid folder specified")
         # Proceed with (mostly) regular upload processing if we're still errorless
-        if response_code == 200:
-            if upload_option == "upload_file":
-                tool_params = upload_common.persist_uploads(tool_params, trans)
-                uploaded_datasets = upload_common.get_uploaded_datasets(
-                    trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch
-                )
-            elif upload_option == "upload_directory":
-                uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(
-                    trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message
-                )
-            elif upload_option == "upload_paths":
-                uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(
-                    trans, kwd, library_bunch, response_code, message
-                )
-            if upload_option == "upload_file" and not uploaded_datasets:
-                response_code = 400
-                message = "Select a file, enter a URL or enter text"
-            if response_code != 200:
-                return (response_code, message)
+        if payload.upload_option == "upload_file":
+            for i, upload_dataset in enumerate(tool_params["files"]):
+                upload_dataset["file_data"] = payload.upload_files[i]
+            tool_params = upload_common.persist_uploads(tool_params, trans)
+            uploaded_datasets = upload_common.get_uploaded_datasets(
+                trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch
+            )
+        elif payload.upload_option == "upload_directory":
+            uploaded_datasets = self._get_server_dir_uploaded_datasets(
+                trans, payload, full_dir, import_dir_desc, library_bunch
+            )
+        elif payload.upload_option == "upload_paths":
+            uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(
+                trans, payload.model_dump(), library_bunch, 200, None
+            )
+            if response_code != 200:
+                raise exceptions.RequestParameterInvalidException(message)
+        if payload.upload_option == "upload_file" and not uploaded_datasets:
+            raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text")
         json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
         data_list = [ud.data for ud in uploaded_datasets]
         job_params = {}
-        job_params["link_data_only"] = json.dumps(kwd.get("link_data_only", "copy_files"))
-        job_params["uuid"] = json.dumps(kwd.get("uuid", None))
+        job_params["link_data_only"] = json.dumps(payload.link_data_only)
+        job_params["uuid"] = json.dumps(payload.uuid)
         job, output = upload_common.create_job(
             trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params
         )
         trans.app.job_manager.enqueue(job, tool=tool)
+        if not output:
+            raise exceptions.RequestParameterInvalidException("Upload failed")
         return output
 
-    def _get_server_dir_uploaded_datasets(
-        self, trans, params, full_dir, import_dir_desc, library_bunch, response_code, message
-    ):
-        dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
-        files = dir_response[0]
-        if not files:
-            return dir_response
+    def _get_server_dir_uploaded_datasets(self, trans, payload, full_dir, import_dir_desc, library_bunch):
+        files = self._get_server_dir_files(payload, full_dir, import_dir_desc)
         uploaded_datasets = []
         for file in files:
             name = os.path.basename(file)
             uploaded_datasets.append(
-                self._make_library_uploaded_dataset(trans, params, name, file, "server_dir", library_bunch)
+                self._make_library_uploaded_dataset(
+                    trans, payload.model_dump(), name, file, "server_dir", library_bunch
+                )
             )
-        return uploaded_datasets, 200, None
+        return uploaded_datasets
 
-    def _get_server_dir_files(self, params, full_dir, import_dir_desc):
+    def _get_server_dir_files(self, payload, full_dir, import_dir_desc):
         files = []
         try:
             for entry in os.listdir(full_dir):
                 # Only import regular files
                 path = os.path.join(full_dir, entry)
-                link_data_only = params.get("link_data_only", "copy_files")
-                if os.path.islink(full_dir) and link_data_only == "link_to_files":
+                if os.path.islink(full_dir) and payload.link_data_only == "link_to_files":
                     # If we're linking instead of copying and the
                     # sub-"directory" in the import dir is actually a symlink,
                     # dereference the symlink, but not any of its contents.
@@ -191,7 +183,7 @@ def _get_server_dir_files(self, params, full_dir, import_dir_desc):
                         path = os.path.join(link_path, entry)
                     else:
                         path = os.path.abspath(os.path.join(link_path, entry))
-                elif os.path.islink(path) and os.path.isfile(path) and link_data_only == "link_to_files":
+                elif os.path.islink(path) and os.path.isfile(path) and payload.link_data_only == "link_to_files":
                     # If we're linking instead of copying and the "file" in the
                     # sub-directory of the import dir is actually a symlink,
                     # dereference the symlink (one dereference only, Vasili).
@@ -203,14 +195,12 @@ def _get_server_dir_files(self, params, full_dir, import_dir_desc):
                 if os.path.isfile(path):
                     files.append(path)
         except Exception as e:
-            message = f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}"
-            response_code = 500
-            return None, response_code, message
+            raise exceptions.InternalServerError(
+                f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}"
+            )
         if not files:
-            message = f"The directory '{full_dir}' contains no valid files"
-            response_code = 400
-            return None, response_code, message
-        return files, None, None
+            raise exceptions.ObjectAttributeMissingException(f"The directory '{full_dir}' contains no valid files")
+        return files
 
     def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, response_code, message):
         preserve_dirs = util.string_as_bool(params.get("preserve_dirs", False))
@@ -307,15 +297,37 @@ def _make_library_uploaded_dataset(self, trans, params, name, path, type, librar
             trans.sa_session.commit()
         return uploaded_dataset
 
-    def _create_folder(self, trans, parent_id: int, **kwd):
+    def _upload_library_dataset(self, trans, payload):
         is_admin = trans.user_is_admin
         current_user_roles = trans.get_current_user_roles()
-        parent_folder = trans.sa_session.get(LibraryFolder, parent_id)
+        folder = trans.sa_session.get(LibraryFolder, payload.folder_id)
+        if not folder:
+            raise exceptions.RequestParameterInvalidException("Invalid folder id specified.")
+        self._check_access(trans, is_admin, folder, current_user_roles)
+        self._check_add(trans, is_admin, folder, current_user_roles)
+        if payload.roles:
+            # Check to see if the user selected roles to associate with the DATASET_ACCESS permission
+            # on the dataset that would cause accessibility issues.
+            vars = dict(DATASET_ACCESS_in=payload.roles)
+            permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access(
+                trans, folder.parent_library.id, "api", library=True, **vars
+            )
+            if error:
+                raise exceptions.RequestParameterInvalidException(message)
+        created_outputs_dict = self._upload_dataset(trans, folder.id, payload)
+        return created_outputs_dict
+
+    def _create_folder(self, trans, payload):
+        is_admin = trans.user_is_admin
+        current_user_roles = trans.get_current_user_roles()
+        parent_folder = trans.sa_session.get(LibraryFolder, payload.folder_id)
+        if not parent_folder:
+            raise exceptions.RequestParameterInvalidException("Invalid folder id specified.")
         # Check the library which actually contains the user-supplied parent folder, not the user-supplied
         # library, which could be anything.
         self._check_access(trans, is_admin, parent_folder, current_user_roles)
         self._check_add(trans, is_admin, parent_folder, current_user_roles)
-        new_folder = LibraryFolder(name=kwd.get("name", ""), description=kwd.get("description", ""))
+        new_folder = LibraryFolder(name=payload.name, description=payload.description)
         # We are associating the last used genome build with folders, so we will always
         # initialize a new folder with the first dbkey in genome builds list which is currently
         # ? unspecified (?)
@@ -326,25 +338,39 @@ def _create_folder(self, trans, parent_id: int, **kwd):
             trans.sa_session.commit()
         # New folders default to having the same permissions as their parent folder
         trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder)
-        return 200, dict(created=new_folder)
+        new_folder_dict = dict(created=new_folder)
+        return new_folder_dict
+
+    def _create_collection(self, trans, payload, parent):
+        # Not delegating to library_common, so need to check access to parent folder here.
+        self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
+        create_params = api_payload_to_create_params(payload.model_dump())
+        # collection_manager.create needs trans as one of the params
+        create_params["trans"] = trans
+        create_params["parent"] = parent
+        dataset_collection_instance = self.collection_manager.create(**create_params)
+        dataset_collection = dictify_dataset_collection_instance(
+            dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent
+        )
+        return [dataset_collection]
 
     def _check_access(self, trans, is_admin, item, current_user_roles):
         if isinstance(item, trans.model.HistoryDatasetAssociation):
             # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association.
             if not item:
                 message = f"Invalid history dataset ({escape(str(item))}) specified."
-                raise ObjectNotFound(message)
+                raise exceptions.ObjectNotFound(message)
             elif (
                 not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset)
                 and item.user == trans.user
             ):
                 message = f"You do not have permission to access the history dataset with id ({str(item.id)})."
-                raise ItemAccessibilityException(message)
+                raise exceptions.ItemAccessibilityException(message)
         else:
             # Make sure the user has the LIBRARY_ACCESS permission on the library item.
             if not item:
                 message = f"Invalid library item ({escape(str(item))}) specified."
- raise ObjectNotFound(message) + raise exceptions.ObjectNotFound(message) elif not ( is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user) ): @@ -355,10 +381,10 @@ def _check_access(self, trans, is_admin, item, current_user_roles): else: item_type = "(unknown item type)" message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." - raise ItemAccessibilityException(message) + raise exceptions.ItemAccessibilityException(message) def _check_add(self, trans, is_admin, item, current_user_roles): # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): message = f"You are not authorized to add an item to ({escape(item.name)})." - raise ItemAccessibilityException(message) + raise exceptions.ItemAccessibilityException(message) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py new file mode 100644 index 000000000000..b46fabf233ab --- /dev/null +++ b/lib/galaxy/schema/library_contents.py @@ -0,0 +1,327 @@ +import json +from enum import Enum +from typing import ( + Any, + Dict, + List, + Optional, + Union, +) + +from pydantic import ( + ConfigDict, + Field, + RootModel, +) +from pydantic.functional_validators import field_validator +from typing_extensions import ( + Annotated, + Literal, +) + +from galaxy.schema.fields import ( + DecodedDatabaseIdField, + EncodedDatabaseIdField, + EncodedLibraryFolderDatabaseIdField, + LibraryFolderDatabaseIdField, + ModelClassField, +) +from galaxy.schema.schema import ( + Model, + TagCollection, +) + + +class UploadOption(str, Enum): + upload_file = "upload_file" + upload_paths = "upload_paths" + upload_directory = "upload_directory" + + +class CreateType(str, Enum): + file = "file" + folder = "folder" + collection = "collection" + + +class LinkDataOnly(str, Enum): + copy_files = "copy_files" + link_to_files = "link_to_files" + + +class LibraryContentsCreatePayload(Model): + create_type: CreateType = Field( + ..., + description="the type of item to create", + ) + upload_option: UploadOption = Field( + UploadOption.upload_file, + description="the method to use for uploading files", + ) + folder_id: LibraryFolderDatabaseIdField = Field( + ..., + description="the encoded id of the parent folder of the new item", + ) + tag_using_filenames: bool = Field( + False, + description="create tags on datasets using the file's original name", + ) + tags: List[str] = Field( + [], + description="create the given list of tags on datasets", + ) + from_hda_id: Optional[DecodedDatabaseIdField] = Field( + None, + description="(only if create_type is 'file') the encoded id of an accessible HDA to copy into the library", + ) + from_hdca_id: Optional[DecodedDatabaseIdField] = Field( + None, + description="(only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library", + ) + ldda_message: str = Field( + "", + description="the new message attribute of the LDDA created", + ) + extended_metadata: Optional[Dict[str, Any]] = Field( + None, + description="sub-dictionary containing any extended metadata to associate with the item", + ) + + @field_validator("tags", mode="before", check_fields=False) + @classmethod + def tags_string_to_json(cls, v): + if isinstance(v, str): + return json.loads(v) + return v + + +class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): + dbkey: Union[str, list] = Field( + 
"?", + title="database key", + ) + roles: str = Field( + "", + title="user selected roles", + ) + file_type: Optional[str] = Field( + None, + title="file type", + ) + server_dir: str = Field( + "", + description="(only if upload_option is 'upload_directory') relative path of the " + "subdirectory of Galaxy ``library_import_dir`` (if admin) or " + "``user_library_import_dir`` (if non-admin) to upload. " + "All and only the files (i.e. no subdirectories) contained " + "in the specified directory will be uploaded.", + ) + filesystem_paths: str = Field( + "", + description="(only if upload_option is 'upload_paths' and the user is an admin) " + "file paths on the Galaxy server to upload to the library, one file per line", + ) + link_data_only: LinkDataOnly = Field( + LinkDataOnly.copy_files, + description="(only when upload_option is 'upload_directory' or 'upload_paths')." + "Setting to 'link_to_files' symlinks instead of copying the files", + ) + uuid: Optional[str] = Field( + None, + title="UUID of the dataset to upload", + ) + upload_files: List[Dict[str, Any]] = Field( + [], + title="list of the uploaded files", + ) + + # uploaded file fields + model_config = ConfigDict(extra="allow") + + +class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): + name: str = Field( + "", + title="name of the folder to create", + ) + description: str = Field( + "", + title="description of the folder to create", + ) + + +class LibraryContentsCollectionCreatePayload(LibraryContentsCreatePayload): + collection_type: str = Field( + ..., + title="the type of collection to create", + ) + element_identifiers: List[Dict[str, Any]] = Field( + ..., + title="list of dictionaries containing the element identifiers for the collection", + ) + name: Optional[str] = Field( + None, + title="the name of the collection", + ) + hide_source_items: bool = Field( + False, + description="if True, hide the source items in the collection", + ) + copy_elements: bool = Field( + False, + description="if True, copy the elements into the collection", + ) + + +class LibraryContentsUpdatePayload(Model): + converted_dataset_id: Optional[DecodedDatabaseIdField] = Field( + None, + title="the decoded id of the dataset", + ) + + +class LibraryContentsDeletePayload(Model): + purge: bool = Field( + False, + description="if True, purge the library dataset", + ) + + +class LibraryContentsIndexResponse(Model): + type: str + name: str + url: str + + +class LibraryContentsIndexFolderResponse(LibraryContentsIndexResponse): + id: EncodedLibraryFolderDatabaseIdField + + +class LibraryContentsIndexDatasetResponse(LibraryContentsIndexResponse): + id: EncodedDatabaseIdField + + +class LibraryContentsIndexListResponse(RootModel): + root: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] + + +class LibraryContentsShowResponse(Model): + name: str + genome_build: Optional[str] + update_time: str + parent_library_id: EncodedDatabaseIdField + + +class LibraryContentsShowFolderResponse(LibraryContentsShowResponse): + model_class: Annotated[Literal["LibraryFolder"], ModelClassField(Literal["LibraryFolder"])] + id: EncodedLibraryFolderDatabaseIdField + parent_id: Optional[EncodedLibraryFolderDatabaseIdField] + description: str + item_count: int + deleted: bool + library_path: List[str] + + +class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): + model_class: Annotated[Literal["LibraryDataset"], ModelClassField(Literal["LibraryDataset"])] + id: EncodedDatabaseIdField + ldda_id: EncodedDatabaseIdField 
+ folder_id: EncodedLibraryFolderDatabaseIdField + state: str + file_name: str + created_from_basename: Optional[str] + uploaded_by: Optional[str] + message: Optional[str] + date_uploaded: str + file_size: int + file_ext: str + data_type: str + misc_info: Optional[str] + misc_blurb: Optional[str] + peek: Optional[str] + uuid: str + tags: TagCollection + + # metadata fields + model_config = ConfigDict(extra="allow") + + +class LibraryContentsCreateResponse(Model): + name: str + url: str + + +class LibraryContentsCreateFolderResponse(LibraryContentsCreateResponse): + id: EncodedLibraryFolderDatabaseIdField + + +class LibraryContentsCreateFileResponse(LibraryContentsCreateResponse): + id: EncodedDatabaseIdField + + +class LibraryContentsCreateFolderListResponse(RootModel): + root: List[LibraryContentsCreateFolderResponse] + + +class LibraryContentsCreateFileListResponse(RootModel): + root: List[LibraryContentsCreateFileResponse] + + +class LibraryContentsCreateDatasetResponse(Model): + # id, library_dataset_id, parent_library_id should change to EncodedDatabaseIdField latter + # because they are encoded ids in _copy_hda_to_library_folder and _copy_hdca_to_library_folder + # functions that are shared by LibraryFolderContentsService too + id: str + hda_ldda: str + model_class: Annotated[ + Literal["LibraryDatasetDatasetAssociation"], ModelClassField(Literal["LibraryDatasetDatasetAssociation"]) + ] + name: str + deleted: bool + visible: bool + state: str + library_dataset_id: str + file_size: int + file_name: str + update_time: str + file_ext: str + data_type: str + genome_build: str + misc_info: Optional[str] + misc_blurb: Optional[str] + created_from_basename: Optional[str] + uuid: str + parent_library_id: str + + # metadata fields + model_config = ConfigDict(extra="allow") + + +class LibraryContentsCreateDatasetCollectionResponse(RootModel): + root: List[LibraryContentsCreateDatasetResponse] + + +class LibraryContentsDeleteResponse(Model): + id: EncodedDatabaseIdField + deleted: bool + + +class LibraryContentsPurgedResponse(LibraryContentsDeleteResponse): + purged: bool + + +AnyLibraryContentsShowResponse = Union[ + LibraryContentsShowFolderResponse, + LibraryContentsShowDatasetResponse, +] + +AnyLibraryContentsCreatePayload = Union[ + LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload +] + +AnyLibraryContentsCreateResponse = Union[ + LibraryContentsCreateFolderListResponse, + LibraryContentsCreateFileListResponse, + LibraryContentsCreateDatasetCollectionResponse, + LibraryContentsCreateDatasetResponse, +] diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index dc3c5e120f34..a0403b78adeb 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -3,506 +3,169 @@ """ import logging -from typing import Optional +import shutil +import tempfile +from typing import ( + List, + Optional, +) + +from fastapi import ( + Body, + Depends, + Request, + UploadFile, +) +from starlette.datastructures import UploadFile as StarletteUploadFile -from galaxy import ( - exceptions, - managers, - util, +from galaxy.managers.context import ( + ProvidesHistoryContext, + ProvidesUserContext, ) -from galaxy.actions.library import ( - LibraryActions, - validate_path_upload, +from galaxy.schema.fields import DecodedDatabaseIdField +from galaxy.schema.library_contents import ( + AnyLibraryContentsCreatePayload, + 
AnyLibraryContentsCreateResponse, + AnyLibraryContentsShowResponse, + LibraryContentsDeletePayload, + LibraryContentsDeleteResponse, + LibraryContentsFileCreatePayload, + LibraryContentsIndexListResponse, ) -from galaxy.managers.collections_util import ( - api_payload_to_create_params, - dictify_dataset_collection_instance, +from galaxy.webapps.galaxy.api import ( + depends, + DependsOnTrans, + Router, ) -from galaxy.model import ( - ExtendedMetadata, - ExtendedMetadataIndex, - Library, - LibraryDataset, - LibraryFolder, - tags, +from galaxy.webapps.galaxy.services.library_contents import ( + LibraryContentsService, + MaybeLibraryFolderOrDatasetID, ) -from galaxy.model.base import transaction -from galaxy.structured_app import StructuredApp -from galaxy.web import expose_api -from galaxy.webapps.base.controller import ( - HTTPBadRequest, - url_for, - UsesFormDefinitionsMixin, - UsesLibraryMixinItems, +from . import ( + APIContentTypeRoute, + as_form, ) -from galaxy.webapps.galaxy.api import BaseGalaxyAPIController log = logging.getLogger(__name__) - -class LibraryContentsController( - BaseGalaxyAPIController, UsesLibraryMixinItems, UsesFormDefinitionsMixin, LibraryActions -): - def __init__(self, app: StructuredApp, hda_manager: managers.hdas.HDAManager): - super().__init__(app) - self.hda_manager = hda_manager - - @expose_api - def index(self, trans, library_id, **kwd): - """ - GET /api/libraries/{library_id}/contents: - - Return a list of library files and folders. - - .. note:: This endpoint is slow for large libraries. Returns all content traversing recursively through all folders. - .. seealso:: :class:`galaxy.webapps.galaxy.api.FolderContentsController.index` for a faster non-recursive solution - - :param library_id: the encoded id of the library - :type library_id: str - - :returns: list of dictionaries of the form: - - * id: the encoded id of the library item - * name: the 'library path' - or relationship of the library item to the root - * type: 'file' or 'folder' - * url: the url to get detailed information on the library item - - :rtype: list - - :raises: MalformedId, InconsistentDatabase, RequestParameterInvalidException, InternalServerError - """ - rval = [] - current_user_roles = trans.get_current_user_roles() - - def traverse(folder): - admin = trans.user_is_admin - rval = [] - for subfolder in folder.active_folders: - if not admin: - can_access, folder_ids = trans.app.security_agent.check_folder_contents( - trans.user, current_user_roles, subfolder - ) - if (admin or can_access) and not subfolder.deleted: - subfolder.api_path = f"{folder.api_path}/{subfolder.name}" - subfolder.api_type = "folder" - rval.append(subfolder) - rval.extend(traverse(subfolder)) - for ld in folder.datasets: - if not admin: - can_access = trans.app.security_agent.can_access_dataset( - current_user_roles, ld.library_dataset_dataset_association.dataset - ) - if (admin or can_access) and not ld.deleted: - ld.api_path = f"{folder.api_path}/{ld.name}" - ld.api_type = "file" - rval.append(ld) - return rval - - library = trans.sa_session.get(Library, self.decode_id(library_id)) - if not library: - raise exceptions.RequestParameterInvalidException("No library found with the id provided.") - if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)): - raise exceptions.RequestParameterInvalidException("No library found with the id provided.") - encoded_id = f"F{trans.security.encode_id(library.root_folder.id)}" - # appending root folder - rval.append( - dict( - 
id=encoded_id, - type="folder", - name="/", - url=url_for("library_content", library_id=library_id, id=encoded_id), - ) - ) - library.root_folder.api_path = "" - # appending all other items in the library recursively - for content in traverse(library.root_folder): - encoded_id = trans.security.encode_id(content.id) - if content.api_type == "folder": - encoded_id = f"F{encoded_id}" - rval.append( - dict( - id=encoded_id, - type=content.api_type, - name=content.api_path, - url=url_for( - "library_content", - library_id=library_id, - id=encoded_id, - ), - ) - ) - return rval - - @expose_api - def show(self, trans, id, library_id, **kwd): - """ - GET /api/libraries/{library_id}/contents/{id} - - Returns information about library file or folder. - - :param id: the encoded id of the library item to return - :type id: str - - :param library_id: the encoded id of the library that contains this item - :type library_id: str - - :returns: detailed library item information - :rtype: dict - - .. seealso:: - :func:`galaxy.model.LibraryDataset.to_dict` and - :attr:`galaxy.model.LibraryFolder.dict_element_visible_keys` - """ - class_name, content_id = self._decode_library_content_id(id) - if class_name == "LibraryFolder": - content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) - rval["id"] = f"F{str(rval['id'])}" - if rval["parent_id"] is not None: # This can happen for root folders. - rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) - else: - content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element") - rval["id"] = trans.security.encode_id(rval["id"]) - rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) - rval["folder_id"] = f"F{str(trans.security.encode_id(rval['folder_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) - - tag_manager = tags.GalaxyTagHandler(trans.sa_session) - rval["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) - return rval - - @expose_api - def create(self, trans, library_id, payload, **kwd): - """ - POST /api/libraries/{library_id}/contents: - - Create a new library file or folder. - - To copy an HDA into a library send ``create_type`` of 'file' and - the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``). - - To copy an HDCA into a library send ``create_type`` of 'file' and - the HDCA's encoded id in ``from_hdca_id`` (and optionally ``ldda_message``). 
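The HDA/HDCA copy behavior described in this removed docstring survives the migration: the new `LibraryContentsFileCreatePayload` carries the same `from_hda_id`/`from_hdca_id` fields. A minimal client sketch against the deprecated route, where the instance URL, API key, and ids are placeholders; the generated schema declares this operation as multipart/form-data, so a strict client may need a multipart encoder instead of the plain form fields shown here:

```python
import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder instance
HEADERS = {"x-api-key": "<your-api-key>"}  # Galaxy API key header

# Copy an accessible HDA into a library folder by sending create_type="file"
# plus the HDA's encoded id; ldda_message is optional.
form = {
    "create_type": "file",
    "folder_id": "<encoded-folder-id>",
    "from_hda_id": "<encoded-hda-id>",
    "ldda_message": "copied from history",
}
response = requests.post(
    f"{GALAXY_URL}/api/libraries/<encoded-library-id>/contents",
    headers=HEADERS,
    data=form,
)
response.raise_for_status()
print(response.json())  # shaped like LibraryContentsCreateDatasetResponse
```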
- - :type library_id: str - :param library_id: the encoded id of the library where to create the new item - :type payload: dict - :param payload: dictionary structure containing: - - * folder_id: the encoded id of the parent folder of the new item - * create_type: the type of item to create ('file', 'folder' or 'collection') - * from_hda_id: (optional, only if create_type is 'file') the - encoded id of an accessible HDA to copy into the library - * ldda_message: (optional) the new message attribute of the LDDA created - * extended_metadata: (optional) sub-dictionary containing any extended - metadata to associate with the item - * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths' - * server_dir: (optional, only if upload_option is - 'upload_directory') relative path of the subdirectory of Galaxy - ``library_import_dir`` (if admin) or ``user_library_import_dir`` - (if non-admin) to upload. All and only the files (i.e. - no subdirectories) contained in the specified directory will be - uploaded. - * filesystem_paths: (optional, only if upload_option is - 'upload_paths' and the user is an admin) file paths on the - Galaxy server to upload to the library, one file per line - * link_data_only: (optional, only when upload_option is - 'upload_directory' or 'upload_paths') either 'copy_files' - (default) or 'link_to_files'. Setting to 'link_to_files' - symlinks instead of copying the files - * name: (optional, only if create_type is 'folder') name of the - folder to create - * description: (optional, only if create_type is 'folder') - description of the folder to create - * tag_using_filenames: (optional) - create tags on datasets using the file's original name - * tags: (optional) - create the given list of tags on datasets - - :returns: a dictionary describing the new item unless ``from_hdca_id`` is supplied, - in that case a list of such dictionaries is returned. - :rtype: object - """ - if trans.user_is_bootstrap_admin: - raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.") - if "create_type" not in payload: - raise exceptions.RequestParameterMissingException("Missing required 'create_type' parameter.") - create_type = payload.pop("create_type") - if create_type not in ("file", "folder", "collection"): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 'create_type' parameter ( {create_type} ) specified." - ) - if "upload_option" in payload and payload["upload_option"] not in ( - "upload_file", - "upload_directory", - "upload_paths", - ): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 'upload_option' parameter ( {payload['upload_option']} ) specified." - ) - if "folder_id" not in payload: - raise exceptions.RequestParameterMissingException("Missing required 'folder_id' parameter.") - folder_id = payload.pop("folder_id") - _, folder_id = self._decode_library_content_id(folder_id) - folder_id = trans.security.decode_id(folder_id) - # security is checked in the downstream controller - parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) - # The rest of the security happens in the library_common controller. - - payload["tag_using_filenames"] = util.string_as_bool(payload.get("tag_using_filenames", None)) - payload["tags"] = util.listify(payload.get("tags", None)) - - # are we copying an HDA to the library folder? 
-
-        # are we copying an HDA to the library folder?
-        # we'll need the id and any message to attach, then branch to that private function
-        from_hda_id, from_hdca_id, ldda_message = (
-            payload.pop("from_hda_id", None),
-            payload.pop("from_hdca_id", None),
-            payload.pop("ldda_message", ""),
-        )
-        if create_type == "file":
-            if from_hda_id:
-                return self._copy_hda_to_library_folder(
-                    trans, self.hda_manager, self.decode_id(from_hda_id), folder_id, ldda_message
-                )
-            if from_hdca_id:
-                return self._copy_hdca_to_library_folder(
-                    trans, self.hda_manager, self.decode_id(from_hdca_id), folder_id, ldda_message
-                )
-
-        # check for extended metadata, store it and pop it out of the param
-        # otherwise sanitize_param will have a fit
-        ex_meta_payload = payload.pop("extended_metadata", None)
-
-        # Now create the desired content object, either file or folder.
-        if create_type == "file":
-            status, output = self._upload_library_dataset(trans, folder_id, **payload)
-        elif create_type == "folder":
-            status, output = self._create_folder(trans, folder_id, **payload)
-        elif create_type == "collection":
-            # Not delegating to library_common, so need to check access to parent
-            # folder here.
-            self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
-            create_params = api_payload_to_create_params(payload)
-            create_params["parent"] = parent
-            dataset_collection_manager = trans.app.dataset_collection_manager
-            dataset_collection_instance = dataset_collection_manager.create(**create_params)
-            return [
-                dictify_dataset_collection_instance(
-                    dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent
-                )
-            ]
-        if status != 200:
-            trans.response.status = status
-            return output
-        else:
-            rval = []
-            for v in output.values():
-                if ex_meta_payload is not None:
-                    # If there is extended metadata, store it, attach it to the dataset, and index it
-                    ex_meta = ExtendedMetadata(ex_meta_payload)
-                    trans.sa_session.add(ex_meta)
-                    v.extended_metadata = ex_meta
-                    trans.sa_session.add(v)
-                    with transaction(trans.sa_session):
-                        trans.sa_session.commit()
-                    for path, value in self._scan_json_block(ex_meta_payload):
-                        meta_i = ExtendedMetadataIndex(ex_meta, path, value)
-                        trans.sa_session.add(meta_i)
-                    with transaction(trans.sa_session):
-                        trans.sa_session.commit()
-                if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation):
-                    v = v.library_dataset
-                encoded_id = trans.security.encode_id(v.id)
-                if create_type == "folder":
-                    encoded_id = f"F{encoded_id}"
-                rval.append(
-                    dict(
-                        id=encoded_id, name=v.name, url=url_for("library_content", library_id=library_id, id=encoded_id)
-                    )
-                )
-            return rval
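# The loop above returns a list of {id, name, url} summaries, and folder ids
# are re-encoded with the "F" prefix before they go out. A representative
# (made-up) response for a created folder:
example_create_response = [
    {
        "id": "Fabcdef0123456789",
        "name": "New Folder",
        "url": "/api/libraries/0123456789abcdef/contents/Fabcdef0123456789",
    }
]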
-
-    def _upload_library_dataset(self, trans, folder_id: int, **kwd):
-        replace_dataset: Optional[LibraryDataset] = None
-        upload_option = kwd.get("upload_option", "upload_file")
-        dbkey = kwd.get("dbkey", "?")
-        if isinstance(dbkey, list):
-            last_used_build = dbkey[0]
-        else:
-            last_used_build = dbkey
-        is_admin = trans.user_is_admin
-        current_user_roles = trans.get_current_user_roles()
-        folder = trans.sa_session.get(LibraryFolder, folder_id)
-        self._check_access(trans, is_admin, folder, current_user_roles)
-        self._check_add(trans, is_admin, folder, current_user_roles)
-        library = folder.parent_library
-        if folder and last_used_build in ["None", None, "?"]:
-            last_used_build = folder.genome_build
-        error = False
-        if upload_option == "upload_paths":
-            validate_path_upload(trans)  # Duplicate check made in _upload_dataset.
-        elif roles := kwd.get("roles", ""):
-            # Check to see if the user selected roles to associate with the DATASET_ACCESS permission
-            # on the dataset that would cause accessibility issues.
-            vars = dict(DATASET_ACCESS_in=roles)
-            permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access(
-                trans, library.id, "api", library=True, **vars
-            )
-        if error:
-            return 400, message
-        else:
-            created_outputs_dict = self._upload_dataset(
-                trans, folder_id=folder.id, replace_dataset=replace_dataset, **kwd
-            )
-            if created_outputs_dict:
-                if isinstance(created_outputs_dict, str):
-                    return 400, created_outputs_dict
-                elif isinstance(created_outputs_dict, tuple):
-                    return created_outputs_dict[0], created_outputs_dict[1]
-                return 200, created_outputs_dict
-            else:
-                return 400, "Upload failed"
-
-    def _scan_json_block(self, meta, prefix=""):
-        """
-        Scan a json style data structure, and emit all fields and their values.
-        Example paths
-
-        Data
-        { "data" : [ 1, 2, 3 ] }
-
-        Path:
-        /data == [1,2,3]
-
-        /data/[0] == 1
-        """
-        if isinstance(meta, dict):
-            for a in meta:
-                yield from self._scan_json_block(meta[a], f"{prefix}/{a}")
-        elif isinstance(meta, list):
-            for i, a in enumerate(meta):
-                yield from self._scan_json_block(a, prefix + "[%d]" % (i))
-        else:
-            # BUG: Everything is cast to string, which can lead to false positives
-            # for cross type comparisions, ie "True" == True
-            yield prefix, (f"{meta}").encode()
-
-    @expose_api
-    def update(self, trans, id, library_id, payload, **kwd):
-        """
-        PUT /api/libraries/{library_id}/contents/{id}
-
-        Create an ImplicitlyConvertedDatasetAssociation.
-
-        .. seealso:: :class:`galaxy.model.ImplicitlyConvertedDatasetAssociation`
-
-        :type   id:         str
-        :param  id:         the encoded id of the library item to return
-        :type   library_id: str
-        :param  library_id: the encoded id of the library that contains this item
-        :type   payload:    dict
-        :param  payload:    dictionary structure containing::
-            'converted_dataset_id':
-
-        :rtype:     None
-        :returns:   None
-        """
-        if "converted_dataset_id" in payload:
-            converted_id = payload.pop("converted_dataset_id")
-            content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=False)
-            content_conv = self.get_library_dataset(trans, converted_id, check_ownership=False, check_accessible=False)
-            assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation(
-                parent=content.library_dataset_dataset_association,
-                dataset=content_conv.library_dataset_dataset_association,
-                file_type=content_conv.library_dataset_dataset_association.extension,
-                metadata_safe=True,
-            )
-            trans.sa_session.add(assoc)
-            with transaction(trans.sa_session):
-                trans.sa_session.commit()
-
-    def _decode_library_content_id(self, content_id):
-        if len(content_id) % 16 == 0:
-            return "LibraryDataset", content_id
-        elif content_id.startswith("F"):
-            return "LibraryFolder", content_id[1:]
-        else:
-            raise HTTPBadRequest(f"Malformed library content id ( {str(content_id)} ) specified, unable to decode.")
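# _scan_json_block above turns an extended-metadata document into (path, bytes)
# leaf pairs, one ExtendedMetadataIndex row each. A standalone mirror of the
# same walk, runnable outside Galaxy (illustration only):
def scan_json_block(meta, prefix=""):
    if isinstance(meta, dict):
        for key in meta:
            yield from scan_json_block(meta[key], f"{prefix}/{key}")
    elif isinstance(meta, list):
        for i, item in enumerate(meta):
            yield from scan_json_block(item, f"{prefix}[{i}]")
    else:
        # same stringify-then-encode behavior, including the caveat noted above
        yield prefix, f"{meta}".encode()

assert list(scan_json_block({"data": [1, 2]})) == [("/data[0]", b"1"), ("/data[1]", b"2")]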
-
-    @expose_api
-    def delete(self, trans, library_id, id, **kwd):
-        """
-        DELETE /api/libraries/{library_id}/contents/{id}
-
-        Delete the LibraryDataset with the given ``id``.
-
-        :type   id:     str
-        :param  id:     the encoded id of the library dataset to delete
-        :type   kwd:    dict
-        :param  kwd:    (optional) dictionary structure containing:
-
-            * payload: a dictionary itself containing:
-                * purge:   if True, purge the LD
-
-        :rtype:     dict
-        :returns:   an error object if an error occurred or a dictionary containing:
-            * id:         the encoded id of the library dataset,
-            * deleted:    if the library dataset was marked as deleted,
-            * purged:     if the library dataset was purged
-        """
-        purge = False
-        if kwd.get("payload", None):
-            purge = util.string_as_bool(kwd["payload"].get("purge", False))
-
-        rval = {"id": id}
-        try:
-            ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True)
-            user_is_admin = trans.user_is_admin
-            can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld)
-            log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify)
-            if not (user_is_admin or can_modify):
-                trans.response.status = 403
-                rval.update({"error": "Unauthorized to delete or purge this library dataset"})
-                return rval
-
-            ld.deleted = True
-            if purge:
-                ld.purged = True
-                trans.sa_session.add(ld)
-                with transaction(trans.sa_session):
-                    trans.sa_session.commit()
-
-                # TODO: had to change this up a bit from Dataset.user_can_purge
-                dataset = ld.library_dataset_dataset_association.dataset
-                no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations)
-                no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association]
-                can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc
-
-                if can_purge_dataset:
-                    try:
-                        ld.library_dataset_dataset_association.dataset.full_delete()
-                        trans.sa_session.add(ld.dataset)
-                    except Exception:
-                        pass
-                    # flush now to preserve deleted state in case of later interruption
-                    with transaction(trans.sa_session):
-                        trans.sa_session.commit()
-                rval["purged"] = True
-            with transaction(trans.sa_session):
-                trans.sa_session.commit()
-            rval["deleted"] = True
-
-        except exceptions.httpexceptions.HTTPInternalServerError:
-            log.exception("Library_contents API, delete: uncaught HTTPInternalServerError: %s, %s", id, str(kwd))
-            raise
-        except exceptions.httpexceptions.HTTPException:
-            raise
-        except Exception as exc:
-            log.exception("library_contents API, delete: uncaught exception: %s, %s", id, str(kwd))
-            trans.response.status = 500
-            rval.update({"error": util.unicodify(exc)})
-        return rval
+router = Router(tags=["libraries"])
+
+
+class FormDataApiRoute(APIContentTypeRoute):
+    match_content_type = "multipart/form-data"
+
+
+class JsonApiRoute(APIContentTypeRoute):
+    match_content_type = "application/json"
+
+
+LibraryContentsCreateForm = as_form(LibraryContentsFileCreatePayload)
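# The two route classes above are what let a single URL serve both body types:
# each subclass declares the Content-Type it matches, so FastAPI dispatches
# JSON requests to create_json and multipart requests to create_form. A generic
# sketch of the mechanism (simplified; not Galaxy's actual APIContentTypeRoute):
from fastapi.routing import APIRoute
from starlette.routing import Match

class ContentTypeRoute(APIRoute):
    match_content_type = ""

    def matches(self, scope):
        # scope["headers"] is a list of (bytes, bytes) pairs per the ASGI spec
        headers = dict(scope.get("headers") or [])
        content_type = headers.get(b"content-type", b"").decode()
        if self.match_content_type and not content_type.startswith(self.match_content_type):
            return Match.NONE, {}
        return super().matches(scope)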
+
+
+@router.cbv
+class FastAPILibraryContents:
+    service: LibraryContentsService = depends(LibraryContentsService)
+
+    @router.get(
+        "/api/libraries/{library_id}/contents",
+        summary="Return a list of library files and folders.",
+        deprecated=True,
+    )
+    def index(
+        self,
+        library_id: DecodedDatabaseIdField,
+        trans: ProvidesUserContext = DependsOnTrans,
+    ) -> LibraryContentsIndexListResponse:
+        """This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead."""
+        return self.service.index(trans, library_id)
+
+    @router.get(
+        "/api/libraries/{library_id}/contents/{id}",
+        name="library_content",
+        summary="Return a library file or folder.",
+        deprecated=True,
+    )
+    def show(
+        self,
+        library_id: DecodedDatabaseIdField,
+        id: MaybeLibraryFolderOrDatasetID,
+        trans: ProvidesUserContext = DependsOnTrans,
+    ) -> AnyLibraryContentsShowResponse:
+        """This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead."""
+        return self.service.show(trans, id)
+
+    @router.post(
+        "/api/libraries/{library_id}/contents",
+        summary="Create a new library file or folder.",
+        deprecated=True,
+        route_class_override=JsonApiRoute,
+    )
+    def create_json(
+        self,
+        library_id: DecodedDatabaseIdField,
+        payload: AnyLibraryContentsCreatePayload,
+        trans: ProvidesHistoryContext = DependsOnTrans,
+    ) -> AnyLibraryContentsCreateResponse:
+        """This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead."""
+        return self.service.create(trans, library_id, payload)
+
+    @router.post(
+        "/api/libraries/{library_id}/contents",
+        summary="Create a new library file or folder.",
+        deprecated=True,
+        route_class_override=FormDataApiRoute,
+    )
+    async def create_form(
+        self,
+        request: Request,
+        library_id: DecodedDatabaseIdField,
+        payload: LibraryContentsFileCreatePayload = Depends(LibraryContentsCreateForm.as_form),
+        files: Optional[List[UploadFile]] = None,
+        trans: ProvidesHistoryContext = DependsOnTrans,
+    ) -> AnyLibraryContentsCreateResponse:
+        """This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead."""
+        # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile
+        if not files:
+            data = await request.form()
+            upload_files = []
+            for upload_file in data.values():
+                if isinstance(upload_file, StarletteUploadFile):
+                    with tempfile.NamedTemporaryFile(
+                        dir=trans.app.config.new_file_path, prefix="upload_file_data_", delete=False
+                    ) as dest:
+                        shutil.copyfileobj(upload_file.file, dest)  # type: ignore[misc]  # https://github.com/python/mypy/issues/15031
+                    upload_file.file.close()
+                    upload_files.append(dict(filename=upload_file.filename, local_filename=dest.name))
+            payload.upload_files = upload_files
+
+        return self.service.create(trans, library_id, payload)
+
+    @router.put(
+        "/api/libraries/{library_id}/contents/{id}",
+        summary="Update a library file or folder.",
+        deprecated=True,
+    )
+    def update(
+        self,
+        library_id: DecodedDatabaseIdField,
+        id: DecodedDatabaseIdField,
+        payload,
+        trans: ProvidesUserContext = DependsOnTrans,
+    ) -> None:
+        """This endpoint is deprecated. Please use PATCH /api/libraries/datasets/{library_id} instead."""
+        return self.service.update(trans, id, payload)
+
+    @router.delete(
+        "/api/libraries/{library_id}/contents/{id}",
+        summary="Delete a library file or folder.",
+        deprecated=True,
+    )
+    def delete(
+        self,
+        library_id: DecodedDatabaseIdField,
+        id: DecodedDatabaseIdField,
+        payload: Optional[LibraryContentsDeletePayload] = Body(None),
+        trans: ProvidesHistoryContext = DependsOnTrans,
+    ) -> LibraryContentsDeleteResponse:
+        """This endpoint is deprecated. Please use DELETE /api/libraries/datasets/{library_id} instead."""
+        return self.service.delete(trans, id, payload or LibraryContentsDeletePayload())
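# Exercising both create routes from a client: the same URL accepts a JSON
# body (JsonApiRoute -> create_json) or a multipart body (FormDataApiRoute ->
# create_form). A hedged sketch with the requests library; the base URL, API
# key, filename, and ids are placeholders:
import requests

base = "https://galaxy.example.org"
headers = {"x-api-key": "MY_API_KEY"}
url = f"{base}/api/libraries/0123456789abcdef/contents"

# JSON body, handled by create_json
requests.post(url, json={"create_type": "folder", "folder_id": "F0123456789abcdef", "name": "docs"}, headers=headers)

# multipart body with a file, handled by create_form
with open("reads.fastq", "rb") as fh:
    requests.post(
        url,
        data={"create_type": "file", "folder_id": "F0123456789abcdef", "upload_option": "upload_file"},
        files={"files": fh},
        headers=headers,
    )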
diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py
index 37780b8f0f4e..e1897420f9a5 100644
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -853,19 +853,6 @@ def populate_api_routes(webapp, app):
         conditions=dict(method=["POST", "GET"]),
     )
 
-    webapp.mapper.resource(
-        "content",
-        "contents",
-        controller="library_contents",
-        name_prefix="library_",
-        path_prefix="/api/libraries/{library_id}",
-        parent_resources=dict(member_name="library", collection_name="libraries"),
-    )
-
-    _add_item_extended_metadata_controller(
-        webapp, name_prefix="library_dataset_", path_prefix="/api/libraries/{library_id}/contents/{library_content_id}"
-    )
-
     webapp.mapper.connect(
         "build_for_rerun",
         "/api/jobs/{id}/build_for_rerun",
diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py
new file mode 100644
index 000000000000..55db4038fcc1
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/services/library_contents.py
@@ -0,0 +1,286 @@
+import logging
+from typing import (
+    List,
+    Tuple,
+    Union,
+)
+
+from fastapi import Path
+from typing_extensions import Annotated
+
+from galaxy import exceptions
+from galaxy.actions.library import LibraryActions
+from galaxy.managers.collections import DatasetCollectionManager
+from galaxy.managers.context import (
+    ProvidesHistoryContext,
+    ProvidesUserContext,
+)
+from galaxy.managers.hdas import HDAManager
+from galaxy.model import (
+    Library,
+    tags,
+)
+from galaxy.model.base import transaction
+from galaxy.schema.fields import DecodedDatabaseIdField
+from galaxy.schema.library_contents import (
+    AnyLibraryContentsCreatePayload,
+    AnyLibraryContentsCreateResponse,
+    AnyLibraryContentsShowResponse,
+    LibraryContentsCreateDatasetCollectionResponse,
+    LibraryContentsCreateDatasetResponse,
+    LibraryContentsCreateFileListResponse,
+    LibraryContentsCreateFolderListResponse,
+    LibraryContentsDeletePayload,
+    LibraryContentsDeleteResponse,
+    LibraryContentsIndexDatasetResponse,
+    LibraryContentsIndexFolderResponse,
+    LibraryContentsIndexListResponse,
+    LibraryContentsShowDatasetResponse,
+    LibraryContentsShowFolderResponse,
+    LibraryContentsUpdatePayload,
+)
+from galaxy.security.idencoding import IdEncodingHelper
+from galaxy.webapps.base.controller import (
+    UsesExtendedMetadataMixin,
+    UsesLibraryMixinItems,
+)
+from galaxy.webapps.galaxy.services.base import ServiceBase
+
+log = logging.getLogger(__name__)
+
+MaybeLibraryFolderOrDatasetID = Annotated[
+    str,
+    Path(
+        title="The encoded ID of a library folder or dataset.",
+        example="F0123456789ABCDEF",
+        min_length=16,
+        pattern="F?[0-9a-fA-F]+",
+    ),
+]
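# The Annotated path type above rejects malformed ids before the service runs:
# at least 16 characters, hex digits with an optional leading "F". A roughly
# equivalent standalone check (illustration only; FastAPI applies the pattern
# through its own validation machinery):
import re

def is_plausible_content_id(content_id: str) -> bool:
    return len(content_id) >= 16 and re.fullmatch(r"F?[0-9a-fA-F]+", content_id) is not None

assert is_plausible_content_id("F0123456789ABCDEF")
assert not is_plausible_content_id("invalid_id")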
+
+
+class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems, UsesExtendedMetadataMixin):
+    """
+    Interface/service shared by controllers for interacting with the contents of a library.
+    """
+
+    def __init__(
+        self,
+        security: IdEncodingHelper,
+        hda_manager: HDAManager,
+        collection_manager: DatasetCollectionManager,
+    ):
+        super().__init__(security)
+        self.hda_manager = hda_manager
+        self.collection_manager = collection_manager
+
+    def index(
+        self,
+        trans: ProvidesUserContext,
+        library_id: DecodedDatabaseIdField,
+    ) -> LibraryContentsIndexListResponse:
+        """Return a list of library files and folders."""
+        rval: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = []
+        current_user_roles = trans.get_current_user_roles()
+        library = trans.sa_session.get(Library, library_id)
+        if not library:
+            raise exceptions.RequestParameterInvalidException("No library found with the id provided.")
+        if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)):
+            raise exceptions.RequestParameterInvalidException("No library found with the id provided.")
+        # appending root folder
+        url = self._url_for(trans, library_id, library.root_folder.id, "folder")
+        rval.append(LibraryContentsIndexFolderResponse(id=library.root_folder.id, type="folder", name="/", url=url))
+        library.root_folder.api_path = ""
+        # appending all other items in the library recursively
+        for content in self._traverse(trans, library.root_folder, current_user_roles):
+            url = self._url_for(trans, library_id, content.id, content.api_type)
+            response_model: Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]
+            common_args = dict(id=content.id, type=content.api_type, name=content.api_path, url=url)
+            if content.api_type == "folder":
+                response_model = LibraryContentsIndexFolderResponse(**common_args)
+            else:
+                response_model = LibraryContentsIndexDatasetResponse(**common_args)
+            rval.append(response_model)
+        return LibraryContentsIndexListResponse(root=rval)
+
+    def show(
+        self,
+        trans: ProvidesUserContext,
+        id: MaybeLibraryFolderOrDatasetID,
+    ) -> AnyLibraryContentsShowResponse:
+        """Returns information about a library file or folder."""
+        class_name, content_id = self._decode_library_content_id(id)
+        if class_name == "LibraryFolder":
+            content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True)
+            return LibraryContentsShowFolderResponse(**content.to_dict(view="element"))
+        else:
+            content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True)
+            rval_dict = content.to_dict(view="element")
+            tag_manager = tags.GalaxyTagHandler(trans.sa_session)
+            rval_dict["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags)
+            return LibraryContentsShowDatasetResponse(**rval_dict)
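# Shape of the list that index() builds above: the root folder first, then one
# entry per accessible folder or dataset with its slash-joined api_path. A
# representative (made-up) serialized response; exact id encoding is up to the
# response models:
example_index_response = [
    {"id": "<encoded id>", "type": "folder", "name": "/", "url": "<library_content url>"},
    {"id": "<encoded id>", "type": "folder", "name": "/raw", "url": "<library_content url>"},
    {"id": "<encoded id>", "type": "file", "name": "/raw/reads.fastq", "url": "<library_content url>"},
]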
+
+    def create(
+        self,
+        trans: ProvidesHistoryContext,
+        library_id: DecodedDatabaseIdField,
+        payload: AnyLibraryContentsCreatePayload,
+    ) -> AnyLibraryContentsCreateResponse:
+        """Create a new library file or folder."""
+        if trans.user_is_bootstrap_admin:
+            raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.")
+        # security is checked in the downstream controller
+        parent = self.get_library_folder(trans, payload.folder_id, check_ownership=False, check_accessible=False)
+        # The rest of the security happens in the library_common controller.
+
+        # are we copying an HDA to the library folder?
+        # we'll need the id and any message to attach, then branch to that private function
+        if payload.create_type == "file":
+            if payload.from_hda_id:
+                rval = self._copy_hda_to_library_folder(
+                    trans, self.hda_manager, payload.from_hda_id, payload.folder_id, payload.ldda_message
+                )
+                return LibraryContentsCreateDatasetResponse(**rval)
+            elif payload.from_hdca_id:
+                rval = self._copy_hdca_to_library_folder(
+                    trans, self.hda_manager, payload.from_hdca_id, payload.folder_id, payload.ldda_message
+                )
+                return LibraryContentsCreateDatasetCollectionResponse(root=rval)
+
+        # Now create the desired content object, either file or folder.
+        if payload.create_type == "file":
+            rval = self._upload_library_dataset(trans, payload)
+            return LibraryContentsCreateFileListResponse(root=self._create_response(trans, payload, rval, library_id))
+        elif payload.create_type == "folder":
+            rval = self._create_folder(trans, payload)
+            return LibraryContentsCreateFolderListResponse(root=self._create_response(trans, payload, rval, library_id))
+        elif payload.create_type == "collection":
+            rval = self._create_collection(trans, payload, parent)
+            return LibraryContentsCreateDatasetCollectionResponse(root=rval)
+        else:
+            raise exceptions.RequestParameterInvalidException("Invalid create_type specified.")
+
+    def update(
+        self,
+        trans: ProvidesUserContext,
+        id: DecodedDatabaseIdField,
+        payload: LibraryContentsUpdatePayload,
+    ) -> None:
+        """Create an ImplicitlyConvertedDatasetAssociation."""
+        if payload.converted_dataset_id:
+            content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=False)
+            content_conv = self.get_library_dataset(
+                trans, payload.converted_dataset_id, check_ownership=False, check_accessible=False
+            )
+            assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation(
+                parent=content.library_dataset_dataset_association,
+                dataset=content_conv.library_dataset_dataset_association,
+                file_type=content_conv.library_dataset_dataset_association.extension,
+                metadata_safe=True,
+            )
+            trans.sa_session.add(assoc)
+            with transaction(trans.sa_session):
+                trans.sa_session.commit()
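# create() above dispatches purely on payload.create_type. A hedged example of
# the JSON a client would send down the "collection" branch (ids are
# placeholders; the element identifier shape follows Galaxy's collection API):
example_collection_payload = {
    "create_type": "collection",
    "folder_id": "<encoded folder id>",
    "collection_type": "list",
    "name": "my list",
    "element_identifiers": [
        {"name": "sample1", "src": "hda", "id": "<encoded hda id>"},
        {"name": "sample2", "src": "hda", "id": "<encoded hda id>"},
    ],
}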
+
+    def delete(
+        self,
+        trans: ProvidesHistoryContext,
+        id: DecodedDatabaseIdField,
+        payload: LibraryContentsDeletePayload,
+    ) -> LibraryContentsDeleteResponse:
+        """Delete the LibraryDataset with the given ``id``."""
+        rval = {"id": id}
+        ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True)
+        user_is_admin = trans.user_is_admin
+        can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld)
+        log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify)
+        if not (user_is_admin or can_modify):
+            raise exceptions.InsufficientPermissionsException("Unauthorized to delete or purge this library dataset")
+
+        ld.deleted = True
+        if payload.purge:
+            ld.purged = True
+            trans.sa_session.add(ld)
+            with transaction(trans.sa_session):
+                trans.sa_session.commit()
+
+            # TODO: had to change this up a bit from Dataset.user_can_purge
+            dataset = ld.library_dataset_dataset_association.dataset
+            no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations)
+            no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association]
+            can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc
+
+            if can_purge_dataset:
+                try:
+                    ld.library_dataset_dataset_association.dataset.full_delete()
+                    trans.sa_session.add(ld.dataset)
+                except Exception:
+                    pass
+                # flush now to preserve deleted state in case of later interruption
+                with transaction(trans.sa_session):
+                    trans.sa_session.commit()
+            rval["purged"] = True
+        with transaction(trans.sa_session):
+            trans.sa_session.commit()
+        rval["deleted"] = True
+        return LibraryContentsDeleteResponse(**rval)
+
+    def _decode_library_content_id(
+        self,
+        content_id: MaybeLibraryFolderOrDatasetID,
+    ) -> Tuple:
+        if len(content_id) % 16 == 0:
+            return "LibraryDataset", content_id
+        elif content_id.startswith("F"):
+            return "LibraryFolder", content_id[1:]
+        else:
+            raise exceptions.MalformedId(
+                f"Malformed library content id ( {str(content_id)} ) specified, unable to decode."
+            )
+
+    def _url_for(self, trans: ProvidesUserContext, library_id, id, type):
+        encoded_library_id = trans.security.encode_id(library_id)
+        encoded_id = trans.security.encode_id(id)
+        if type == "folder":
+            encoded_id = f"F{encoded_id}"
+        return (
+            trans.url_builder("library_content", library_id=encoded_library_id, id=encoded_id)
+            if trans.url_builder
+            else None
+        )
+
+    def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles):
+        admin = trans.user_is_admin
+        rval = []
+        for subfolder in folder.active_folders:
+            if not admin:
+                can_access, folder_ids = trans.app.security_agent.check_folder_contents(
+                    trans.user, current_user_roles, subfolder
+                )
+            if (admin or can_access) and not subfolder.deleted:
+                subfolder.api_path = f"{folder.api_path}/{subfolder.name}"
+                subfolder.api_type = "folder"
+                rval.append(subfolder)
+                rval.extend(self._traverse(trans, subfolder, current_user_roles))
+        for ld in folder.datasets:
+            if not admin:
+                can_access = trans.app.security_agent.can_access_dataset(
+                    current_user_roles, ld.library_dataset_dataset_association.dataset
+                )
+            if (admin or can_access) and not ld.deleted:
+                ld.api_path = f"{folder.api_path}/{ld.name}"
+                ld.api_type = "file"
+                rval.append(ld)
+        return rval
+
+    def _create_response(self, trans, payload, output, library_id):
+        rval = []
+        for v in output.values():
+            if payload.extended_metadata is not None:
+                # If there is extended metadata, store it, attach it to the dataset, and index it
+                self.create_extended_metadata(trans, payload.extended_metadata)
+            if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation):
+                v = v.library_dataset
+            url = self._url_for(trans, library_id, v.id, payload.create_type)
+            rval.append(dict(id=v.id, name=v.name, url=url))
+        return rval
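# _traverse above flattens the folder tree depth-first, building the
# slash-joined api_path values that index() returns. The same walk over plain
# dicts, runnable standalone (illustration only):
def traverse(folder, path=""):
    for sub in folder.get("folders", []):
        sub_path = f"{path}/{sub['name']}"
        yield ("folder", sub_path)
        yield from traverse(sub, sub_path)
    for ds in folder.get("datasets", []):
        yield ("file", f"{path}/{ds['name']}")

root = {"folders": [{"name": "raw", "folders": [], "datasets": [{"name": "reads.fastq"}]}], "datasets": []}
assert list(traverse(root)) == [("folder", "/raw"), ("file", "/raw/reads.fastq")]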
diff --git a/lib/galaxy_test/api/test_libraries.py b/lib/galaxy_test/api/test_libraries.py
index 6cbfaa21e0fe..ba0bd7850870 100644
--- a/lib/galaxy_test/api/test_libraries.py
+++ b/lib/galaxy_test/api/test_libraries.py
@@ -313,9 +313,9 @@ def test_legacy_upload_unknown_datatype(self):
             "upload_option": "upload_file",
             "files_0|url_paste": FILE_URL,
         }
-        create_response = self._post(f"libraries/{library['id']}/contents", payload)
+        create_response = self._post(f"libraries/{library['id']}/contents", payload, json=True)
         self._assert_status_code_is(create_response, 400)
-        assert create_response.json() == "Requested extension 'xxx' unknown, cannot upload dataset."
+        assert create_response.json()["err_msg"] == "Requested extension 'xxx' unknown, cannot upload dataset."
 
     @skip_if_github_down
     @requires_new_library
@@ -552,7 +552,7 @@ def test_create_datasets_in_library_from_collection(self):
             history_id, contents=["xxx", "yyy"], direct_upload=True, wait=True
         ).json()["outputs"][0]["id"]
         payload = {"from_hdca_id": hdca_id, "create_type": "file", "folder_id": folder_id}
-        create_response = self._post(f"libraries/{library['id']}/contents", payload)
+        create_response = self._post(f"libraries/{library['id']}/contents", payload, json=True)
         self._assert_status_code_is(create_response, 200)
 
     @requires_new_library
@@ -588,7 +588,7 @@ def _create_folder(self, library):
             create_type="folder",
             name="New Folder",
         )
-        return self._post(f"libraries/{library['id']}/contents", data=create_data)
+        return self._post(f"libraries/{library['id']}/contents", data=create_data, json=True)
 
     def _create_subfolder(self, containing_folder_id):
         create_data = dict(
@@ -605,6 +605,6 @@ def _create_dataset_in_folder_in_library(self, library_name, content="1 2 3", wa
         history_id = self.dataset_populator.new_history()
         hda_id = self.dataset_populator.new_dataset(history_id, content=content, wait=wait)["id"]
         payload = {"from_hda_id": hda_id, "create_type": "file", "folder_id": folder_id}
-        ld = self._post(f"libraries/{folder_id}/contents", payload)
+        ld = self._post(f"libraries/{library['id']}/contents", payload, json=True)
         ld.raise_for_status()
         return ld
diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py
new file mode 100644
index 000000000000..2bc097d82a43
--- /dev/null
+++ b/lib/galaxy_test/api/test_library_contents.py
@@ -0,0 +1,228 @@
+from galaxy_test.base.populators import (
+    DatasetCollectionPopulator,
+    DatasetPopulator,
+    LibraryPopulator,
+)
+from ._framework import ApiTestCase
+
+
+class TestLibraryContentsApi(ApiTestCase):
+    dataset_populator: DatasetPopulator
+
+    def setUp(self):
+        super().setUp()
+        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
+        self.library_populator = LibraryPopulator(self.galaxy_interactor)
+
+        self.library = self.library_populator.new_private_library("TestLibrary")
+        self.history = self.dataset_populator.new_history()
+
+    def test_create_folder(self):
+        folder_list = self._create_library_content(type="folder")
+        assert isinstance(folder_list, list), "Expected response to be a list"
+        for folder in folder_list:
+            self._assert_has_keys(folder, "id", "name")
+
+    def test_create_file_from_hda(self):
+        file_item = self._create_library_content(type="from_hda")
+        self._assert_has_keys(file_item, "id", "name")
+
+    def test_create_file_from_hdca(self):
+        files = self._create_library_content(type="from_hdca")
+        assert isinstance(files, list), "Response should be a list of files"
+        for file_item in files:
+            self._assert_has_keys(file_item, "id", "name")
+
+    def test_create_invalid(self):
+        library_id = self.library["id"]
+        folder_id = self.library["root_folder_id"]
+
+        payload = {"folder_id": folder_id, "create_type": "invalid_type"}
+        response = self._post(f"/api/libraries/{library_id}/contents", data=payload, json=True)
+        self._assert_status_code_is(response, 400)
+
+    def test_index(self):
+        library_id = self.library["id"]
+        response = self._get(f"/api/libraries/{library_id}/contents")
+        self._assert_status_code_is(response, 200)
+
+        contents = response.json()
+        assert isinstance(contents, list), "Expected response to be a list"
+
+        for item in contents:
+            self._assert_has_keys(item, "id", "name", "type", "url")
+
+    def test_get_library_contents_invalid_id(self):
+        invalid_library_id = "invalid_id"
+        response = self._get(f"/api/libraries/{invalid_library_id}/contents")
+        self._assert_status_code_is(response, 400)
+
+    def test_get_library_folder(self):
+        library_id = self.library["id"]
+        folder_id = self._create_library_content(type="folder")[0]["id"]
+        response = self._get(f"/api/libraries/{library_id}/contents/{folder_id}")
+        self._assert_status_code_is(response, 200)
+
+        folder_info = response.json()
+        self._assert_has_keys(
+            folder_info,
+            "model_class",
+            "id",
+            "parent_id",
+            "name",
+            "description",
+            "item_count",
+            "genome_build",
+            "update_time",
+            "deleted",
+            "library_path",
+            "parent_library_id",
+        )
+
+    def test_get_library_file_from_hda(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hda")["id"]
+        response = self._get(f"/api/libraries/{library_id}/contents/{file_id}")
+        self._assert_status_code_is(response, 200)
+
+        file_info = response.json()
+        self._assert_has_keys(
+            file_info,
+            "id",
+            "ldda_id",
+            "parent_library_id",
+            "folder_id",
+            "model_class",
+            "state",
+            "name",
+            "file_name",
+            "created_from_basename",
+            "uploaded_by",
+            "message",
+            "date_uploaded",
+            "update_time",
+            "file_size",
+            "file_ext",
+            "data_type",
+            "genome_build",
+            "misc_info",
+            "misc_blurb",
+            "peek",
+            "uuid",
+            "metadata_dbkey",
+            "metadata_data_lines",
+            "tags",
+        )
+
+    def test_get_library_file_from_hdca(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hdca")[0]["id"]
+        response = self._get(f"/api/libraries/{library_id}/contents/{file_id}")
+        self._assert_status_code_is(response, 200)
+
+        file_info = response.json()
+        self._assert_has_keys(
+            file_info,
+            "id",
+            "ldda_id",
+            "parent_library_id",
+            "folder_id",
+            "model_class",
+            "state",
+            "name",
+            "file_name",
+            "created_from_basename",
+            "uploaded_by",
+            "message",
+            "date_uploaded",
+            "update_time",
+            "file_size",
+            "file_ext",
+            "data_type",
+            "genome_build",
+            "misc_info",
+            "misc_blurb",
+            "peek",
+            "uuid",
+            "metadata_dbkey",
+            "metadata_data_lines",
+            "tags",
+        )
+
+    def test_get_invalid_library_item(self):
+        library_id = self.library["id"]
+        invalid_item_id = "invalid_id"
+        response = self._get(f"/api/libraries/{library_id}/contents/{invalid_item_id}")
+        self._assert_status_code_is(response, 400)
+
+    def test_delete_library_item_from_hda(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hda")["id"]
+
+        response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}")
+        self._assert_status_code_is(response, 200)
+
+    def test_delete_library_item_from_hdca(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hdca")[0]["id"]
+
+        response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}")
+        self._assert_status_code_is(response, 200)
+
+    def test_delete_library_item_from_hda_purged(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hda")["id"]
+        payload = {"purged": True}
+        response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}", data=payload, json=True)
+        self._assert_status_code_is(response, 200)
+
+    def test_delete_library_item_from_hdca_purged(self):
+        library_id = self.library["id"]
+        file_id = self._create_library_content(type="from_hdca")[0]["id"]
+        payload = {"purged": True}
+        response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}", data=payload, json=True)
+        self._assert_status_code_is(response, 200)
+
+    def test_delete_invalid_library_item(self):
+        library_id = self.library["id"]
+        invalid_item_id = "invalid_id"
+        response_invalid = self._delete(f"/api/libraries/{library_id}/contents/{invalid_item_id}")
+        self._assert_status_code_is(response_invalid, 400)
+
+    def _create_library_content(self, type):
+        folder_id = self.library["root_folder_id"]
+        library_id = self.library["id"]
+
+        if type == "folder":
+            folder_name = "NewFolder"
+            payload = {
+                "folder_id": folder_id,
+                "create_type": "folder",
+                "name": folder_name,
+                "description": "Test",
+            }
+
+        elif type == "from_hda":
+            dataset_id = self.dataset_populator.new_dataset(self.history)["id"]
+            payload = {
+                "folder_id": folder_id,
+                "create_type": "file",
+                "from_hda_id": dataset_id,
+                "ldda_message": "Test",
+            }
+
+        elif type == "from_hdca":
+            hdca_id = self.dataset_collection_populator.create_list_in_history(
+                self.history, contents=["dataset01", "dataset02"], direct_upload=True, wait=True
+            ).json()["outputs"][0]["id"]
+            payload = {
+                "folder_id": folder_id,
+                "create_type": "file",
+                "from_hdca_id": hdca_id,
+                "ldda_message": "Test",
+            }
+
+        response = self._post(f"/api/libraries/{library_id}/contents", data=payload, json=True)
+        self._assert_status_code_is(response, 200)
+        return response.json()
diff --git a/lib/galaxy_test/api/test_tags.py b/lib/galaxy_test/api/test_tags.py
index 035f675aecb5..cdce0a0705af 100644
--- a/lib/galaxy_test/api/test_tags.py
+++ b/lib/galaxy_test/api/test_tags.py
@@ -114,7 +114,9 @@ def _get_item(self, item_id: str):
 
     def test_upload_file_contents_with_tags(self):
         initial_tags = ["name:foobar", "barfoo"]
-        ld = self.library_populator.new_library_dataset(name=f"test-library-dataset-{uuid4()}", tags=initial_tags)
+        ld = self.library_populator.new_library_dataset(
+            name=f"test-library-dataset-{uuid4()}", tags=json.dumps(initial_tags)
+        )
         assert ld["tags"] == initial_tags
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 08c7e08dcdb8..0ee28827d414 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -2807,10 +2807,9 @@ def show():
         return show().json()
 
     def raw_library_contents_create(self, library_id, payload, files=None):
-        if files is None:
-            files = {}
         url_rel = f"libraries/{library_id}/contents"
+        if not files:
+            return self.galaxy_interactor.post(url_rel, payload, json=True)
         return self.galaxy_interactor.post(url_rel, payload, files=files)
 
     def show_ld_raw(self, library_id: str, library_dataset_id: str) -> Response: