Unverified Commit 54de00e2 authored by Marius van den Beek's avatar Marius van den Beek Committed by GitHub
Browse files

Merge pull request #19694 from mvdbeek/bwa_index_directory_datatype

[24.2] Add bwa_mem2_index directory datatype, framework enhancements for testing directories
parents a2e598b1 15bec21b
Loading
Loading
Loading
Loading
+2 −0
Original line number Diff line number Diff line
@@ -6749,6 +6749,8 @@ export interface components {
            hashes?: components["schemas"]["FetchDatasetHash"][] | null;
            /** Info */
            info?: string | null;
            /** Metadata */
            metadata?: Record<string, never> | null;
            /** Name */
            name?: string | number | boolean | null;
            /**
+1 −0
Original line number Diff line number Diff line
@@ -304,6 +304,7 @@
      <converter file="archive_to_directory.xml" target_datatype="directory"/>
    </datatype>
    <datatype extension="directory" type="galaxy.datatypes.data:Directory"/>
    <datatype extension="bwa_mem2_index" display_in_upload="true" type="galaxy.datatypes.data:Directory" subclass="true"/>
    <datatype extension="zarr" type="galaxy.datatypes.data:ZarrDirectory" />
    <datatype extension="ome_zarr" type="galaxy.datatypes.images:OMEZarr" />
    <datatype extension="yaml" type="galaxy.datatypes.text:Yaml" display_in_upload="true" />
+8 −0
Original line number Diff line number Diff line
@@ -80,6 +80,7 @@ class ModelPersistenceContext(metaclass=abc.ABCMeta):
        name,
        filename=None,
        extra_files=None,
        metadata=None,
        metadata_source_name=None,
        info=None,
        library_folder=None,
@@ -152,6 +153,12 @@ class ModelPersistenceContext(metaclass=abc.ABCMeta):
        if final_job_state == galaxy.model.Job.states.ERROR and not self.get_implicit_collection_jobs_association_id():
            primary_data.visible = True

        if metadata:
            for key, value in metadata.items():
                metadata_element = primary_data.datatype.metadata_spec.get(key)
                if metadata_element and metadata_element.set_in_upload:
                    setattr(primary_data.metadata, key, value)

        for source_dict in sources:
            source = galaxy.model.DatasetSource()
            source.source_uri = source_dict["source_uri"]
@@ -860,6 +867,7 @@ def persist_hdas(elements, model_persistence_context, final_job_state="ok"):
                    name=name,
                    filename=discovered_file.path,
                    extra_files=extra_files,
                    metadata=element.get("metadata"),
                    info=info,
                    tag_list=tag_list,
                    link_data=link_data,
+3 −0
Original line number Diff line number Diff line
import json
from enum import Enum
from typing import (
    Any,
    Dict,
    List,
    Optional,
    Union,
@@ -181,6 +183,7 @@ class PathDataElement(BaseDataElement):
class CompositeDataElement(BaseDataElement):
    """Fetch-API data element assembled from multiple component files.

    Discriminated by ``src == "composite"``; the component files live in
    ``composite``. NOTE(review): appears to be a Pydantic model (other
    elements here inherit FetchBaseModel) — confirm against BaseDataElement.
    """

    src: Literal["composite"]  # fixed discriminator tag for this element variant
    composite: "CompositeItems"  # forward reference; CompositeItems is declared below in this file
    # Optional per-element metadata added in this change set; the persistence
    # hunk in this same diff applies only keys whose metadata spec has
    # set_in_upload=True, so other keys are presumably ignored — verify.
    metadata: Optional[Dict[str, Any]] = None


class CompositeItems(FetchBaseModel):
+18 −5
Original line number Diff line number Diff line
@@ -11,11 +11,13 @@ import os
from typing import (
    Any,
    BinaryIO,
    Callable,
    Dict,
    List,
    Optional,
    Tuple,
    TYPE_CHECKING,
    Union,
)

import yaml
@@ -84,9 +86,10 @@ class StagingInterface(metaclass=abc.ABCMeta):
        use_path_paste: bool = LOAD_TOOLS_FROM_PATH,
        to_posix_lines: bool = True,
        job_dir: str = ".",
        resolve_data: Optional[Callable[[str], Optional[str]]] = None,
    ) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
        def upload_func_fetch(upload_target: UploadTarget) -> Dict[str, Any]:
            def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> Dict[str, str]:
            def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> Dict[str, Union[str, bool]]:
                uri = path_or_uri_to_uri(uri)
                is_path = uri.startswith("file://")
                if not is_path or use_path_paste:
@@ -119,6 +122,8 @@ class StagingInterface(metaclass=abc.ABCMeta):
                    for i, composite_data in enumerate(upload_target.composite_data):
                        composite_item_src = _attach_file(fetch_payload, composite_data, index=i)
                        composite_items.append(composite_item_src)
                    if "metadata" in upload_target.properties:
                        fetch_payload["targets"][0]["elements"][0]["metadata"] = upload_target.properties["metadata"]
                    fetch_payload["targets"][0]["elements"][0]["src"] = "composite"
                    fetch_payload["targets"][0]["elements"][0]["composite"] = {
                        "items": composite_items,
@@ -142,11 +147,18 @@ class StagingInterface(metaclass=abc.ABCMeta):
                if tags:
                    fetch_payload["targets"][0]["elements"][0]["tags"] = tags
            elif isinstance(upload_target, DirectoryUploadTarget):
                fetch_payload = _fetch_payload(history_id, file_type="directory")
                fetch_payload["targets"][0].pop("elements")
                fetch_payload = _fetch_payload(history_id, file_type=upload_target.file_type)
                element = fetch_payload["targets"][0]["elements"][0]
                element["name"] = upload_target.name
                tar_path = upload_target.tar_path
                src = _attach_file(fetch_payload, tar_path)
                fetch_payload["targets"][0]["elements_from"] = src
                extra_files = _attach_file(fetch_payload, tar_path)
                extra_files["fuzzy_root"] = False
                extra_files["items_from"] = "archive"
                # {"src": "pasted", "paste_content": ""} because
                # we need some primary file even if we don't have one
                element["src"] = "pasted"
                element["paste_content"] = ""
                element["extra_files"] = extra_files
            elif isinstance(upload_target, ObjectUploadTarget):
                content = json.dumps(upload_target.object)
                fetch_payload = _fetch_payload(history_id, file_type="expression.json")
@@ -263,6 +275,7 @@ class StagingInterface(metaclass=abc.ABCMeta):
            upload,
            create_collection_func,
            tool_or_workflow,
            resolve_data=resolve_data,
        )

    # extension point for planemo to override logging
Loading