Unverified Commit 0304821e authored by Marius van den Beek's avatar Marius van den Beek Committed by GitHub
Browse files

Merge pull request #14167 from mvdbeek/fix_folder_import

[22.05] Fix importing whole folders as datasets
parents 802d8976 90c2c1fa
Loading
Loading
Loading
Loading
+1 −8
Original line number Diff line number Diff line
@@ -31,12 +31,6 @@ SerializationKeysQueryParam: Optional[str] = Query(
    description="Comma-separated list of keys to be passed to the serializer",
)

SerializationDefaultViewQueryParam: Optional[str] = Query(
    None,
    title="Default View",
    description="The item view that will be used in case no particular view was specified.",
)

FilterQueryQueryParam: Optional[List[str]] = Query(
    default=None,
    title="Filter Query",
@@ -67,9 +61,8 @@ def parse_serialization_params(
def query_serialization_params(
    view: Optional[str] = SerializationViewQueryParam,
    keys: Optional[str] = SerializationKeysQueryParam,
    default_view: Optional[str] = SerializationDefaultViewQueryParam,
) -> SerializationParams:
    return parse_serialization_params(view=view, keys=keys, default_view=default_view, format=format)
    return parse_serialization_params(view=view, keys=keys)


def get_value_filter_query_params(
+1 −1
Original line number Diff line number Diff line
@@ -587,7 +587,7 @@ class FastAPIHistoryContents:
        ),
        serialization_params: SerializationParams = Depends(query_serialization_params),
        payload: CreateHistoryContentPayload = Body(...),
    ) -> AnyHistoryContentItem:
    ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]:
        """Create a new `HDA` or `HDCA` in the given History."""
        payload.type = type or payload.type
        return self.service.create(trans, history_id, payload, serialization_params)
+3 −4
Original line number Diff line number Diff line
@@ -511,7 +511,7 @@ class HistoriesContentsService(ServiceBase, ServesExportStores, ConsumesModelSto
        history_id: EncodedDatabaseIdField,
        payload: CreateHistoryContentPayload,
        serialization_params: SerializationParams,
    ) -> AnyHistoryContentItem:
    ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]:
        """
        Create a new HDA or HDCA.

@@ -520,6 +520,7 @@ class HistoriesContentsService(ServiceBase, ServesExportStores, ConsumesModelSto
        """
        history = self.history_manager.get_owned(self.decode_id(history_id), trans.user, current_history=trans.history)

        serialization_params.default_view = "detailed"
        history_content_type = payload.type
        if history_content_type == HistoryContentType.dataset:
            source = payload.source
@@ -1334,7 +1335,7 @@ class HistoriesContentsService(ServiceBase, ServesExportStores, ConsumesModelSto
                    history, add_to_history=True
                )
                hda_dict = self.hda_serializer.serialize_to_view(
                    hda, user=trans.user, trans=trans, default_view="detailed", **serialization_params.dict()
                    hda, user=trans.user, trans=trans, **serialization_params.dict()
                )
                rval.append(hda_dict)
        else:
@@ -1368,7 +1369,6 @@ class HistoriesContentsService(ServiceBase, ServesExportStores, ConsumesModelSto
            return None

        trans.sa_session.flush()
        serialization_params.default_view = "detailed"
        return self.hda_serializer.serialize_to_view(hda, user=trans.user, trans=trans, **serialization_params.dict())

    def __create_hda_from_ldda(self, trans, history: History, ldda_id: EncodedDatabaseIdField):
@@ -1465,7 +1465,6 @@ class HistoriesContentsService(ServiceBase, ServesExportStores, ConsumesModelSto

        # if the consumer specified keys or view, use the secondary serializer
        if serialization_params.view or serialization_params.keys:
            serialization_params.default_view = "detailed"
            return self.hdca_serializer.serialize_to_view(
                dataset_collection_instance, user=trans.user, trans=trans, **serialization_params.dict()
            )
+16 −1
Original line number Diff line number Diff line
from galaxy_test.base.populators import LibraryPopulator
from galaxy_test.base.populators import (
    DatasetPopulator,
    LibraryPopulator,
)
from ._framework import ApiTestCase


class FoldersApiTestCase(ApiTestCase):
    def setUp(self):
        """Prepare per-test fixtures: dataset/library populators and a fresh test library."""
        super().setUp()
        interactor = self.galaxy_interactor
        self.dataset_populator = DatasetPopulator(interactor)
        self.library_populator = LibraryPopulator(interactor)
        self.library = self.library_populator.new_library("FolderTestsLibrary")

@@ -78,6 +82,17 @@ class FoldersApiTestCase(ApiTestCase):
        undeleted_folder = undelete_response.json()
        assert undeleted_folder["deleted"] is False

    def test_import_folder_to_history(self):
        """Importing a whole library folder into a history returns a list of the created datasets."""
        library, _ = self.library_populator.fetch_single_url_to_folder()
        folder_dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        payload = {"source": "library_folder", "content": folder_dataset["folder_id"]}
        with self.dataset_populator.test_history() as history_id:
            response = self._post(f"histories/{history_id}/contents", payload, json=True)
            response.raise_for_status()
            imported = response.json()
            assert len(imported) == 1
            assert imported[0]["name"] == "4.bed"

    def test_update_deleted_raise_403(self):
        folder = self._create_folder("Test Update Deleted Folder")
        folder_id = folder["id"]
+3 −28
Original line number Diff line number Diff line
@@ -10,15 +10,13 @@ from galaxy.model.unittest_utils.store_fixtures import (
from galaxy_test.base.populators import (
    DatasetCollectionPopulator,
    DatasetPopulator,
    FILE_URL,
    LibraryPopulator,
    skip_if_github_down,
    skip_without_asgi,
)
from ._framework import ApiTestCase

FILE_URL = "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed"
FILE_MD5 = "37b59762b59fff860460522d271bc111"


class LibrariesApiTestCase(ApiTestCase):
    def setUp(self):
@@ -268,38 +266,15 @@ class LibrariesApiTestCase(ApiTestCase):
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_to_folder(self):
        library, response = self._fetch_single_url_to_folder()
        library, response = self.library_populator.fetch_single_url_to_folder()
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_with_invalid_datatype(self):
        _, response = self._fetch_single_url_to_folder("xxx", assert_ok=False)
        _, response = self.library_populator.fetch_single_url_to_folder("xxx", assert_ok=False)
        self._assert_status_code_is(response, 400)
        assert response.json()["err_msg"] == "Requested extension 'xxx' unknown, cannot upload dataset."

    def _fetch_single_url_to_folder(self, file_type="auto", assert_ok=True):
        """Fetch FILE_URL into a newly created library folder.

        Returns a tuple of the library dict and the fetch response, so callers
        can inspect either the library contents or the raw API result.
        """
        history_id, library, destination = self._setup_fetch_to_folder("single_url")
        # Single URL item, with an MD5 so hash validation is exercised too.
        item = {
            "src": "url",
            "url": FILE_URL,
            "MD5": FILE_MD5,
            "ext": file_type,
        }
        target = {"destination": destination, "items": [item]}
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": [target],
            "validate_hashes": True,
        }
        fetch_response = self.dataset_populator.fetch(payload, assert_ok=assert_ok)
        return library, fetch_response

    def test_legacy_upload_unknown_datatype(self):
        library = self.library_populator.new_private_library("ForLegacyUpload")
        folder_response = self._create_folder(library)
Loading