Unverified Commit 88324fc2 authored by mvdbeek's avatar mvdbeek
Browse files

Fix composite data uploads that use substitute_name_with_metadata

parent 6a00e754
Loading
Loading
Loading
Loading
+31 −19
Original line number Diff line number Diff line
@@ -706,30 +706,34 @@ class Data(metaclass=DataMeta):
        """This function is called on the dataset before metadata is set."""
        dataset.clear_associated_files(metadata_safe=True)

    def __new_composite_file(self, name, optional=False, mimetype=None, description=None, substitute_name_with_metadata=None, is_binary=False, to_posix_lines=True, space_to_tab=False, **kwds):
        """Build a Bunch describing one composite file.

        The explicit arguments are folded into any extra keyword settings
        (overriding them on key collision) and the merged mapping becomes
        the Bunch's attributes.
        """
        kwds.update(
            name=name,
            optional=optional,
            mimetype=mimetype,
            description=description,
            substitute_name_with_metadata=substitute_name_with_metadata,
            is_binary=is_binary,
            to_posix_lines=to_posix_lines,
            space_to_tab=space_to_tab,
        )
        return Bunch(**kwds)
    def __new_composite_file(
        self,
        name,
        optional=False,
        mimetype=None,
        description=None,
        substitute_name_with_metadata=None,
        is_binary=False,
        to_posix_lines=True,
        space_to_tab=False,
        **kwds,
    ):
        """Return a Bunch describing a single composite file.

        Extra keyword arguments pass through untouched; the named
        arguments take precedence over same-named keys in ``kwds``.
        """
        explicit = dict(
            name=name,
            optional=optional,
            mimetype=mimetype,
            description=description,
            substitute_name_with_metadata=substitute_name_with_metadata,
            is_binary=is_binary,
            to_posix_lines=to_posix_lines,
            space_to_tab=space_to_tab,
        )
        return Bunch(**{**kwds, **explicit})

    def add_composite_file(self, name, **kwds):
        """Register a composite file named *name* on this datatype.

        The keyword settings are forwarded to ``__new_composite_file`` and
        the resulting descriptor is stored under *name* in
        ``self.composite_files``, replacing any previous entry.
        """
        # Removed stale commented-out copy of composite_files: keeping dead
        # code in comments only confuses future maintenance.
        self.composite_files[name] = self.__new_composite_file(name, **kwds)

    def __substitute_composite_key(self, key, composite_file, dataset=None):
        """Interpolate a metadata value into *key* via %-formatting.

        Only applies when the composite file requests substitution; with a
        dataset the value comes from its metadata, otherwise from the
        metadata spec default.
        """
        metadata_name = composite_file.substitute_name_with_metadata
        if not metadata_name:
            return key
        if dataset:
            substitution = str(dataset.metadata.get(metadata_name))
        else:
            substitution = self.spec[metadata_name].default  # type: ignore
        return key % substitution

    @property
    def writable_files(self):
        files = {}
@@ -739,6 +743,14 @@ class Data(metaclass=DataMeta):
            files[key] = value
        return files

    def get_writable_files_for_dataset(self, dataset):
        """Map writable file keys to composite-file descriptors for *dataset*.

        Includes the primary file descriptor unless the composite type is
        ``auto_primary_file``, then layers in the dataset-aware composite
        files (which may overwrite the primary entry on key collision).
        """
        writable = {}
        if self.composite_type != "auto_primary_file":
            writable[self.primary_file_name] = self.__new_composite_file(self.primary_file_name)
        writable.update(self.get_composite_files(dataset))
        return writable

    def get_composite_files(self, dataset=None):
        def substitute_composite_key(key, composite_file):
            if composite_file.substitute_name_with_metadata:
+18 −5
Original line number Diff line number Diff line
@@ -133,17 +133,30 @@ def _fetch_target(upload_config, target):
        composite = item.pop("composite", None)
        if datatype and datatype.composite_type:
            composite_type = datatype.composite_type
            writable_files = datatype.writable_files
            assert composite_type == "auto_primary_file", "basic composite uploads not yet implemented"

            # get_composite_dataset_name finds dataset name from basename of contents
            # and such but we're not implementing that here yet. yagni?
            # also need name...
            name = item.get("name") or 'Composite Dataset'
            dataset_bunch = Bunch(
            metadata = {
                composite_file.substitute_name_with_metadata: datatype.metadata_spec[
                    composite_file.substitute_name_with_metadata
                ].default
                for composite_file in datatype.composite_files.values()
                if composite_file.substitute_name_with_metadata
            }
            name = item.get("name") or "Composite Dataset"
            metadata["base_name"] = name
            dataset = Bunch(
                name=name,
                metadata=metadata,
            )
            writable_files = datatype.get_writable_files_for_dataset(dataset)
            primary_file = sniff.stream_to_file(
                StringIO(datatype.generate_primary_file(dataset)),
                prefix="upload_auto_primary_file",
                dir=".",
            )
            primary_file = sniff.stream_to_file(StringIO(datatype.generate_primary_file(dataset_bunch)), prefix='upload_auto_primary_file', dir=".")
            extra_files_path = f"{primary_file}_extra"
            os.mkdir(extra_files_path)
            rval: Dict[str, Any] = {
@@ -157,7 +170,7 @@ def _fetch_target(upload_config, target):
            }
            _copy_and_validate_simple_attributes(item, rval)
            composite_items = composite.get("elements", [])
            keys = [value.name for value in writable_files.values()]
            keys = list(writable_files.keys())
            composite_item_idx = 0
            for composite_item in composite_items:
                if composite_item_idx >= len(keys):
+22 −1
Original line number Diff line number Diff line
@@ -419,6 +419,24 @@ class ToolsUploadTestCase(ApiTestCase):
            }
        }
        inputs, datsets = stage_inputs(self.galaxy_interactor, history_id, job, use_path_paste=False)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)

    @skip_without_datatype("pbed")
    @uses_test_history(require_new=False)
    def test_composite_datatype_pbed_stage_fetch(self, history_id):
        """Stage a pbed composite dataset through the fetch API and verify the upload succeeds.

        Fix: the skip guard previously checked for the ``velvet`` datatype
        (copy-paste from the neighboring velvet tests) even though this
        test uploads pbed data, so it would run — and fail confusingly —
        on instances without the pbed datatype installed.
        """
        job = {
            "input1": {
                "class": "File",
                "format": "pbed",
                "composite_data": [
                    "test-data/rgenetics.bim",
                    "test-data/rgenetics.bed",
                    "test-data/rgenetics.fam",
                ],
            }
        }
        # Return values are not needed here; waiting on the history asserts success.
        inputs, datasets = stage_inputs(self.galaxy_interactor, history_id, job, use_path_paste=False)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)

    @skip_without_datatype("velvet")
    @uses_test_history(require_new=False)
@@ -434,7 +452,10 @@ class ToolsUploadTestCase(ApiTestCase):
                ]
            }
        }
        inputs, datsets = stage_inputs(self.galaxy_interactor, history_id, job, use_path_paste=False, use_fetch_api=False)
        inputs, datsets = stage_inputs(
            self.galaxy_interactor, history_id, job, use_path_paste=False, use_fetch_api=False
        )
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)

    @skip_without_datatype("velvet")
    @uses_test_history(require_new=False)