Unverified Commit 472ebfe8 authored by Marius van den Beek's avatar Marius van den Beek Committed by GitHub
Browse files

Merge pull request #17067 from mvdbeek/fix_store_by_id_outputs_to_working_directory_extra_files

[23.0] Fix extra files collection if using ``store_by="id"`` and ``outputs_to_working_directory``
parents 1b0c0bb3 7fe19a7c
Loading
Loading
Loading
Loading
+7 −3
Original line number Diff line number Diff line
@@ -718,9 +718,13 @@ def default_exit_code_file(files_dir, id_tag):
    return os.path.join(files_dir, f"galaxy_{id_tag}.ec")


def collect_extra_files(object_store, dataset, job_working_directory):
def collect_extra_files(object_store, dataset, job_working_directory, outputs_to_working_directory=False):
    # TODO: should this use compute_environment to determine the extra files path ?
    file_name = dataset.dataset.extra_files_path_name_from(object_store)
    real_file_name = file_name = dataset.dataset.extra_files_path_name_from(object_store)
    if outputs_to_working_directory:
        # OutputsToWorkingDirectoryPathRewriter always rewrites extra files to uuid path,
        # so we have to collect from that path even if the real extra files path is dataset_N_files
        file_name = f"dataset_{dataset.dataset.uuid}_files"
    output_location = "outputs"
    temp_file_path = os.path.join(job_working_directory, output_location, file_name)
    if not os.path.exists(temp_file_path):
@@ -739,7 +743,7 @@ def collect_extra_files(object_store, dataset, job_working_directory):
            for f in files:
                object_store.update_from_file(
                    dataset.dataset,
                    extra_dir=os.path.normpath(os.path.join(file_name, os.path.relpath(root, temp_file_path))),
                    extra_dir=os.path.normpath(os.path.join(real_file_name, os.path.relpath(root, temp_file_path))),
                    alt_name=f,
                    file_name=os.path.join(root, f),
                    create=True,
+1 −1
Original line number Diff line number Diff line
@@ -1652,7 +1652,7 @@ class MinimalJobWrapper(HasResourceParameters):
            dataset.dataset.uuid = context["uuid"]
        self.__update_output(job, dataset)
        if not purged:
            collect_extra_files(self.object_store, dataset, self.working_directory)
            collect_extra_files(self.object_store, dataset, self.working_directory, self.outputs_to_working_directory)
        if job.states.ERROR == final_job_state:
            dataset.blurb = "error"
            if not implicit_collection_jobs:
+18 −0
Original line number Diff line number Diff line
@@ -21,3 +21,21 @@ class TestChangeDatatypeStoreByIdIntegration(integration_util.IntegrationTestCas
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)


class StoreByIdTestCase(integration_util.IntegrationInstance):
    """Integration instance exercising the store_by="id" + outputs_to_working_directory combination.

    The pairing matters because outputs_to_working_directory stages extra
    files under a uuid-named path while store_by="id" expects an id-named
    final path (per the diff context above) — TODO confirm against
    collect_extra_files.
    """

    # Enable the framework test tools/datatypes so the tool runner below
    # can execute them against this instance.
    framework_tool_and_types = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # Store datasets keyed by numeric id instead of the default layout.
        config["object_store_store_by"] = "id"
        # Write tool outputs into the job working directory before final
        # collection into the object store.
        config["outputs_to_working_directory"] = True


# Module-level pytest fixture that boots (and tears down) the configured
# Galaxy instance for this test module.
instance = integration_util.integration_module_instance(StoreByIdTestCase)

# Generates one pytest case per listed framework tool; running the composite
# output tool exercises extra-files collection under this configuration.
test_tools = integration_util.integration_tool_runner(
    [
        "composite_output_tests",
    ]
)