Unverified commit d9a4e4ad authored by John Chilton, committed by GitHub

Merge pull request #19865 from jmchilton/handle_percents_in_exports

[24.2] Handle directories with percents in their names with export_remote.xml.
parents 8468ade1 ff011fbe
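
At its core, the fix changes how the target directory URI travels from Galaxy to the export script: instead of interpolating the raw value into the shell command line (`--directory-uri '$d_uri'`), the tool now writes it into an `inputs` JSON config file that the script reads back. A minimal sketch of that data flow (standalone and illustrative, not the Galaxy code itself; the file name is made up for the demo):

```python
# Sketch: the directory URI rides inside a JSON config file rather than a
# shell argument, so characters such as "%" reach the script verbatim.
import json
import tempfile

# Hypothetical stand-in for the <inputs name="inputs"/> configfile Galaxy writes.
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump({"d_uri": "gxftp://space%20dir/"}, f)
    inputs_path = f.name

with open(inputs_path) as fh:
    d_uri = json.load(fh)["d_uri"]

assert d_uri == "gxftp://space%20dir/"  # the percent sequence is untouched
```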
+8 −0
@@ -2669,6 +2669,14 @@ class DirectoryUriToolParameter(SimpleTextToolParameter):
         if not user_has_access:
             raise ParameterValueError(f"The user cannot access {value}.", self.name)
 
+    def to_param_dict_string(self, value, other_values=None) -> str:
+        """Called via __str__ when used in the Cheetah template"""
+        if value is None:
+            value = ""
+        elif not isinstance(value, str):
+            value = str(value)
+        return value
+
 
 class RulesListToolParameter(BaseJsonToolParameter):
     """
+40 −0
@@ -205,6 +205,46 @@ class TestRemoteFilesIntegration(ConfiguresRemoteFilesIntegrationTestCase):
             with open(os.path.join(ftp_dir, "my_cool", "utf8_name_😻.txt")) as f:
                 assert "example content\n" == f.read()
 
+    def test_export_remote_tool_with_space(self):
+        dataset_populator = self.dataset_populator
+        ftp_dir = self.user_ftp_dir
+        _write_file_fixtures(self.root, ftp_dir)
+        dir_with_space = os.path.join(ftp_dir, "space dir")
+        os.makedirs(dir_with_space)
+        with dataset_populator.test_history() as history_id:
+            dataset = dataset_populator.new_dataset(history_id, content="example content", wait=True, name="foo")
+            infile = {"src": "hda", "id": dataset["id"]}
+            inputs = {
+                "d_uri": "gxftp://space dir/",
+                "export_type|export_type_selector": "datasets_named",
+                "export_type|datasets_0|infile": infile,
+                "export_type|datasets_0|name": ".my_cool/utf8_name_😻.txt",
+            }
+            response = dataset_populator.run_tool("export_remote", inputs, history_id)
+            dataset_populator.wait_for_job(response["jobs"][0]["id"], assert_ok=True)
+            with open(os.path.join(ftp_dir, "space dir", "my_cool", "utf8_name_😻.txt")) as f:
+                assert "example content\n" == f.read()
+
+    def test_export_remote_tool_with_space_encoded(self):
+        dataset_populator = self.dataset_populator
+        ftp_dir = self.user_ftp_dir
+        _write_file_fixtures(self.root, ftp_dir)
+        dir_with_space = os.path.join(ftp_dir, "space%20dir")
+        os.makedirs(dir_with_space)
+        with dataset_populator.test_history() as history_id:
+            dataset = dataset_populator.new_dataset(history_id, content="example content", wait=True, name="foo")
+            infile = {"src": "hda", "id": dataset["id"]}
+            inputs = {
+                "d_uri": "gxftp://space%20dir/",
+                "export_type|export_type_selector": "datasets_named",
+                "export_type|datasets_0|infile": infile,
+                "export_type|datasets_0|name": ".my_cool/utf8_name_😻.txt",
+            }
+            response = dataset_populator.run_tool("export_remote", inputs, history_id)
+            dataset_populator.wait_for_job(response["jobs"][0]["id"], assert_ok=True)
+            with open(os.path.join(ftp_dir, "space%20dir", "my_cool", "utf8_name_😻.txt")) as f:
+                assert "example content\n" == f.read()
+
     def test_export_remote_tool_default_duplicate_name_fails(self):
         dataset_populator = self.dataset_populator
         ftp_dir = self.user_ftp_dir
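
The second test pins down the subtle case: `%20` in a directory name must stay literal. If the tool percent-decoded the URI path, `space%20dir` would collapse into `space dir` and the export would land in the wrong directory. A quick illustration of the decoding these tests rule out:

```python
from urllib.parse import unquote

# What percent-decoding would do to the directory name...
assert unquote("space%20dir") == "space dir"
# ...whereas the test above requires the files to land in the literal
# "space%20dir" directory on disk.
```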
+13 −3
@@ -35,15 +35,23 @@ def write_if_not_exists(file_sources, target_uri, real_data_path):
     file_source.write_from(file_source_path.path, real_data_path)
 
 
+def get_directory_uri(args):
+    directory_uri = args.directory_uri
+    if not directory_uri:
+        inputs = json.load(open(args.inputs, "r"))
+        directory_uri = inputs["d_uri"]
+    if not directory_uri.endswith("/"):
+        directory_uri = f"{directory_uri}/"
+    return directory_uri
+
+
 def main(argv=None):
     if argv is None:
         argv = sys.argv[1:]
     args = _parser().parse_args(argv)
     exit_code = 0
     file_sources = get_file_sources(args.file_sources)
-    directory_uri = args.directory_uri
-    if not directory_uri.endswith("/"):
-        directory_uri = f"{directory_uri}/"
+    directory_uri = get_directory_uri(args)
     export_metadata_files = args.export_metadata_files
     with open(args.files_to_export) as f:
         files_to_export = json.load(f)
@@ -68,6 +76,8 @@
 
 def _parser():
     parser = argparse.ArgumentParser()
+    parser.add_argument("--inputs", type=str, help="galaxy inputs")
+    # post 2026.XX drop the directory-uri argument and just assume inputs always contains d_uri.
     parser.add_argument("--directory-uri", type=str, help="directory target URI")
     parser.add_argument("--file-sources", type=str, help="file sources json")
     parser.add_argument("--files-to-export", type=str, help="files to export")
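
The new `get_directory_uri` helper keeps the legacy `--directory-uri` flag working while falling back to the `inputs` JSON when the flag is absent, and normalizes the trailing slash either way. A runnable, slightly simplified sketch of that fallback (the argument object is faked with `SimpleNamespace` for the demo):

```python
import json
import tempfile
from types import SimpleNamespace

def get_directory_uri(args):  # same fallback logic as the helper above
    directory_uri = args.directory_uri
    if not directory_uri:
        with open(args.inputs) as fh:
            directory_uri = json.load(fh)["d_uri"]
    if not directory_uri.endswith("/"):
        directory_uri = f"{directory_uri}/"
    return directory_uri

# Legacy invocation: the flag still wins when supplied.
assert get_directory_uri(SimpleNamespace(directory_uri="gxftp://a", inputs=None)) == "gxftp://a/"

# New invocation: fall back to the d_uri recorded in the inputs JSON.
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump({"d_uri": "gxftp://space%20dir"}, f)
assert get_directory_uri(SimpleNamespace(directory_uri=None, inputs=f.name)) == "gxftp://space%20dir/"
```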
+2 −1
@@ -8,7 +8,7 @@
     <command><![CDATA[
 python '$__tool_directory__/export_remote.py'
     --file-sources '$file_sources'
-    --directory-uri '$d_uri'
+    --inputs '$inputs'
     --files-to-export '$files_to_export'
     #if $include_metadata_files:
         --export-metadata-files $include_metadata_files
@@ -17,6 +17,7 @@ python '$__tool_directory__/export_remote.py'
     ]]></command>
     <configfiles>
         <file_sources name="file_sources"/>
+        <inputs name="inputs" />
         <configfile name="files_to_export">#import json
 #from galaxy.util import filesystem_safe_string
 #if $export_type.export_type_selector == "datasets_auto":