Unverified Commit 6c354e22 authored by Marius van den Beek's avatar Marius van den Beek Committed by GitHub
Browse files

Merge pull request #13182 from kxk302/release_21.09

[21.09] Backport of iRODS Object Store path name fix in #13080
parents 043371e0 2b70e34a
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -429,7 +429,7 @@ class IRODSObjectStore(DiskObjectStore, CloudConfigMixin):
                log.debug("Pushing cache file '%s' of size %s bytes to collection '%s'", source_file, os.path.getsize(source_file), rel_path)

                # Add the source file to the irods collection
                self.session.data_objects.put(source_file, f"{collection_path}/", **options)
                self.session.data_objects.put(source_file, data_object_path, **options)

                end_time = datetime.now()
                log.debug("Pushed cache file '%s' to collection '%s' (%s bytes transfered in %s sec)",
+1 −1
Original line number Diff line number Diff line
@@ -183,7 +183,7 @@ def test_upload_datatype_dos_disk_and_disk(distributed_instance, test_data, temp

@pytest.mark.parametrize('test_data', TEST_CASES.values(), ids=list(TEST_CASES.keys()))
def test_upload_datatype_irods(irods_instance, test_data, temp_file):
    """Upload each test datatype to an iRODS-backed object store instance.

    Passes delete_cache_dir=True so the helper wipes and re-creates the
    object-store cache directory before downloading, confirming the dataset
    is fetched from the iRODS object store rather than the local cache.
    """
    # The diff rendering left both the old call (without the flag) and the
    # new call here; only the updated call with delete_cache_dir=True belongs.
    upload_datatype_helper(irods_instance, test_data, temp_file, True)


@pytest.mark.parametrize('test_data', TEST_CASES.values(), ids=list(TEST_CASES.keys()))
+10 −1
Original line number Diff line number Diff line
import collections
import os
import shutil

import pytest

@@ -59,7 +60,7 @@ def test_upload_datatype_auto(instance, test_data, temp_file):
    upload_datatype_helper(instance, test_data, temp_file)


def upload_datatype_helper(instance, test_data, temp_file):
def upload_datatype_helper(instance, test_data, temp_file, delete_cache_dir=False):
    is_compressed = False
    for is_method in (is_bz2, is_gzip, is_zip):
        is_compressed = is_method(test_data.path)
@@ -93,6 +94,14 @@ def upload_datatype_helper(instance, test_data, temp_file):
    datatype = registry.datatypes_by_extension[file_ext]
    datatype_compressed = getattr(datatype, "compressed", False)
    if not is_compressed or datatype_compressed:
        if delete_cache_dir:
            # Delete cache directory and then re-create it. This way we confirm
            # that dataset is fetched from the object store, not from the cache
            temp_dir = instance.get_object_store_kwargs()['temp_directory']
            cache_dir = temp_dir + '/object_store_cache'
            shutil.rmtree(cache_dir)
            os.mkdir(cache_dir)

        # download file and verify it hasn't been manipulated
        temp_file.write(instance.dataset_populator.get_history_dataset_content(history_id=instance.history_id,
                                                                               dataset=dataset,