Commit dd6bfeae authored by Yakubov, Sergey

Merge branch '75-refactor-pulling-data-from-cache' into 'dev'

Resolve "Refactor pulling data from cache"

Closes #75

See merge request !59
parents 6fd33f80 6e08eb02
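
Note: this change replaces the inline pattern "check dataset.dataset.object_store, then call update_cache(..., trans=trans)" at every call site with a single dataset.sync_cache(...) call. The sync_cache implementation itself is not part of this diff; the sketch below shows the shape it would plausibly take, assuming it merely centralizes the removed guard-and-update pattern and that the object store's update_cache now accepts a user= keyword (both assumptions inferred from the new call sites in this merge request):

    # Minimal sketch only -- the real sync_cache is defined outside this diff.
    # Assumes update_cache now takes user= where old call sites passed trans=.
    def sync_cache(self, user=None, **kwargs):
        """Pull this dataset's file(s) from the object store into the local cache.

        Keyword arguments such as dir_only, extra_dir and alt_name are
        forwarded unchanged, mirroring what the old inline call sites passed.
        """
        if self.dataset.object_store:
            self.dataset.object_store.update_cache(self.dataset, user=user, **kwargs)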
+4 −9
@@ -603,8 +603,7 @@ class BamNative(CompressedArchive, _BamOrSam):
             return f"Binary bam alignments file ({nice_size(dataset.get_size())})"
 
     def to_archive(self, dataset: DatasetProtocol, name: str = "", trans=None) -> Iterable:
-        if dataset.dataset.object_store:
-            dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
+        dataset.sync_cache(user=trans.user)
         rel_paths = []
         file_paths = []
         rel_paths.append(f"{name or dataset.file_name}.{dataset.extension}")
@@ -703,15 +702,12 @@ class BamNative(CompressedArchive, _BamOrSam):
         headers = kwd.get("headers", {})
         preview = util.string_as_bool(preview)
         if offset is not None:
-            if dataset.dataset.object_store:
-                dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
+            dataset.sync_cache(user=trans.user)
             return self.get_chunk(trans, dataset, offset, ck_size), headers
         elif to_ext or not preview:
             return super().display_data(trans, dataset, preview, filename, to_ext, **kwd)
         else:
-            if dataset.dataset.object_store:
-                dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
-
+            dataset.sync_cache(user=trans.user)
             column_names = dataset.metadata.column_names
             if not column_names:
                 column_names = []
@@ -2114,8 +2110,7 @@ class H5MLM(H5):
             to_ext = to_ext or dataset.extension
             return self._serve_raw(dataset, to_ext, headers, **kwd)
 
-        if dataset.dataset.object_store:
-            dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
+        dataset.sync_cache(user=trans.user)
 
         out_dict: Dict = {}
         try:
+1 −2
@@ -236,8 +236,7 @@ class _BlastDb(Data):
                 **kwd,
             )
 
-        if data.dataset.object_store:
-            data.dataset.object_store.update_cache(data.dataset, trans=trans)
+        data.sync_cache(user=trans.user)
 
         if self.file_ext == "blastdbn":
             title = "This is a nucleotide BLAST database"
+9 −13
@@ -391,6 +391,7 @@ class Data(metaclass=DataMeta):
         ext = data.extension
         path = data.file_name
         efp = data.extra_files_path
+
         # Add any central file to the archive,
 
         display_name = os.path.splitext(outfname)[0]
@@ -438,29 +439,27 @@ class Data(metaclass=DataMeta):
         headers["Content-Disposition"] = f'attachment; filename="{filename}"'
         return open(dataset.file_name, mode="rb"), headers
 
-    def to_archive(self, dataset: DatasetProtocol, name: str = "", trans=None) -> Iterable:
+    def to_archive(self, dataset: DatasetProtocol, name: str = "", user=None) -> Iterable:
         """
         Collect archive paths and file handles that need to be exported when archiving `dataset`.
 
         :param dataset: HistoryDatasetAssociation
         :param name: archive name, in collection context corresponds to collection name(s) and element_identifier,
                      joined by '/', e.g 'fastq_collection/sample1/forward'
-        :param trans: current transaction
+        :param user: current user
         """
         rel_paths = []
         file_paths = []
+        dataset.sync_cache(user=user)
         if dataset.datatype.composite_type or dataset.extension.endswith("html"):
             main_file = f"{name}.html"
             rel_paths.append(main_file)
-            if dataset.dataset.object_store:
-                dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
+            dataset.sync_cache(dir_only=True, extra_dir=dataset.dataset.extra_files_path_name, user=user)
             file_paths.append(dataset.file_name)
             for fpath, rpath in self.__archive_extra_files_path(dataset.extra_files_path):
                 rel_paths.append(os.path.join(name, rpath))
                 file_paths.append(fpath)
         else:
-            if dataset.dataset.object_store:
-                dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
             rel_paths.append(f"{name or dataset.file_name}.{dataset.extension}")
             file_paths.append(dataset.file_name)
         return zip(file_paths, rel_paths)
@@ -471,6 +470,8 @@ class Data(metaclass=DataMeta):
         composite_extensions.append("data_manager_json")  # for downloading bundles if bundled.
 
         if data.extension in composite_extensions:
+            # sync cache for extra files
+            data.sync_cache(dir_only=True, extra_dir=data.dataset.extra_files_path_name, user=trans.user)
             return self._archive_composite_dataset(trans, data, headers, do_action=kwd.get("do_action", "zip"))
         else:
             headers["Content-Length"] = str(file_size)
@@ -542,10 +543,7 @@ class Data(metaclass=DataMeta):
         if filename and filename != "index":
             # For files in extra_files_path
             extra_dir = dataset.dataset.extra_files_path_name
-            if dataset.dataset.object_store:
-                dataset.dataset.object_store.update_cache(
-                    dataset.dataset, extra_dir=extra_dir, alt_name=filename, trans=trans
-                )
+            dataset.sync_cache(extra_dir=extra_dir, alt_name=filename, user=trans.user)
             file_path = trans.app.object_store.get_filename(dataset.dataset, extra_dir=extra_dir, alt_name=filename)
             if os.path.exists(file_path):
                 if os.path.isdir(file_path):
@@ -587,9 +585,7 @@ class Data(metaclass=DataMeta):
                 raise ObjectNotFound(f"Could not find '{filename}' on the extra files path {file_path}.")
         self._clean_and_set_mime_type(trans, dataset.get_mime(), headers)
 
-        if dataset.dataset.object_store:
-            dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
-
+        dataset.sync_cache(user=trans.user)
 
         downloading = to_ext is not None
         file_size = _get_file_size(dataset)
+1 −2
@@ -284,8 +284,7 @@ class _Isa(Data):
         if not preview:
             return super().display_data(trans, dataset, preview, filename, to_ext, **kwd)
 
-        if dataset.dataset.object_store:
-            dataset.dataset.object_store.update_cache(dataset.dataset, trans=trans)
+        dataset.sync_cache(user=trans.user)
 
         # prepare the preview of the ISA dataset
         investigation = self._get_investigation(dataset)
+3 −0
@@ -83,6 +83,9 @@ class DatasetProtocol(
     def get_converted_files_by_type(self, file_type):
         ...
 
+    def sync_cache(self, **kwargs):
+        ...
+
     def get_mime(self) -> str:
         ...
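
Adding sync_cache to DatasetProtocol lets datatype code call it on anything that satisfies the protocol, without importing the concrete model class; the permissive **kwargs signature covers every keyword combination used above (user, dir_only, extra_dir, alt_name). A minimal, self-contained illustration of how such a structural protocol checks out (SyncsCache, FakeDataset, and ensure_local are hypothetical, not part of this change):

    from typing import Protocol

    class SyncsCache(Protocol):
        # stands in for the sync_cache slice of DatasetProtocol
        def sync_cache(self, **kwargs): ...

    class FakeDataset:
        def sync_cache(self, **kwargs):
            return None  # nothing to pull for an in-memory fake

    def ensure_local(ds: SyncsCache, user=None):
        ds.sync_cache(user=user)  # mirrors the call sites in this diff

    ensure_local(FakeDataset())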
