Commit 01b5a23c authored by Yakubov, Sergey's avatar Yakubov, Sergey
Browse files

pass user instead of trans to object store

parent 8b5f13b2
Loading
Loading
Loading
Loading
Loading
+4 −4
Original line number Diff line number Diff line
@@ -603,7 +603,7 @@ class BamNative(CompressedArchive, _BamOrSam):
            return f"Binary bam alignments file ({nice_size(dataset.get_size())})"

    def to_archive(self, dataset: DatasetProtocol, name: str = "", trans=None) -> Iterable:
        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)
        rel_paths = []
        file_paths = []
        rel_paths.append(f"{name or dataset.file_name}.{dataset.extension}")
@@ -702,12 +702,12 @@ class BamNative(CompressedArchive, _BamOrSam):
        headers = kwd.get("headers", {})
        preview = util.string_as_bool(preview)
        if offset is not None:
            dataset.sync_cache(trans=trans)
            dataset.sync_cache(user=trans.user)
            return self.get_chunk(trans, dataset, offset, ck_size), headers
        elif to_ext or not preview:
            return super().display_data(trans, dataset, preview, filename, to_ext, **kwd)
        else:
            dataset.sync_cache(trans=trans)
            dataset.sync_cache(user=trans.user)
            column_names = dataset.metadata.column_names
            if not column_names:
                column_names = []
@@ -2110,7 +2110,7 @@ class H5MLM(H5):
            to_ext = to_ext or dataset.extension
            return self._serve_raw(dataset, to_ext, headers, **kwd)

        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)

        out_dict: Dict = {}
        try:
+1 −1
Original line number Diff line number Diff line
@@ -236,7 +236,7 @@ class _BlastDb(Data):
                **kwd,
            )

        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)

        if self.file_ext == "blastdbn":
            title = "This is a nucleotide BLAST database"
+6 −6
Original line number Diff line number Diff line
@@ -438,27 +438,27 @@ class Data(metaclass=DataMeta):
        headers["Content-Disposition"] = f'attachment; filename="{filename}"'
        return open(dataset.file_name, mode="rb"), headers

    def to_archive(self, dataset: DatasetProtocol, name: str = "", trans=None) -> Iterable:
    def to_archive(self, dataset: DatasetProtocol, name: str = "", user=None) -> Iterable:
        """
        Collect archive paths and file handles that need to be exported when archiving `dataset`.

        :param dataset: HistoryDatasetAssociation
        :param name: archive name, in collection context corresponds to collection name(s) and element_identifier,
                     joined by '/', e.g 'fastq_collection/sample1/forward'
        :trans name: current transaction
        :user name: current user
        """
        rel_paths = []
        file_paths = []
        if dataset.datatype.composite_type or dataset.extension.endswith("html"):
            main_file = f"{name}.html"
            rel_paths.append(main_file)
            dataset.sync_cache(trans=trans)
            dataset.sync_cache(user=user)
            file_paths.append(dataset.file_name)
            for fpath, rpath in self.__archive_extra_files_path(dataset.extra_files_path):
                rel_paths.append(os.path.join(name, rpath))
                file_paths.append(fpath)
        else:
            dataset.sync_cache(trans=trans)
            dataset.sync_cache(user=user)
            rel_paths.append(f"{name or dataset.file_name}.{dataset.extension}")
            file_paths.append(dataset.file_name)
        return zip(file_paths, rel_paths)
@@ -540,7 +540,7 @@ class Data(metaclass=DataMeta):
        if filename and filename != "index":
            # For files in extra_files_path
            extra_dir = dataset.dataset.extra_files_path_name
            dataset.sync_cache(extra_dir=extra_dir, alt_name=filename, trans=trans)
            dataset.sync_cache(extra_dir=extra_dir, alt_name=filename, user=trans.user)
            file_path = trans.app.object_store.get_filename(dataset.dataset, extra_dir=extra_dir, alt_name=filename)
            if os.path.exists(file_path):
                if os.path.isdir(file_path):
@@ -582,7 +582,7 @@ class Data(metaclass=DataMeta):
                raise ObjectNotFound(f"Could not find '{filename}' on the extra files path {file_path}.")
        self._clean_and_set_mime_type(trans, dataset.get_mime(), headers)

        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)

        downloading = to_ext is not None
        file_size = _get_file_size(dataset)
+1 −1
Original line number Diff line number Diff line
@@ -284,7 +284,7 @@ class _Isa(Data):
        if not preview:
            return super().display_data(trans, dataset, preview, filename, to_ext, **kwd)

        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)

        # prepare the preview of the ISA dataset
        investigation = self._get_investigation(dataset)
+1 −1
Original line number Diff line number Diff line
@@ -774,7 +774,7 @@ class BaseFastq(Sequence):
        to_ext: Optional[str] = None,
        **kwd,
    ):
        dataset.sync_cache(trans=trans)
        dataset.sync_cache(user=trans.user)

        headers = kwd.get("headers", {})
        if preview:
Loading