Unverified Commit 0bc1a46b authored by mvdbeek's avatar mvdbeek
Browse files

Merge branch 'release_24.0' into release_24.1

parents 8d13d706 cd2f1f8b
Loading
Loading
Loading
Loading
+5 −1
Original line number Diff line number Diff line
@@ -210,6 +210,9 @@ export default {
            // logic from legacy code
            return !!(this.contains_file_or_folder && Galaxy.user);
        },
        totalRows: function () {
            return this.metadata?.total_rows ?? 0;
        },
    },
    created() {
        const Galaxy = getGalaxyInstance();
@@ -241,7 +244,8 @@ export default {
                const selected = await this.services.getFilteredFolderContents(
                    this.folder_id,
                    this.unselected,
                    this.$parent.searchText
                    this.$parent.searchText,
                    this.totalRows
                );
                this.$emit("setBusy", false);
                return selected;
+4 −3
Original line number Diff line number Diff line
@@ -30,12 +30,13 @@ export class Services {
        }
    }

    async getFilteredFolderContents(id, excluded, searchText) {
    async getFilteredFolderContents(id, excluded, searchText, limit) {
        // The intent of this method is to get folder contents applying
        // seachText filters only; we explicitly set limit to 0
        // searchText filters only; limit should match the total number of
        // items in the folder, so that all items are returned.
        const config = {
            params: {
                limit: 0,
                limit,
            },
        };
        searchText = searchText?.trim();
+5 −5
Original line number Diff line number Diff line
@@ -2005,13 +2005,13 @@ class MinimalJobWrapper(HasResourceParameters):
        # Once datasets are collected, set the total dataset size (includes extra files)
        for dataset_assoc in job.output_datasets:
            dataset = dataset_assoc.dataset.dataset
            if not dataset.purged:
            # assume all datasets in a job get written to the same objectstore
            quota_source_info = dataset.quota_source_info
            collected_bytes += dataset.set_total_size()
            else:
            if dataset.purged:
                # Purge, in case job wrote directly to object store
                dataset.full_delete()
                collected_bytes = 0

        user = job.user
        if user and collected_bytes > 0 and quota_source_info is not None and quota_source_info.use:
+3 −3
Original line number Diff line number Diff line
@@ -668,7 +668,7 @@ class ModelImportStore(metaclass=abc.ABCMeta):
                                assert file_source_root
                                dataset_extra_files_path = os.path.join(file_source_root, dataset_extra_files_path)
                                persist_extra_files(self.object_store, dataset_extra_files_path, dataset_instance)
                            # Don't trust serialized file size
                            # Only trust file size if the dataset is purged. If we keep the data we should check the file size.
                            dataset_instance.dataset.file_size = None
                            dataset_instance.dataset.set_total_size()  # update the filesize record in the database