.circleci/config.yml (+1 −1)

@@ -34,7 +34,7 @@ jobs:
           command: |-
             # Add github.com to known hosts
             mkdir -p ~/.ssh
-            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== ' >> ~/.ssh/known_hosts
+            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk= ' >> ~/.ssh/known_hosts
             # Add the user ssh key and set correct perms
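The change above swaps GitHub's retired RSA host key for the rotated one. As a sanity check when pinning a host key by hand: OpenSSH's SHA256 fingerprint is just the unpadded base64 of the SHA-256 digest of the base64-decoded key blob. A minimal sketch, not part of this diff; the expected value shown is GitHub's published fingerprint for the rotated RSA key:

import base64
import hashlib

def ssh_key_fingerprint(key_b64: str) -> str:
    # OpenSSH-style fingerprint: SHA-256 over the decoded key blob,
    # base64-encoded with the trailing '=' padding stripped.
    digest = hashlib.sha256(base64.b64decode(key_b64)).digest()
    return "SHA256:" + base64.b64encode(digest).decode().rstrip("=")

# Paste the full blob from the new known_hosts line above; per GitHub's docs
# it should print SHA256:uNiVztksCsDhcc0u9e8BujQXVUpKZIDTMczCvj3tD2s
# print(ssh_key_fingerprint("AAAAB3NzaC1yc2EAAAADAQAB..."))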
lib/galaxy/tool_util/xsd/galaxy.xsd (+5 −0)

@@ -2275,6 +2275,11 @@ $attribute_list::5
     </xs:annotation>
     <xs:attributeGroup ref="AssertAttributeN"/>
     <xs:attributeGroup ref="AssertAttributeNegate"/>
+    <xs:attribute name="comment" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Comment character(s) used to skip comment lines (which should not be used for counting columns)</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
   </xs:complexType>
   <xs:complexType name="AssertHasArchiveMember">
     <xs:annotation>

lib/galaxy/webapps/base/api.py (+1 −0)

@@ -173,6 +173,7 @@ def add_empty_response_middleware(app: FastAPI) -> None:
 def add_sentry_middleware(app: FastAPI) -> None:
     from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
     app.add_middleware(SentryAsgiMiddleware)
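For context on the api.py hunk: add_sentry_middleware wraps the entire FastAPI app in Sentry's ASGI middleware, so unhandled exceptions from any route are reported to Sentry. A standalone sketch of the same pattern; the DSN and route are placeholders, not Galaxy code:

import sentry_sdk
from fastapi import FastAPI
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

# Placeholder DSN; in Galaxy this comes from configuration.
sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

app = FastAPI()
app.add_middleware(SentryAsgiMiddleware)

@app.get("/boom")
def boom():
    # Any unhandled exception here is captured by the middleware and sent to Sentry.
    raise RuntimeError("reported via SentryAsgiMiddleware")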
scripts/cleanup_datasets/admin_cleanup_datasets.py (+16 −11)

@@ -204,13 +204,13 @@ def administrative_delete_datasets(
     # We really only need the id column here, but sqlalchemy barfs when
     # trying to select only 1 column
     hda_ids_query = sa.select(
-        (app.model.HistoryDatasetAssociation.table.c.id, app.model.HistoryDatasetAssociation.table.c.deleted),
+        (app.model.HistoryDatasetAssociation.__table__.c.id, app.model.HistoryDatasetAssociation.__table__.c.deleted),
         whereclause=and_(
-            app.model.Dataset.table.c.deleted == false(),
-            app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
-            app.model.HistoryDatasetAssociation.table.c.deleted == false(),
+            app.model.Dataset.__table__.c.deleted == false(),
+            app.model.HistoryDatasetAssociation.__table__.c.update_time < cutoff_time,
+            app.model.HistoryDatasetAssociation.__table__.c.deleted == false(),
         ),
-        from_obj=[sa.outerjoin(app.model.Dataset.table, app.model.HistoryDatasetAssociation.table)],
+        from_obj=[sa.outerjoin(app.model.Dataset.__table__, app.model.HistoryDatasetAssociation.__table__)],
     )
     # Add all datasets associated with Histories to our list

@@ -232,16 +232,21 @@ def administrative_delete_datasets(
     # Process each of the Dataset objects
     for hda_id in hda_ids:
         user_query = sa.select(
-            [app.model.HistoryDatasetAssociation.table, app.model.History.table, app.model.User.table],
-            whereclause=and_(app.model.HistoryDatasetAssociation.table.c.id == hda_id),
+            [app.model.HistoryDatasetAssociation.__table__, app.model.History.__table__, app.model.User.__table__],
+            whereclause=and_(app.model.HistoryDatasetAssociation.__table__.c.id == hda_id),
             from_obj=[
-                sa.join(app.model.User.table, app.model.History.table).join(app.model.HistoryDatasetAssociation.table)
+                sa.join(app.model.User.__table__, app.model.History.__table__).join(
+                    app.model.HistoryDatasetAssociation.__table__
+                )
             ],
             use_labels=True,
         )
         for result in app.sa_session.execute(user_query):
-            user_notifications[result[app.model.User.table.c.email]].append(
-                (result[app.model.HistoryDatasetAssociation.table.c.name], result[app.model.History.table.c.name])
+            user_notifications[result[app.model.User.__table__.c.email]].append(
+                (
+                    result[app.model.HistoryDatasetAssociation.__table__.c.name],
+                    result[app.model.History.__table__.c.name],
+                )
             )
             deleted_instance_count += 1
     if not info_only and not email_only:

@@ -282,7 +287,7 @@ def _get_tool_id_for_hda(app, hda_id):
     job = (
         app.sa_session.query(app.model.Job)
         .join(app.model.JobToOutputDatasetAssociation)
-        .filter(app.model.JobToOutputDatasetAssociation.table.c.dataset_id == hda_id)
+        .filter(app.model.JobToOutputDatasetAssociation.__table__.c.dataset_id == hda_id)
         .first()
     )
     if job is not None:
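The rename applied throughout this script (and the next) is mechanical: every Model.table becomes Model.__table__. Galaxy's older classical mappings attached the Core Table object to each model class as a custom .table attribute; under declarative mapping, SQLAlchemy itself exposes it as __table__, with columns under .c. A toy sketch of the attribute these queries now rely on (standalone example, not Galaxy's model):

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Dataset(Base):
    __tablename__ = "dataset"
    id = sa.Column(sa.Integer, primary_key=True)
    deleted = sa.Column(sa.Boolean, default=False)

# Declarative mapping exposes the underlying Core Table as __table__,
# and its columns under .c — exactly the access pattern used above.
assert isinstance(Dataset.__table__, sa.Table)
assert Dataset.__table__.c.deleted is Dataset.__table__.columns["deleted"]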
scripts/cleanup_datasets/cleanup_datasets.py (+42 −40)

@@ -223,13 +223,13 @@ def delete_userless_histories(app, cutoff_time, info_only=False, force_retry=False):
     start = time.time()
     if force_retry:
         histories = app.sa_session.query(app.model.History).filter(
-            and_(app.model.History.table.c.user_id == null(), app.model.History.update_time < cutoff_time)
+            and_(app.model.History.__table__.c.user_id == null(), app.model.History.update_time < cutoff_time)
         )
     else:
         histories = app.sa_session.query(app.model.History).filter(
             and_(
-                app.model.History.table.c.user_id == null(),
-                app.model.History.table.c.deleted == false(),
+                app.model.History.__table__.c.user_id == null(),
+                app.model.History.__table__.c.deleted == false(),
                 app.model.History.update_time < cutoff_time,
             )
         )

@@ -257,7 +257,7 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
     if force_retry:
         histories = (
             app.sa_session.query(app.model.History)
-            .filter(and_(app.model.History.table.c.deleted == true(), app.model.History.update_time < cutoff_time))
+            .filter(and_(app.model.History.__table__.c.deleted == true(), app.model.History.update_time < cutoff_time))
             .options(joinedload("datasets"))
         )
     else:

@@ -265,8 +265,8 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
             app.sa_session.query(app.model.History)
             .filter(
                 and_(
-                    app.model.History.table.c.deleted == true(),
-                    app.model.History.table.c.purged == false(),
+                    app.model.History.__table__.c.deleted == true(),
+                    app.model.History.__table__.c.purged == false(),
                     app.model.History.update_time < cutoff_time,
                 )
             )

@@ -307,14 +307,16 @@ def purge_libraries(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
     start = time.time()
     if force_retry:
         libraries = app.sa_session.query(app.model.Library).filter(
-            and_(app.model.Library.table.c.deleted == true(), app.model.Library.table.c.update_time < cutoff_time)
+            and_(
+                app.model.Library.__table__.c.deleted == true(), app.model.Library.__table__.c.update_time < cutoff_time
+            )
         )
     else:
         libraries = app.sa_session.query(app.model.Library).filter(
             and_(
-                app.model.Library.table.c.deleted == true(),
-                app.model.Library.table.c.purged == false(),
-                app.model.Library.table.c.update_time < cutoff_time,
+                app.model.Library.__table__.c.deleted == true(),
+                app.model.Library.__table__.c.purged == false(),
+                app.model.Library.__table__.c.update_time < cutoff_time,
             )
         )
     for library in libraries:

@@ -342,16 +344,16 @@ def purge_folders(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
     if force_retry:
         folders = app.sa_session.query(app.model.LibraryFolder).filter(
             and_(
-                app.model.LibraryFolder.table.c.deleted == true(),
-                app.model.LibraryFolder.table.c.update_time < cutoff_time,
+                app.model.LibraryFolder.__table__.c.deleted == true(),
+                app.model.LibraryFolder.__table__.c.update_time < cutoff_time,
             )
         )
     else:
         folders = app.sa_session.query(app.model.LibraryFolder).filter(
             and_(
-                app.model.LibraryFolder.table.c.deleted == true(),
-                app.model.LibraryFolder.table.c.purged == false(),
-                app.model.LibraryFolder.table.c.update_time < cutoff_time,
+                app.model.LibraryFolder.__table__.c.deleted == true(),
+                app.model.LibraryFolder.__table__.c.purged == false(),
+                app.model.LibraryFolder.__table__.c.update_time < cutoff_time,
             )
         )
     for folder in folders:

@@ -368,34 +370,34 @@ def delete_datasets(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
     start = time.time()
     if force_retry:
         history_dataset_ids_query = sa.select(
-            (app.model.Dataset.table.c.id, app.model.Dataset.table.c.state),
-            whereclause=app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
-            from_obj=[sa.outerjoin(app.model.Dataset.table, app.model.HistoryDatasetAssociation.table)],
+            (app.model.Dataset.__table__.c.id, app.model.Dataset.__table__.c.state),
+            whereclause=app.model.HistoryDatasetAssociation.__table__.c.update_time < cutoff_time,
+            from_obj=[sa.outerjoin(app.model.Dataset.__table__, app.model.HistoryDatasetAssociation.__table__)],
         )
         library_dataset_ids_query = sa.select(
-            (app.model.LibraryDataset.table.c.id, app.model.LibraryDataset.table.c.deleted),
-            whereclause=app.model.LibraryDataset.table.c.update_time < cutoff_time,
-            from_obj=[app.model.LibraryDataset.table],
+            (app.model.LibraryDataset.__table__.c.id, app.model.LibraryDataset.__table__.c.deleted),
+            whereclause=app.model.LibraryDataset.__table__.c.update_time < cutoff_time,
+            from_obj=[app.model.LibraryDataset.__table__],
         )
     else:
         # We really only need the id column here, but sqlalchemy barfs when trying to select only 1 column
        history_dataset_ids_query = sa.select(
-            (app.model.Dataset.table.c.id, app.model.Dataset.table.c.state),
+            (app.model.Dataset.__table__.c.id, app.model.Dataset.__table__.c.state),
             whereclause=and_(
-                app.model.Dataset.table.c.deleted == false(),
-                app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
-                app.model.HistoryDatasetAssociation.table.c.deleted == true(),
+                app.model.Dataset.__table__.c.deleted == false(),
+                app.model.HistoryDatasetAssociation.__table__.c.update_time < cutoff_time,
+                app.model.HistoryDatasetAssociation.__table__.c.deleted == true(),
             ),
-            from_obj=[sa.outerjoin(app.model.Dataset.table, app.model.HistoryDatasetAssociation.table)],
+            from_obj=[sa.outerjoin(app.model.Dataset.__table__, app.model.HistoryDatasetAssociation.__table__)],
         )
         library_dataset_ids_query = sa.select(
-            (app.model.LibraryDataset.table.c.id, app.model.LibraryDataset.table.c.deleted),
+            (app.model.LibraryDataset.__table__.c.id, app.model.LibraryDataset.__table__.c.deleted),
             whereclause=and_(
-                app.model.LibraryDataset.table.c.deleted == true(),
-                app.model.LibraryDataset.table.c.purged == false(),
-                app.model.LibraryDataset.table.c.update_time < cutoff_time,
+                app.model.LibraryDataset.__table__.c.deleted == true(),
+                app.model.LibraryDataset.__table__.c.purged == false(),
+                app.model.LibraryDataset.__table__.c.update_time < cutoff_time,
             ),
-            from_obj=[app.model.LibraryDataset.table],
+            from_obj=[app.model.LibraryDataset.__table__],
         )
     deleted_dataset_count = 0
     deleted_instance_count = 0

@@ -471,18 +473,18 @@ def purge_datasets(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
     if force_retry:
         datasets = app.sa_session.query(app.model.Dataset).filter(
             and_(
-                app.model.Dataset.table.c.deleted == true(),
-                app.model.Dataset.table.c.purgable == true(),
-                app.model.Dataset.table.c.update_time < cutoff_time,
+                app.model.Dataset.__table__.c.deleted == true(),
+                app.model.Dataset.__table__.c.purgable == true(),
+                app.model.Dataset.__table__.c.update_time < cutoff_time,
             )
         )
     else:
         datasets = app.sa_session.query(app.model.Dataset).filter(
             and_(
-                app.model.Dataset.table.c.deleted == true(),
-                app.model.Dataset.table.c.purgable == true(),
-                app.model.Dataset.table.c.purged == false(),
-                app.model.Dataset.table.c.update_time < cutoff_time,
+                app.model.Dataset.__table__.c.deleted == true(),
+                app.model.Dataset.__table__.c.purgable == true(),
+                app.model.Dataset.__table__.c.purged == false(),
+                app.model.Dataset.__table__.c.update_time < cutoff_time,
             )
         )
     for dataset in datasets:

@@ -555,12 +557,12 @@ def _delete_dataset(dataset, app, remove_from_disk, info_only=False, is_deletable=False):
     # lets create a list of metadata files, then perform actions on them
     for hda in dataset.history_associations:
         for metadata_file in app.sa_session.query(app.model.MetadataFile).filter(
-            app.model.MetadataFile.table.c.hda_id == hda.id
+            app.model.MetadataFile.__table__.c.hda_id == hda.id
         ):
             metadata_files.append(metadata_file)
     for ldda in dataset.library_associations:
         for metadata_file in app.sa_session.query(app.model.MetadataFile).filter(
-            app.model.MetadataFile.table.c.lda_id == ldda.id
+            app.model.MetadataFile.__table__.c.lda_id == ldda.id
         ):
             metadata_files.append(metadata_file)
     for metadata_file in metadata_files:
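One aside on where this code could go next: beyond the .table renames, these queries still use SQLAlchemy's legacy select() calling convention (a tuple of columns plus whereclause=/from_obj= keywords), which is deprecated as of 1.4. Purely as an illustration, and not a change this diff makes, the non-retry history_dataset_ids_query above could be written in the 1.4+ style roughly as:

import sqlalchemy as sa

def history_dataset_ids_query(model, cutoff_time):
    # `model` stands in for app.model; the classes are assumed to be
    # declaratively mapped, as in the scripts above.
    dataset = model.Dataset.__table__
    hda = model.HistoryDatasetAssociation.__table__
    return (
        sa.select(dataset.c.id, dataset.c.state)
        .select_from(sa.outerjoin(dataset, hda))
        .where(
            dataset.c.deleted == sa.false(),
            hda.c.update_time < cutoff_time,
            hda.c.deleted == sa.true(),
        )
    )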