Loading client/src/components/User/DiskUsage/Management/Cleanup/categories.js +20 −1 Original line number Diff line number Diff line import _l from "utils/localization"; import { cleanupDatasets, fetchDiscardedDatasets, fetchDiscardedDatasetsSummary } from "../services"; import { cleanupDatasets, fetchDiscardedDatasets, fetchDiscardedDatasetsSummary, cleanupHistories, fetchDiscardedHistories, fetchDiscardedHistoriesSummary, } from "../services"; export const cleanupCategories = [ { Loading @@ -17,6 +24,18 @@ export const cleanupCategories = [ fetchItems: fetchDiscardedDatasets, cleanupItems: cleanupDatasets, }, { id: "deleted_histories", name: _l("Deleted histories"), description: _l( "When you delete a history it's not immediately removed from the disk (so you can recover it later)." + " But this means it's still taking space until you permanently delete it." + " Here you can quickly find and remove those histories to free up some space" ), fetchSummary: fetchDiscardedHistoriesSummary, fetchItems: fetchDiscardedHistories, cleanupItems: cleanupHistories, }, ], }, ]; client/src/components/User/DiskUsage/Management/services.js +67 −0 Original line number Diff line number Diff line Loading @@ -9,6 +9,7 @@ const isDeleted = "q=deleted-eq&qv=True"; const isNotPurged = "q=purged-eq&qv=False"; const maxItemsToFetch = 500; const discardedDatasetsQueryParams = `${isDataset}&${isDeleted}&${isNotPurged}&limit=${maxItemsToFetch}`; const discardedHistoriesQueryParams = `&${isDeleted}&${isNotPurged}&limit=${maxItemsToFetch}`; /** * Calculates the total amount of bytes that can be cleaned by permanently removing Loading Loading @@ -105,6 +106,72 @@ export async function cleanupDatasets(datasets) { return result; } export async function fetchDiscardedHistoriesSummary() { const summaryKeys = "size"; const url = `${getAppRoot()}api/histories?keys=${summaryKeys}&${discardedHistoriesQueryParams}`; try { const { data } = await axios.get(url); const totalSizeInBytes = 
data.reduce((partial_sum, item) => partial_sum + item.size, 0); return new CleanableSummary({ totalSize: totalSizeInBytes, totalItems: data.length, }); } catch (e) { rethrowSimple(e); } } export async function fetchDiscardedHistories(options = {}) { let params = ""; if (options.sortBy) { const sortPostfix = options.sortDesc ? "-dsc" : "-asc"; params += `order=${options.sortBy}${sortPostfix}&`; } if (options.limit) { params += `limit=${options.limit}&`; } if (options.offset) { params += `offset=${options.offset}&`; } const url = `${getAppRoot()}api/histories?keys=${datasetKeys}&${discardedHistoriesQueryParams}&${params}`; try { const { data } = await axios.get(url); return data; } catch (e) { rethrowSimple(e); } } async function purgeHistory(historyId) { const payload = { purge: true, }; const url = `${getAppRoot()}api/histories/${historyId}`; try { const { data } = await axios.delete(url, { data: payload }); return data; } catch (e) { rethrowSimple(e); } } export async function cleanupHistories(histories) { const result = new CleanupResult(); const historiesTable = histories.reduce((acc, item) => ((acc[item.id] = item), acc), {}); // TODO: Promise.all() and do this in parallel? Or add a bulk delete endpoint? try { for (const history of histories) { const requestResult = await purgeHistory(history.id); console.debug(requestResult); result.totalFreeBytes += historiesTable[history.id].size; result.totalItemCount += 1; } } catch (error) { result.errorMessage = error; } return result; } /** * Maps the error messages with the dataset name for user display. * @param datasetsTable Datasets dictionary indexed by ID Loading Loading
// NOTE(review): duplicate paste of the categories.js diff; reconstructed as
// clean code. Hidden diff context is flagged, not guessed.

// ── client/src/components/User/DiskUsage/Management/Cleanup/categories.js ──

import _l from "utils/localization";
import {
    cleanupDatasets,
    fetchDiscardedDatasets,
    fetchDiscardedDatasetsSummary,
    cleanupHistories,
    fetchDiscardedHistories,
    fetchDiscardedHistoriesSummary,
} from "../services";

/**
 * Disk-usage cleanup categories. Each operation bundles the service calls the
 * UI needs: a summary fetcher, an item fetcher, and the cleanup action.
 */
export const cleanupCategories = [
    {
        // NOTE(review): the category header (id/name) and the datasets entry's
        // leading fields are hidden in the collapsed diff hunk; the key name
        // `operations` is inferred from the visible closing brackets — confirm
        // both against the repository.
        operations: [
            {
                fetchSummary: fetchDiscardedDatasetsSummary,
                fetchItems: fetchDiscardedDatasets,
                cleanupItems: cleanupDatasets,
            },
            {
                id: "deleted_histories",
                name: _l("Deleted histories"),
                description: _l(
                    "When you delete a history it's not immediately removed from the disk (so you can recover it later)." +
                        " But this means it's still taking space until you permanently delete it." +
                        " Here you can quickly find and remove those histories to free up some space"
                ),
                fetchSummary: fetchDiscardedHistoriesSummary,
                fetchItems: fetchDiscardedHistories,
                cleanupItems: cleanupHistories,
            },
        ],
    },
];
// NOTE(review): duplicate paste of the services.js diff; reconstructed as
// clean code.

// ── client/src/components/User/DiskUsage/Management/services.js ──

// Query-string fragment selecting histories that are deleted but not purged.
// FIX: the pasted diff had a stray leading "&", which produced a double
// ampersand ("?keys=size&&q=deleted-eq…") in every request URL.
const discardedHistoriesQueryParams = `${isDeleted}&${isNotPurged}&limit=${maxItemsToFetch}`;

/**
 * Fetches the total reclaimable size and count of histories that are deleted
 * but not yet purged.
 * @returns {Promise<CleanableSummary>} aggregate size/count of discarded histories.
 */
export async function fetchDiscardedHistoriesSummary() {
    // Only the `size` field is needed for the summary computation.
    const summaryKeys = "size";
    const url = `${getAppRoot()}api/histories?keys=${summaryKeys}&${discardedHistoriesQueryParams}`;
    try {
        const { data } = await axios.get(url);
        const totalSizeInBytes = data.reduce((partialSum, item) => partialSum + item.size, 0);
        return new CleanableSummary({
            totalSize: totalSizeInBytes,
            totalItems: data.length,
        });
    } catch (e) {
        rethrowSimple(e);
    }
}

/**
 * Fetches discarded (deleted, not purged) histories.
 * @param {Object} [options] - optional `sortBy`, `sortDesc`, `limit`, `offset`.
 * @returns {Promise<Array>} raw history objects returned by the API.
 */
export async function fetchDiscardedHistories(options = {}) {
    let params = "";
    if (options.sortBy) {
        const sortPostfix = options.sortDesc ? "-dsc" : "-asc";
        params += `order=${options.sortBy}${sortPostfix}&`;
    }
    if (options.limit) {
        params += `limit=${options.limit}&`;
    }
    if (options.offset) {
        params += `offset=${options.offset}&`;
    }
    // NOTE(review): reuses `datasetKeys` on a histories endpoint — looks like a
    // copy/paste remnant from the datasets fetcher; confirm the intended key
    // list for histories.
    const url = `${getAppRoot()}api/histories?keys=${datasetKeys}&${discardedHistoriesQueryParams}&${params}`;
    try {
        const { data } = await axios.get(url);
        return data;
    } catch (e) {
        rethrowSimple(e);
    }
}

/**
 * Permanently deletes (purges) a single history.
 * @param {string} historyId - encoded history ID.
 * @returns {Promise<Object>} the API response payload.
 */
async function purgeHistory(historyId) {
    const payload = {
        purge: true,
    };
    const url = `${getAppRoot()}api/histories/${historyId}`;
    try {
        // axios.delete carries a request body via the `data` config key.
        const { data } = await axios.delete(url, { data: payload });
        return data;
    } catch (e) {
        rethrowSimple(e);
    }
}

/**
 * Purges the given histories one by one, accumulating freed bytes and item
 * count. Stops at the first failing request and records the error.
 * @param {Array} histories - items carrying at least `id` and `size`.
 * @returns {Promise<CleanupResult>}
 */
export async function cleanupHistories(histories) {
    const result = new CleanupResult();
    // Index histories by ID so freed size can be looked up after each purge.
    const historiesTable = histories.reduce((acc, item) => ((acc[item.id] = item), acc), {});
    // TODO: Promise.all() and do this in parallel? Or add a bulk delete endpoint?
    try {
        for (const history of histories) {
            const requestResult = await purgeHistory(history.id);
            console.debug(requestResult);
            result.totalFreeBytes += historiesTable[history.id].size;
            result.totalItemCount += 1;
        }
    } catch (error) {
        // NOTE(review): stores the Error object itself, not error.message —
        // confirm CleanupResult consumers expect this (mirrors the datasets
        // cleanup path).
        result.errorMessage = error;
    }
    return result;
}