import { usePreventLeave } from '@proton/components';
import { CryptoProxy } from '@proton/crypto';
import {
    queryDeleteChildrenLinks,
    queryDeleteTrashedLinks,
    queryEmptyTrashOfShare,
    queryRestoreLinks,
    queryTrashLinks,
} from '@proton/shared/lib/api/drive/link';
import { queryMoveLink } from '@proton/shared/lib/api/drive/share';
import { queryVolumeEmptyTrash } from '@proton/shared/lib/api/drive/volume';
import { API_CODES } from '@proton/shared/lib/constants';
import { BATCH_REQUEST_SIZE, MAX_THREADS_PER_REQUEST } from '@proton/shared/lib/drive/constants';
import runInQueue from '@proton/shared/lib/helpers/runInQueue';
import { encryptPassphrase, generateLookupHash } from '@proton/shared/lib/keys/driveKeys';
import { getDecryptedSessionKey } from '@proton/shared/lib/keys/drivePassphrase';
import chunk from '@proton/utils/chunk';
import groupWith from '@proton/utils/groupWith';

import { EnrichedError } from '../../utils/errorHandling/EnrichedError';
import { ValidationError } from '../../utils/errorHandling/ValidationError';
import { useDebouncedRequest } from '../_api';
import { useDriveEventManager } from '../_events';
import { useDefaultShare, useShare } from '../_shares';
import { useVolumesState } from '../_volumes';
import useLink from './useLink';
import useLinks from './useLinks';
import useLinksState from './useLinksState';
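
// API error codes that mean the request itself was rejected as invalid.
// Responses carrying these codes are surfaced to the user as ValidationErrors
// below instead of being treated as unexpected failures.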
const INVALID_REQUEST_ERROR_CODES = [
    API_CODES.ALREADY_EXISTS_ERROR,
    API_CODES.INVALID_REQUIREMENT_ERROR,
    API_CODES.NOT_ALLOWED_ERROR,
];

interface APIResponses {
    // Note: field types inferred from how the batch responses are read below.
    Responses: {
        Response: {
            Code: number;
            Error?: string;
        };
    }[];
}

/**
 * useLinksActions provides actions for manipulating links in batches.
 */
export function useLinksActions({
    queries,
}: {
    queries: {
        queryDeleteChildrenLinks: typeof queryDeleteChildrenLinks;
        queryDeleteTrashedLinks: typeof queryDeleteTrashedLinks;
        queryEmptyTrashOfShare: typeof queryEmptyTrashOfShare;
        queryRestoreLinks: typeof queryRestoreLinks;
        queryTrashLinks: typeof queryTrashLinks;
    };
}) {
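    // The concrete query builders are supplied by the caller; the default
    // export at the bottom of this file wires in the real Drive API queries.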
    const { preventLeave } = usePreventLeave();
    const debouncedRequest = useDebouncedRequest();
    const events = useDriveEventManager();
    const { getLink, getLinkPassphraseAndSessionKey, getLinkPrivateKey, getLinkHashKey } = useLink();
    const { getLinks } = useLinks();
    const { lockLinks, lockTrash, unlockLinks } = useLinksState();
    const { getDefaultShare } = useDefaultShare();
    const { getShareCreatorKeys } = useShare();
    const volumeState = useVolumesState();

    /**
     * withLinkLock is a helper that locks the provided `linkIds` before the
     * action performed by `callback`, and ensures the links are unlocked once
     * it is done, regardless of how the action turned out. A usage sketch
     * follows the helper below.
     */
    const withLinkLock = async <T>(shareId: string, linkIds: string[], callback: () => Promise<T>): Promise<T> => {
        lockLinks(shareId, linkIds);
        try {
            return await callback();
        } finally {
            const volumeId = volumeState.findVolumeId(shareId);
            if (volumeId) {
                await events.pollEvents.volumes(volumeId);
            }
            unlockLinks(shareId, linkIds);
        }
    };
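
    // Illustrative only: a minimal sketch of how withLinkLock is meant to be
    // used. `trashBatch` is a hypothetical callback, not part of this module.
    //
    //     const result = await withLinkLock(shareId, linkIds, async () => {
    //         // the links stay locked in local state while the request runs
    //         return trashBatch(shareId, linkIds);
    //     });
    //     // here the links are unlocked again and volume events have been polled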

    /**
     * Moves a single link under a new parent. The link, its keys and the
     * share creator's address keys are fetched in parallel; the name lookup
     * hash, node passphrase and encrypted name are then re-derived for the
     * new parent before the move endpoint is called. Returns the id of the
     * original parent link.
     */
    const moveLink = async (
        abortSignal: AbortSignal,
            newParentLinkId: string;
            { passphrase, passphraseSessionKey },
            { privateKey: addressKey, address },
        ] = await Promise.all([
            getLink(abortSignal, shareId, linkId),
            getLinkPassphraseAndSessionKey(abortSignal, shareId, linkId),
            getLinkPrivateKey(abortSignal, newShareId, newParentLinkId),
            getLinkHashKey(abortSignal, newShareId, newParentLinkId),
            getShareCreatorKeys(abortSignal, newShareId),

        if (link.corruptedLink) {
            throw new Error('Cannot move corrupted file');

        const [currentParentPrivateKey, Hash, ContentHash, { NodePassphrase }] = await Promise.all([
            getLinkPrivateKey(abortSignal, shareId, link.parentLinkId),
            generateLookupHash(link.name, newParentHashKey).catch((e) =>
                new EnrichedError('Failed to generate lookup hash during move', {
                    newShareId: newShareId === shareId ? undefined : newShareId,
            link.digests?.sha1 &&
                generateLookupHash(link.digests.sha1, newParentHashKey).catch((e) =>
                    new EnrichedError('Failed to generate content hash during move', {
                        newShareId: newShareId === shareId ? undefined : newShareId,
            encryptPassphrase(newParentPrivateKey, addressKey, passphrase, passphraseSessionKey).catch((e) =>
                new EnrichedError('Failed to encrypt link passphrase during move', {
                    newShareId: newShareId === shareId ? undefined : newShareId,

        const sessionKeyName = await getDecryptedSessionKey({
            data: link.encryptedName,
            privateKeys: currentParentPrivateKey,
            new EnrichedError('Failed to decrypt link name session key during move', {
                newShareId: newShareId === shareId ? undefined : newShareId,

        const { message: encryptedName } = await CryptoProxy.encryptMessage({
            stripTrailingSpaces: true,
            sessionKey: sessionKeyName,
            encryptionKeys: newParentPrivateKey,
            signingKeys: addressKey,
            new EnrichedError('Failed to encrypt link name during move', {
                newShareId: newShareId === shareId ? undefined : newShareId,

        if (!link.signatureAddress) {
            throw new Error('Moving anonymous file is not yet supported');

        await debouncedRequest({
            ...queryMoveLink(shareId, linkId, {
                ParentLinkID: newParentLinkId,
                NameSignatureEmail: address.Email,
                NewShareID: newShareId === shareId ? undefined : newShareId,

            if (INVALID_REQUEST_ERROR_CODES.includes(err?.data?.Code)) {
                throw new ValidationError(err.data.Error);

        const originalParentId = link.parentLinkId;
        return originalParentId;

    const moveLinks = async (
        abortSignal: AbortSignal,
            newParentLinkId: string;
            onMoved?: (linkId: string) => void;
            onError?: (linkId: string) => void;
        return withLinkLock(shareId, linkIds, async () => {
            const originalParentIds: { [linkId: string]: string } = {};
            const successes: string[] = [];
            const failures: { [linkId: string]: any } = {};

            const moveQueue = linkIds.map((linkId) => async () => {
                return moveLink(abortSignal, { shareId, newParentLinkId, linkId, newShareId, silence })
                    .then((originalParentId) => {
                        successes.push(linkId);
                        originalParentIds[linkId] = originalParentId;
                    })
                    .catch((error) => {
                        failures[linkId] = error;

            await preventLeave(runInQueue(moveQueue, MAX_THREADS_PER_REQUEST));
            return { successes, failures, originalParentIds };
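
        // Illustrative only: how a caller might invoke moveLinks and observe
        // per-link progress. Everything except the option names is assumed context.
        //
        //     await moveLinks(abortSignal, {
        //         shareId,
        //         linkIds,
        //         newParentLinkId,
        //         onMoved: (linkId) => console.log('moved', linkId),
        //         onError: (linkId) => console.warn('failed to move', linkId),
        //     });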

    /**
     * batchHelper makes it easier to run an action over many links in several
     * batches, so the API can handle it (rather than sending thousands of
     * links in one request), with the batches run in parallel up to a
     * reasonable limit. A usage sketch follows the helper below.
     */
    const batchHelper = async <T>(
        abortSignal: AbortSignal,
        query: (batchLinkIds: string[], shareId: string) => any,
        maxParallelRequests = MAX_THREADS_PER_REQUEST
    ) => {
        return withLinkLock(shareId, linkIds, async () => {
            const responses: { batchLinkIds: string[]; response: T }[] = [];
            const successes: string[] = [];
            const failures: { [linkId: string]: any } = {};

            const batches = chunk(linkIds, BATCH_REQUEST_SIZE);

            const queue = batches.map(
                (batchLinkIds) => () =>
                    debouncedRequest<T>(query(batchLinkIds, shareId), abortSignal)
                        .then((response) => {
                            responses.push({ batchLinkIds, response });
                            batchLinkIds.forEach((linkId) => successes.push(linkId));
                        })
                        .catch((error) => {
                            batchLinkIds.forEach((linkId) => (failures[linkId] = error));

            await preventLeave(runInQueue(queue, maxParallelRequests));
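
        // Illustrative only: roughly how the actions below (for example
        // deleteChildrenLinks) drive this helper; `parentLinkId` is assumed context.
        //
        //     const { successes, failures } = await batchHelper<APIResponses>(
        //         abortSignal,
        //         shareId,
        //         linkIds,
        //         (batchLinkIds) => queryDeleteChildrenLinks(shareId, parentLinkId, batchLinkIds)
        //     );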

    const batchHelperMultipleShares = async (
        abortSignal: AbortSignal,
        ids: { shareId: string; linkId: string }[],
        query: (batchLinkIds: string[], shareId: string) => any,
        maxParallelRequests = MAX_THREADS_PER_REQUEST
    ) => {
        const groupedByShareId = groupWith((a, b) => a.shareId === b.shareId, ids);

        const results = await Promise.all(
            groupedByShareId.map((group) => {
                return batchHelper<APIResponses>(
                    group.map(({ linkId }) => linkId),

        const { responses, failures } = accumulateResults(results);

        // A batch request can succeed as a whole while individual links fail,
        // so successes and failures are rebuilt per link from the response codes.
        const successes: string[] = [];
        responses.forEach(({ batchLinkIds, response }) => {
            response.Responses.forEach(({ Response }, index) => {
                const linkId = batchLinkIds[index];
                if (!Response.Error) {
                    successes.push(linkId);
                } else if (INVALID_REQUEST_ERROR_CODES.includes(Response.Code)) {
                    failures[linkId] = new ValidationError(Response.Error);
                } else {
                    failures[linkId] = Response.Error;

        return { responses, successes, failures };

    const trashLinks = async (
        abortSignal: AbortSignal,
        ids: { shareId: string; linkId: string; parentLinkId: string }[]
    ) => {
        const linksByParentIds = groupWith((a, b) => a.parentLinkId === b.parentLinkId, ids);

        const results = await Promise.all(
            linksByParentIds.map((linksGroup) => {
                const groupParentLinkId = linksGroup[0].parentLinkId;

                return batchHelperMultipleShares(abortSignal, linksGroup, (batchLinkIds, shareId) => {
                    return queries.queryTrashLinks(shareId, groupParentLinkId, batchLinkIds);

        return accumulateResults(results);

    const restoreLinks = async (abortSignal: AbortSignal, ids: { shareId: string; linkId: string }[]) => {
        /*
            Restore the most freshly trashed links first, so that any potential
            parents are restored before their children: a child cannot be
            restored while its parent is still in the trash.
            If the user did not select the parent anyway, that is fine; an error
            notification will simply report that some link(s) were not restored.
        */
        const links = await getLinks(abortSignal, ids);
        const sortedLinks = links.sort((a, b) => (b.trashed || 0) - (a.trashed || 0));
        const sortedLinkIds = sortedLinks.map(({ linkId, rootShareId }) => ({ linkId, shareId: rootShareId }));

        // Limit restore to one thread at a time to make sure links are restored
        // in the proper order (parents need to be restored before children);
        // a small example follows below.
        const maxParallelRequests = 1;
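
        // Illustrative only, with made-up `trashed` timestamps: the parent was
        // trashed after the child, so the descending sort restores it first.
        //
        //     links       = [{ linkId: 'child', trashed: 100 }, { linkId: 'parent', trashed: 200 }]
        //     sortedLinks = [{ linkId: 'parent', trashed: 200 }, { linkId: 'child', trashed: 100 }]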

        const results = await batchHelperMultipleShares(
            (batchLinkIds, shareId) => {
                return queries.queryRestoreLinks(shareId, batchLinkIds);

    const deleteChildrenLinks = async (
        abortSignal: AbortSignal,
        parentLinkId: string,
        return batchHelper(abortSignal, shareId, linkIds, (batchLinkIds) =>
            queryDeleteChildrenLinks(shareId, parentLinkId, batchLinkIds)

    const deleteTrashedLinks = async (abortSignal: AbortSignal, ids: { linkId: string; shareId: string }[]) => {
        return batchHelperMultipleShares(abortSignal, ids, (batchLinkIds, shareId) => {
            return queries.queryDeleteTrashedLinks(shareId, batchLinkIds);

    const emptyTrash = async (abortSignal: AbortSignal) => {
        const { volumeId } = await getDefaultShare();

        await debouncedRequest(queryVolumeEmptyTrash(volumeId), abortSignal);

        await events.pollEvents.volumes(volumeId);

export default function useLinksActionsWithQuieries() {
    return useLinksActions({
            queryDeleteChildrenLinks,
            queryDeleteTrashedLinks,
            queryEmptyTrashOfShare,

interface Result<T> {
        batchLinkIds: string[];
        [linkId: string]: any;

function accumulateResults<T>(results: Result<T>[]): Result<T> {
    return results.reduce<Result<T>>(
        (acc, result) => {
            acc.responses.push(...result.responses);
            acc.successes.push(...result.successes);
            acc.failures = { ...acc.failures, ...result.failures };
            return acc;
        },
        { responses: [], successes: [], failures: {} }
    );
}