1 import type { Action, Reducer } from 'redux';
3 import { itemEq } from '@proton/pass/lib/items/item.predicates';
4 import { AddressType } from '@proton/pass/lib/monitor/types';
16 itemBulkDeleteProgress,
18 itemBulkRestoreProgress,
19 itemBulkTrashProgress,
45 resolveAddressMonitor,
50 secureLinksRemoveInactive,
58 vaultMoveAllItemsProgress,
59 } from '@proton/pass/store/actions';
60 import { sanitizeWithCallbackAction } from '@proton/pass/store/actions/enhancers/callback';
61 import type { WrappedOptimisticState } from '@proton/pass/store/optimistic/types';
62 import { combineOptimisticReducers } from '@proton/pass/store/optimistic/utils/combine-optimistic-reducers';
63 import withOptimistic from '@proton/pass/store/optimistic/with-optimistic';
66 type IndexedByShareIdAndItemId,
73 } from '@proton/pass/types';
74 import { prop } from '@proton/pass/utils/fp/lens';
75 import { notIn, or } from '@proton/pass/utils/fp/predicates';
76 import { objectDelete } from '@proton/pass/utils/object/delete';
77 import { objectFilter } from '@proton/pass/utils/object/filter';
78 import { objectMap } from '@proton/pass/utils/object/map';
79 import { fullMerge, partialMerge } from '@proton/pass/utils/object/merge';
80 import { getEpoch } from '@proton/pass/utils/time/epoch';
81 import { toMap } from '@proton/shared/lib/helpers/object';
/** itemIds are only guaranteed to be unique per share not globally,
 * therefore we must index the item entries by `shareId` */
export type ItemsByShareId = IndexedByShareIdAndItemId<ItemRevision>;

/** Partial item revision that must carry the `shareId`/`itemId`
 * keys required to locate the entry being patched. */
export type ItemRevisionUpdate = RequiredProps<Partial<ItemRevision>, 'itemId' | 'shareId'>;
88 /** Updates an existing item in the state if both
89 * the shareId and itemId already exist */
90 export const updateItem =
91 ({ shareId, itemId, ...update }: ItemRevisionUpdate) =>
92 (state: ItemsByShareId) =>
93 state[shareId]?.[itemId] ? partialMerge(state, { [shareId]: { [itemId]: update } }) : state;
/** Applies a batch of item updates to the state,
 * ensuring each of them previously existed */
export const updateItems = (data: ItemRevisionUpdate[]) => (state: ItemsByShareId) => {
    /* drop updates targeting entries absent from the current state */
    const updates = data.filter(({ shareId, itemId }) => Boolean(state[shareId]?.[itemId]));
    if (updates.length === 0) return state;

    /* re-index the surviving updates by shareId → itemId before merging */
    updates.reduce<IndexedByShareIdAndItemId<Partial<ItemRevision>>>((acc, { shareId, itemId, ...update }) => {
        acc[shareId] = acc[shareId] ?? {};
        acc[shareId][itemId] = update;
/** Indexes the given full item revisions by shareId → itemId
 * so they can be merged over the current state. */
export const addItems = (data: ItemRevision[]) => (state: ItemsByShareId) =>
    data.reduce<IndexedByShareIdAndItemId<ItemRevision>>((acc, item) => {
        const { shareId, itemId } = item;
        acc[shareId] = acc[shareId] ?? {};
        acc[shareId][itemId] = item;
export const withOptimisticItemsByShareId = withOptimistic<ItemsByShareId>(
    /* item creation: the optimistic entry is reverted when the real
     * item lands (success) or the creation is dismissed */
    initiate: itemCreationIntent.optimisticMatch,
    fail: itemCreationFailure.optimisticMatch,
    revert: [itemCreationSuccess.optimisticMatch, itemCreationDismiss.optimisticMatch],
    /* item edit */
    initiate: itemEditIntent.optimisticMatch,
    fail: itemEditFailure.optimisticMatch,
    commit: itemEditSuccess.optimisticMatch,
    revert: itemEditDismiss.optimisticMatch,
    /* item move */
    initiate: itemMoveIntent.optimisticMatch,
    commit: itemMoveSuccess.optimisticMatch,
    revert: itemMoveFailure.optimisticMatch,
    /* item trash */
    initiate: itemTrashIntent.optimisticMatch,
    commit: itemTrashSuccess.optimisticMatch,
    revert: itemTrashFailure.optimisticMatch,
    /* item restore */
    initiate: itemRestoreIntent.optimisticMatch,
    commit: itemRestoreSuccess.optimisticMatch,
    revert: itemRestoreFailure.optimisticMatch,
    /* item delete */
    initiate: itemDeleteIntent.optimisticMatch,
    commit: itemDeleteSuccess.optimisticMatch,
    revert: itemDeleteFailure.optimisticMatch,
    (state = {}, action: Action) => {
        /* boot/sync: wholesale (re)hydration of the items slice */
        if (bootSuccess.match(action) && action.payload?.items !== undefined) return action.payload.items;
        if (syncSuccess.match(action)) return action.payload.items;
        /* partial share sync: merge incoming items over current state */
        if (sharesSync.match(action)) return fullMerge(state, action.payload.items);

        if (itemCreationIntent.match(action)) {
            const { shareId, optimisticId, createTime, ...item } = action.payload;
            /* a previously failed attempt may have left an optimistic entry */
            const optimisticItem = state?.[shareId]?.[optimisticId];
             * FIXME: we could rely on an optimistic revisionTime update
             * optimistically bump the revision number in the case of retries,
             * the correct revision number will be set on item creation success.
             * This allows this item to be correctly marked as failed.
            return fullMerge(state, {
                aliasEmail: item.type === 'alias' ? item.extraData.aliasEmail : null,
                contentFormatVersion: ContentFormatVersion.Item,
                flags: 1 /** default to unmonitored */,
                itemId: optimisticId,
                modifyTime: createTime,
                /* retries continue the previous optimistic revision sequence */
                revision: optimisticItem !== undefined ? optimisticItem.revision + 1 : 0,
                revisionTime: createTime,
                state: ItemState.Active,

        if (itemCreationSuccess.match(action)) {
            const { shareId, item, alias } = action.payload;

            return fullMerge(state, {
                /* a companion alias item may accompany the created item */
                ...(alias ? { [alias.itemId]: alias } : {}),

        /* import: index each received batch under its share */
        if (importItemsProgress.match(action)) {
            const { shareId, items } = action.payload;
            return fullMerge(state, { [shareId]: toMap(items, 'itemId') });

        /* trash/restore intents are optimistic item-state flips */
        if (itemTrashIntent.match(action)) {
            const { item, shareId } = action.payload;
            const { itemId } = item;

            return updateItem({ shareId, itemId, state: ItemState.Trashed })(state);

        if (itemRestoreIntent.match(action)) {
            const { item, shareId } = action.payload;
            const { itemId } = item;

            return updateItem({ shareId, itemId, state: ItemState.Active })(state);

        if (itemEditIntent.match(action)) {
            const { shareId, itemId, ...item } = action.payload;
            /* NOTE(review): unguarded access — assumes the edited item is
             * present in state; verify upstream guarantees this */
            const { revision } = state[shareId][itemId];

            /* FIXME: see `itemCreationIntent.match`
             * optimistically bump the revision number in the case of retries,
             * the correct revision number will be set on item edit success.
             * This allows this item to be correctly marked as failed */
            return updateItem({ shareId, itemId, data: item, revision: revision + 1 })(state);

        /* server-acknowledged item payloads replace the stored revision */
        if (or(itemEditSuccess.match, setItemFlags.success.match, aliasSyncStatusToggle.success.match)(action)) {
            const { shareId, itemId, item } = action.payload;
            return fullMerge(state, { [shareId]: { [itemId]: item } });

        if (itemsEditSync.match(action)) {
            const { items } = action.payload;
            return addItems(items)(state);

        if (itemsUsedSync.match(action)) {
            const { items } = action.payload;
            return updateItems(items)(state);

        /* optimistic delete: drop the item from its share bucket */
        if (itemDeleteIntent.match(action)) {
            const { shareId, item } = action.payload;
            return { ...state, [shareId]: objectDelete(state[shareId], item.itemId) };

        if (itemsDeleteSync.match(action)) {
            const { shareId } = action.payload;
            const itemIds = new Set(action.payload.itemIds);

            return { ...state, [shareId]: objectFilter(state[shareId], (itemId) => !itemIds.has(itemId)) };

         * BE side and under the hood, moving an item
         * will delete the item and re-create a new one.
         * That's why we are relying on an optimisticId
         * on an `itemMoveIntent`. This is similar to
         * the `itemCreationIntent` flow with the extra
         * deletion of the item to be moved.
        if (itemMoveIntent.match(action)) {
            const { item, optimisticId, shareId } = action.payload;
            /* drop the item from its source share… */
            { ...state, [item.shareId]: objectDelete(state[item.shareId], item.itemId) },
            /* …and re-create it optimistically under the target share */
                itemId: optimisticId,
                modifyTime: getEpoch(),

        if (itemMoveSuccess.match(action)) {
            const { item, shareId, optimisticId } = action.payload;
            /* NOTE(review): optimistic entry is deleted from
             * `state[item.shareId]` but re-keyed under `shareId` —
             * confirm both always refer to the destination share */
            { ...state, [shareId]: objectDelete(state[item.shareId], optimisticId) },
            { [shareId]: { [item.itemId]: item } }

        if (itemPinSuccess.match(action)) {
            const { shareId, itemId } = action.payload;
            return updateItem({ shareId, itemId, pinned: true })(state);

        if (itemUnpinSuccess.match(action)) {
            const { shareId, itemId } = action.payload;
            return updateItem({ shareId, itemId, pinned: false })(state);

        /* shared-vault creation with a `move` payload relocates the item
         * (`before`) from its original share into the new share (`after`) */
        if (sharedVaultCreated.match(action) && action.payload.move) {
            const { shareId } = action.payload.share;
            const { before, after } = action.payload.move;

            { ...state, [before.shareId]: objectDelete(state[before.shareId], before.itemId) },
            { [shareId]: { [after.itemId]: after } }

        /* bulk deletions: strip the batch's ItemIDs from the matching share */
        if (or(emptyTrashProgress.match, itemBulkDeleteProgress.match)(action)) {
            const deletedItemIds = action.payload.batch.map(prop('ItemID'));
            return objectMap(state, (shareId, items) =>
                shareId === action.payload.shareId ? objectFilter(items, notIn(deletedItemIds)) : items

        /* bulk restores: flip each batched item back to Active */
        if (or(restoreTrashProgress.match, itemBulkRestoreProgress.match)(action)) {
            const { shareId, batch } = action.payload;
                batch.map<ItemRevisionUpdate>(({ ItemID: itemId }) => ({
                    state: ItemState.Active,

        /* autofill stamps the item's last-use time */
        if (itemAutofilled.match(action)) {
            const { shareId, itemId } = action.payload;
            return updateItem({ shareId, itemId, lastUseTime: getEpoch() })(state);

        /* share removal (delete/sync/leave): drop the whole share bucket */
        if (or(vaultDeleteSuccess.match, shareDeleteSync.match, shareLeaveSuccess.match)(action)) {
            return objectDelete(state, action.payload.shareId);

        if (inviteAcceptSuccess.match(action)) {
            return fullMerge(state, { [action.payload.share.shareId]: toMap(action.payload.items, 'itemId') });

        /* bulk move: remove the batch from the source share and index the
         * server-returned moved items under the destination share */
        if (or(itemBulkMoveProgress.match, vaultMoveAllItemsProgress.match)(action)) {
            const { shareId, batch, destinationShareId, movedItems } = action.payload;
            { ...state, [shareId]: objectFilter(state[shareId], notIn(batch.map(prop('itemId')))) },
            { [destinationShareId]: toMap(movedItems, 'itemId') }

        /* bulk trash: flip each batched item to Trashed */
        if (itemBulkTrashProgress.match(action)) {
            const { batch, shareId } = action.payload;
                batch.map<ItemRevisionUpdate>(({ ItemID: itemId }) => ({
                    state: ItemState.Trashed,

        /* address-monitor resolution for an alias resets the item flags */
        if (resolveAddressMonitor.success.match(action)) {
            const dto = action.payload;
            if (dto.type === AddressType.ALIAS) {
                const { shareId, itemId } = dto;
                return updateItem({ shareId, itemId, flags: 0 })(state);

        if (aliasSyncPending.success.match(action)) {
            const { items, shareId } = action.payload;
            return partialMerge(state, { [shareId]: toMap(items, 'itemId') });

    { sanitizeAction: sanitizeWithCallbackAction }
/** Maps an optimistic (client-generated) id to the concrete
 * `shareId`/`itemId` pair carried by the resolving action. */
export type ItemsByOptimisticId = { [optimisticId: string]: UniqueItem };

const itemsByOptimisticId: Reducer<ItemsByOptimisticId> = (state = {}, action) => {
    /* record the optimisticId → real item mapping when a creation or
     * move action resolves (move failure also carries an item payload) */
    if (or(itemCreationSuccess.match, itemMoveSuccess.match, itemMoveFailure.match)(action)) {
        const { optimisticId, item } = action.payload;
        const { itemId, shareId } = item;

        return fullMerge(state, { [optimisticId]: { shareId, itemId } });
/** revision number is stored on the `EditDraft` type in order
 * to future-proof drafts v2 : this will allow detecting stale
 * draft entries if an item was updated while having a draft. */
export type DraftBase =
    | { mode: 'new'; type: ItemType }
    | { mode: 'edit'; itemId: string; shareId: string; revision: number };

/** A draft couples its identifying metadata with the raw form values. */
export type Draft<V extends {} = any> = DraftBase & { formData: V };
export type EditDraft = Extract<Draft, { mode: 'edit' }>;
export type NewDraft = Extract<Draft, { mode: 'new' }>;
/** Draft state now supports pushing multiple entries so as to future-proof
 * drafts v2. In the extension, we are still relying on a single active draft
 * and all drafts will be garbage collected on extension boot. This behaviour
 * does not make sense for the web-app and is unavailable for web. */
const draftsReducer: Reducer<Draft[]> = (state = [], action) => {
    /* Ensures only one new item draft exists and that we do not
     * have duplicates for item edit drafts */
    const sanitizeDrafts = (drafts: Draft[], draft: DraftBase) => {
        /* a `new` draft supersedes any previous `new` draft… */
        if (draft.mode === 'new') return drafts.filter(({ mode }) => mode !== 'new');
        /* …while an `edit` draft drops earlier entries matching the same item (per `itemEq`) */
        else return drafts.filter((entry) => entry.mode === 'new' || !itemEq(draft)(entry));

    /* saving prepends the draft after evicting duplicates */
    if (draftSave.match(action)) return [action.payload, ...sanitizeDrafts(state, action.payload)];
    if (draftDiscard.match(action)) return sanitizeDrafts(state, action.payload);
    if (draftsGarbageCollect.match(action)) return [];
/* Secure links indexed by shareId → itemId → SecureLink[]. */
const secureLinksReducer: Reducer<IndexedByShareIdAndItemId<SecureLink[]>> = (state = {}, action) => {
    /* full refresh: rebuild the whole index from the returned links */
    if (or(secureLinksGet.success.match, secureLinksRemoveInactive.success.match)(action)) {
        return action.payload.reduce<IndexedByShareIdAndItemId<SecureLink[]>>((acc, link) => {
            const { shareId, itemId } = link;
            const secureLink = acc[shareId]?.[itemId];

            /* first link for an item creates its bucket, later ones append */
            if (!secureLink) acc[shareId] = { ...(acc[shareId] ?? {}), [itemId]: [link] };
            else secureLink.push(link);

    /* append the newly created link to the item's existing bucket */
    if (secureLinkCreate.success.match(action)) {
        const secureLink = action.payload;
        const { shareId, itemId } = secureLink;
        const links = state?.[shareId]?.[itemId] ?? [];

        return partialMerge(state, { [shareId]: { [itemId]: links.concat(secureLink) } });

    if (secureLinkRemove.success.match(action)) {
        const { shareId, itemId, linkId } = action.payload;
        /* NOTE(review): unguarded access — assumes a bucket exists for the
         * removed link's shareId/itemId; confirm callers guarantee this */
        const links = state[shareId][itemId].filter((link) => link.linkId !== linkId);

        return partialMerge(state, { [shareId]: { [itemId]: links } });
/** Shape of the combined items slice. */
export type ItemsState = {
    byShareId: WrappedOptimisticState<ItemsByShareId>;
    byOptimisticId: ItemsByOptimisticId;

/* Root items reducer: combines the optimistic-aware `byShareId`
 * slice with the plain sub-reducers. */
export default combineOptimisticReducers({
    byShareId: withOptimisticItemsByShareId.reducer,
    byOptimisticId: itemsByOptimisticId,
    drafts: draftsReducer,
    secureLinks: secureLinksReducer,