add prefetching to missed entityUpdates, instance/patch on EntityUpdate

When processing the missed entityUpdates in the EventQueue of the
EventBusClient, we group the entityUpdates by typeRef and listId and
issue loadMultiple requests instead of loading the instances one by
one (prefetching). Additionally, when the client is online, the server
enriches the WebSocket message with either the instance (in case of a
CREATE event) or the patches list (in case of an UPDATE event), so
that we do not need an additional GET request and can either put the
instance into the cache or update the cache entry using the
PatchMerger instead.
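In a nutshell, the grouping step looks roughly like this (a simplified
sketch with stand-in types; the actual implementation is the new
EventInstancePrefetcher below):

```ts
// Minimal sketch of the prefetching idea: group updates by type and list,
// then issue one loadMultiple request per list instead of n single GETs.
type SimpleUpdate = { typeRef: string; listId: string; elementId: string }

function groupByTypeAndList(updates: SimpleUpdate[]): Map<string, Map<string, Set<string>>> {
	const grouped = new Map<string, Map<string, Set<string>>>()
	for (const u of updates) {
		const byList = grouped.get(u.typeRef) ?? new Map<string, Set<string>>()
		grouped.set(u.typeRef, byList)
		const ids = byList.get(u.listId) ?? new Set<string>()
		byList.set(u.listId, ids)
		ids.add(u.elementId)
	}
	return grouped
}
```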

Co-authored-by: abp <abp@tutao.de>
Co-authored-by: das <das@tutao.de>
Co-authored-by: jomapp <17314077+jomapp@users.noreply.github.com>
Co-authored-by: Kinan <104761667+kibibytium@users.noreply.github.com>
Co-authored-by: map <mpfau@users.noreply.github.com>
Co-authored-by: sug <sug@tutao.de>
This commit is contained in:
abp 2025-06-13 17:27:15 +02:00
parent 681b22cb4f
commit 4db7e9b1e9
No known key found for this signature in database
GPG key ID: 791D4EC38A7AA7C2
63 changed files with 4193 additions and 2291 deletions

@ -1 +1 @@
Subproject commit 200f65e9a5111c8f7b19fd884f4c942b61a1579b
Subproject commit daba6d10456b5a5f3d3616e6199b3849f19e7507


@ -520,7 +520,6 @@ export function assembleEditResultAndAssignFromExisting(existingEvent: CalendarE
newEvent._ownerGroup = existingEvent._ownerGroup
newEvent._permissions = existingEvent._permissions
newEvent._original = existingEvent._original
return {
hasUpdateWorthyChanges: eventHasChanged(newEvent, existingEvent),
newEvent,


@ -116,6 +116,7 @@ import { lang } from "../../../common/misc/LanguageViewModel.js"
import { NativePushServiceApp } from "../../../common/native/main/NativePushServiceApp.js"
import { getClientOnlyCalendars } from "../gui/CalendarGuiUtils.js"
import { SyncTracker } from "../../../common/api/main/SyncTracker.js"
import { CacheMode } from "../../../common/api/worker/rest/EntityRestClient"
const TAG = "[CalendarModel]"
const EXTERNAL_CALENDAR_RETRY_LIMIT = 3
@ -824,7 +825,8 @@ export class CalendarModel {
try {
// We are not supposed to load files without the key provider, but we hope that the key
// was already resolved and the entity updated.
const file = await this.entityClient.load(FileTypeRef, fileId)
const file = await this.entityClient.load(FileTypeRef, fileId, { cacheMode: CacheMode.WriteOnly })
const dataFile = await this.fileController.getAsDataFile(file)
const { parseCalendarFile } = await import("../../../common/calendar/import/CalendarImporter.js")
return await parseCalendarFile(dataFile)
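For context, a sketch of the assumed cache-mode semantics this commit
relies on (inferred from the usages in this diff, not spelled out here):

```ts
// Assumed semantics of the two cache modes used in this commit:
// always fetch from the server, but store the result in the cache
const fresh = await entityClient.load(FileTypeRef, fileId, { cacheMode: CacheMode.WriteOnly })
// serve from the cache when possible, fall back to the server
const cached = await entityClient.load(FileTypeRef, fileId, { cacheMode: CacheMode.ReadAndWrite })
```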


@ -2,11 +2,10 @@ import { CalendarSearchResultListEntry } from "./CalendarSearchListView.js"
import { SearchRestriction, SearchResult } from "../../../../common/api/worker/search/SearchTypes.js"
import { EntityEventsListener, EventController } from "../../../../common/api/main/EventController.js"
import { CalendarEvent, CalendarEventTypeRef, ContactTypeRef, MailTypeRef } from "../../../../common/api/entities/tutanota/TypeRefs.js"
import { CLIENT_ONLY_CALENDARS, OperationType } from "../../../../common/api/common/TutanotaConstants.js"
import { CLIENT_ONLY_CALENDARS } from "../../../../common/api/common/TutanotaConstants.js"
import { assertIsEntity2, elementIdPart, GENERATED_MAX_ID, getElementId, isSameId, ListElement } from "../../../../common/api/common/utils/EntityUtils.js"
import { ListLoadingState, ListState } from "../../../../common/gui/base/List.js"
import {
assertNotNull,
deepEqual,
downcast,
getEndOfDay,
@ -30,7 +29,7 @@ import stream from "mithril/stream"
import { generateCalendarInstancesInRange, retrieveClientOnlyEventsForUser } from "../../../../common/calendar/date/CalendarUtils.js"
import { LoginController } from "../../../../common/api/main/LoginController.js"
import { EntityClient } from "../../../../common/api/common/EntityClient.js"
import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../../../common/api/common/utils/EntityUpdateUtils.js"
import { EntityUpdateData, isUpdateForTypeRef } from "../../../../common/api/common/utils/EntityUpdateUtils.js"
import { CalendarInfo } from "../../model/CalendarModel.js"
import m from "mithril"
import { CalendarFacade } from "../../../../common/api/worker/facades/lazy/CalendarFacade.js"
@ -200,51 +199,10 @@ export class CalendarSearchViewModel {
private readonly entityEventsListener: EntityEventsListener = async (updates) => {
for (const update of updates) {
const mergedUpdate = this.mergeOperationsIfNeeded(update, updates)
if (mergedUpdate == null) continue
await this.entityEventReceived(mergedUpdate)
await this.entityEventReceived(update)
}
}
private mergeOperationsIfNeeded(update: EntityUpdateData, updates: readonly EntityUpdateData[]): EntityUpdateData | null {
// We are trying to keep the mails that are moved and would match the search criteria displayed.
// This is a bit hacky as we reimplement part of the filtering by list.
// Ideally search result would update by itself and we would only need to reconcile the changes.
if (!isUpdateForTypeRef(MailTypeRef, update) || this.searchResult == null) {
return update
}
if (update.operation === OperationType.CREATE && containsEventOfType(updates, OperationType.DELETE, update.instanceId)) {
// This is a move operation, is destination list included in the restrictions?
if (this.listIdMatchesRestriction(update.instanceListId, this.searchResult.restriction)) {
// If it's included, we want to keep showing the item but we will simulate the UPDATE
return { ...update, operation: OperationType.UPDATE }
} else {
// If it's not going to be included we might as well skip the create operation
return null
}
} else if (update.operation === OperationType.DELETE && containsEventOfType(updates, OperationType.CREATE, update.instanceId)) {
// This is a move operation and we are in the delete part of it.
// Grab the other part to check the move destination.
const createOperation = assertNotNull(getEventOfType(updates, OperationType.CREATE, update.instanceId))
// Is destination included in the search?
if (this.listIdMatchesRestriction(createOperation.instanceListId, this.searchResult.restriction)) {
// If so, skip the delete.
return null
} else {
// Otherwise delete
return update
}
} else {
return update
}
}
private listIdMatchesRestriction(listId: string, restriction: SearchRestriction): boolean {
return restriction.folderIds.length === 0 || restriction.folderIds.includes(listId)
}
onNewUrl(args: Record<string, any>, requestedPath: string) {
let restriction
try {


@ -81,6 +81,8 @@ import { CalendarEventTypeRef } from "../../../common/api/entities/tutanota/Type
import { CustomUserCacheHandler } from "../../../common/api/worker/rest/cacheHandler/CustomUserCacheHandler"
import { EphemeralCacheStorage } from "../../../common/api/worker/rest/EphemeralCacheStorage"
import { CustomCalendarEventCacheHandler } from "../../../common/api/worker/rest/cacheHandler/CustomCalendarEventCacheHandler"
import { PatchMerger } from "../../../common/api/worker/offline/PatchMerger"
import { EventInstancePrefetcher } from "../../../common/api/worker/EventInstancePrefetcher"
assertWorkerOrNode()
@ -90,6 +92,7 @@ export type CalendarWorkerLocatorType = {
serviceExecutor: IServiceExecutor
crypto: CryptoFacade
instancePipeline: InstancePipeline
patchMerger: PatchMerger
applicationTypesFacade: ApplicationTypesFacade
cacheStorage: CacheStorage
cache: EntityRestInterface
@ -242,7 +245,9 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
locator.cacheStorage = maybeUninitializedStorage
locator.cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver)
locator.patchMerger = new PatchMerger(locator.cacheStorage, locator.instancePipeline, typeModelResolver, () => locator.crypto)
locator.cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver, locator.patchMerger)
locator.cachingEntityClient = new EntityClient(locator.cache, typeModelResolver)
const nonCachingEntityClient = new EntityClient(entityRestClient, typeModelResolver)
@ -494,6 +499,8 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
noOp,
)
const eventInstancePrefetcher = new EventInstancePrefetcher(locator.cache)
locator.eventBusClient = new EventBusClient(
eventBusCoordinator,
locator.cache as EntityRestCache,
@ -505,6 +512,8 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
mainInterface.progressTracker,
mainInterface.syncTracker,
typeModelResolver,
locator.crypto,
eventInstancePrefetcher,
)
locator.login.init(locator.eventBusClient)
locator.Const = Const


@ -0,0 +1,9 @@
//@bundleInto:common-min
import { TutanotaError } from "@tutao/tutanota-error"
export class PatchOperationError extends TutanotaError {
constructor(m: string) {
super("PatchOperationError", m)
}
}


@ -1,30 +1,49 @@
import { OperationType } from "../TutanotaConstants.js"
import { EntityUpdate } from "../../entities/sys/TypeRefs.js"
import { SomeEntity } from "../EntityTypes.js"
import { AppName, isSameTypeRef, isSameTypeRefByAttr, TypeRef } from "@tutao/tutanota-utils"
import { EntityUpdate, Patch } from "../../entities/sys/TypeRefs.js"
import { ServerModelParsedInstance, SomeEntity } from "../EntityTypes.js"
import { AppName, isSameTypeRef, TypeRef } from "@tutao/tutanota-utils"
import { isSameId } from "./EntityUtils.js"
import { ClientTypeModelResolver } from "../EntityFunctions"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
/**
* A type similar to {@link EntityUpdate} but mapped to make it easier to work with.
*/
export type EntityUpdateData = {
typeRef: TypeRef<any>
instanceListId: string
instanceId: string
operation: OperationType
instance: Nullable<ServerModelParsedInstance>
// empty list: when the server did not send a patchList, or for an empty re-write to the server database.
// length > 0: the normal case for patches
patches: Nullable<Array<Patch>>
// whether the EventInstancePrefetcher already downloaded this instance
isPrefetched: boolean
}
export async function entityUpdateToUpdateData(clientTypeModelResolver: ClientTypeModelResolver, update: EntityUpdate): Promise<EntityUpdateData> {
export async function entityUpdateToUpdateData(
clientTypeModelResolver: ClientTypeModelResolver,
update: EntityUpdate,
instance: Nullable<ServerModelParsedInstance> = null,
isPrefetched: boolean = false,
): Promise<EntityUpdateData> {
const typeId = update.typeId ? parseInt(update.typeId) : null
const typeRefOfEntityUpdate = typeId
? new TypeRef<SomeEntity>(update.application as AppName, typeId)
: clientTypeModelResolver.resolveTypeRefFromAppAndTypeNameLegacy(update.application as AppName, update.type)
return {
typeRef: typeRefOfEntityUpdate,
instanceListId: update.instanceListId,
instanceId: update.instanceId,
operation: update.operation as OperationType,
patches: update.patch?.patches ?? null,
instance,
isPrefetched,
}
}
@ -39,19 +58,3 @@ export function isUpdateFor<T extends SomeEntity>(entity: T, update: EntityUpdat
(update.instanceListId === "" ? isSameId(update.instanceId, entity._id) : isSameId([update.instanceListId, update.instanceId], entity._id))
)
}
export function containsEventOfType(events: ReadonlyArray<EntityUpdateData>, operationType: OperationType, elementId: Id): boolean {
return events.some((event) => event.operation === operationType && event.instanceId === elementId)
}
export function getEventOfType<T extends EntityUpdateData | EntityUpdate>(events: ReadonlyArray<T>, type: OperationType, elementId: Id): T | null {
return events.find((event) => event.operation === type && event.instanceId === elementId) ?? null
}
export function getEntityUpdateId(update: EntityUpdateData): Id | IdTuple {
if (update.instanceListId !== "") {
return [update.instanceListId, update.instanceId]
} else {
return update.instanceId
}
}
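For illustration, the two enriched shapes of EntityUpdateData as used by
this commit (a sketch with hypothetical values; `parsedInstance` and
`patch` stand in for real server-sent data):

```ts
declare const parsedInstance: ServerModelParsedInstance // stand-in: decrypted server-sent instance
declare const patch: Patch // stand-in: a single server-sent patch

const createUpdate: EntityUpdateData = {
	typeRef: MailTypeRef,
	instanceListId: "mailListId",
	instanceId: "mailElementId",
	operation: OperationType.CREATE,
	instance: parsedInstance, // the full instance came with the websocket message, no GET needed
	patches: null,
	isPrefetched: false,
}

const patchUpdate: EntityUpdateData = {
	...createUpdate,
	operation: OperationType.UPDATE,
	instance: null,
	patches: [patch], // the cache entry is rewritten locally by the PatchMerger
}
```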


@ -11,7 +11,6 @@ import {
deepEqual,
Hex,
hexToBase64,
isEmpty,
isSameTypeRef,
pad,
repeat,
@ -25,9 +24,11 @@ import {
ClientModelEncryptedParsedInstance,
ClientModelParsedInstance,
ClientModelUntypedInstance,
ClientTypeModel,
ElementEntity,
Entity,
ModelValue,
ParsedInstance,
ParsedValue,
SomeEntity,
TypeModel,
@ -37,7 +38,7 @@ import { ClientTypeReferenceResolver, PatchOperationType } from "../EntityFuncti
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { AttributeModel } from "../AttributeModel"
import { createPatch, createPatchList, Patch, PatchList } from "../../entities/sys/TypeRefs"
import { instance } from "testdouble"
import { ProgrammingError } from "../error/ProgrammingError"
/**
* the maximum ID for elements stored on the server (number with the length of 10 bytes) => 2^80 - 1
@ -359,7 +360,7 @@ export async function computePatches(
): Promise<Patch[]> {
let patches: Patch[] = []
for (const [valueIdStr, modelValue] of Object.entries(typeModel.values)) {
if (modelValue.final && !(modelValue.name == "_ownerEncSessionKey" || modelValue.name == "_ownerKeyVersion")) {
if (modelValue.final) {
continue
}
const attributeId = parseInt(valueIdStr)
@ -396,6 +397,7 @@ export async function computePatches(
// keys are in the format attributeId:attributeName when networkDebugging is enabled
attributeIdStr += ":" + modelAssociation.name
}
if (modelAssociation.type == AssociationType.Aggregation) {
const appName = modelAssociation.dependency ?? typeModel.app
const typeId = modelAssociation.refTypeId
@ -403,6 +405,15 @@ export async function computePatches(
const originalAggregatedEntities = (originalInstance[attributeId] ?? []) as Array<ClientModelParsedInstance>
const modifiedAggregatedEntities = (modifiedInstance[attributeId] ?? []) as Array<ClientModelParsedInstance>
const modifiedAggregatedUntypedEntities = (modifiedUntypedInstance[attributeIdStr] ?? []) as Array<ClientModelUntypedInstance>
const modifiedAggregateIds = modifiedAggregatedEntities.map(
(instance) => instance[assertNotNull(AttributeModel.getAttributeId(aggregateTypeModel, "_id"))] as Id,
)
if (!isDistinctAggregateIds(modifiedAggregateIds)) {
throw new ProgrammingError(
"Duplicate aggregate ids in the modified instance: " + AttributeModel.getAttribute(modifiedInstance, "_id", typeModel),
)
}
const addedItems = modifiedAggregatedUntypedEntities.filter(
(element) =>
!originalAggregatedEntities.some((item) => {
@ -471,7 +482,6 @@ export async function computePatches(
})
patches = patches.concat(items)
}
if (modelAssociation.cardinality == Cardinality.Any) {
if (removedItems.length > 0) {
const removedAggregateIds = removedItems.map(
(instance) => instance[assertNotNull(AttributeModel.getAttributeId(aggregateTypeModel, "_id"))] as Id,
@ -493,32 +503,6 @@ export async function computePatches(
}),
)
}
} else if (isEmpty(originalAggregatedEntities)) {
// ZeroOrOne with original aggregation on server is []
patches.push(
createPatch({
attributePath: attributeIdStr,
value: JSON.stringify(modifiedAggregatedUntypedEntities),
patchOperation: PatchOperationType.ADD_ITEM,
}),
)
} else {
// ZeroOrOne or One with original aggregation on server already there (i.e. it is a list of one)
const aggregateId = AttributeModel.getAttribute(assertNotNull(originalAggregatedEntities[0]), "_id", aggregateTypeModel)
const fullPath = `${attributeIdStr}/${aggregateId}/`
const items = await computePatches(
originalAggregatedEntities[0],
modifiedAggregatedEntities[0],
modifiedAggregatedUntypedEntities[0],
aggregateTypeModel,
typeReferenceResolver,
isNetworkDebuggingEnabled,
)
items.map((item) => {
item.attributePath = fullPath + item.attributePath
})
patches = patches.concat(items)
}
} else {
// non aggregation associations
const originalAssociationValue = (originalInstance[attributeId] ?? []) as Array<Id | IdTuple>
@ -529,15 +513,6 @@ export async function computePatches(
// Only Any associations support ADD_ITEM and REMOVE_ITEM operations
// All cardinalities support REPLACE operation
if (modelAssociation.cardinality == Cardinality.Any) {
if (addedItems.length > 0) {
patches.push(
createPatch({
attributePath: attributeIdStr,
value: JSON.stringify(addedItems),
patchOperation: PatchOperationType.ADD_ITEM,
}),
)
}
if (removedItems.length > 0) {
patches.push(
createPatch({
@ -637,6 +612,11 @@ export function timestampToGeneratedId(timestamp: number, serverBytes: number =
return base64ToBase64Ext(hexToBase64(hex))
}
function isDistinctAggregateIds(array: Array<Id>) {
const checkSet = new Set(array)
return checkSet.size === array.length
}
/**
* Extracts the timestamp from a GeneratedId
* @param base64Ext The id as base64Ext
@ -692,10 +672,10 @@ export function assertIsEntity2<T extends SomeEntity>(type: TypeRef<T>): (entity
* Only use for new entities, the {@param entity} won't be usable for updates anymore after this.
*/
export function removeTechnicalFields<E extends Partial<SomeEntity>>(entity: E) {
// we want to restrict outer function to entity types but internally we also want to handle aggregates
// we want to restrict outer function to entity types, but internally we also want to handle aggregates
function _removeTechnicalFields(erased: Record<string, any>) {
for (const key of Object.keys(erased)) {
if (key.startsWith("_finalEncrypted") || key.startsWith("_defaultEncrypted") || key.startsWith("_errors")) {
if (key.startsWith("_finalIvs") || key.startsWith("_errors")) {
delete erased[key]
} else {
const value = erased[key]
@ -707,6 +687,7 @@ export function removeTechnicalFields<E extends Partial<SomeEntity>>(entity: E)
}
_removeTechnicalFields(entity)
return entity
}
/**


@ -1,5 +1,5 @@
const modelInfo = {
version: 131,
version: 132,
}
export default modelInfo

File diff suppressed because it is too large


@ -802,6 +802,9 @@ export type EntityUpdate = {
instanceId: string;
operation: NumberString;
typeId: null | NumberString;
instance: null | string;
patch: null | PatchList;
}
export const VersionTypeRef: TypeRef<Version> = new TypeRef("sys", 480)
@ -3931,20 +3934,6 @@ export type Patch = {
attributePath: string;
value: null | string;
}
export const PatchListTypeRef: TypeRef<PatchList> = new TypeRef("sys", 2572)
export function createPatchList(values: StrippedEntity<PatchList>): PatchList {
return Object.assign(create(typeModels[PatchListTypeRef.typeId], PatchListTypeRef), values)
}
export type PatchList = {
_type: TypeRef<PatchList>;
_original?: PatchList
_format: NumberString;
patches: Patch[];
}
export const IdentityKeyPairTypeRef: TypeRef<IdentityKeyPair> = new TypeRef("sys", 2575)
export function createIdentityKeyPair(values: StrippedEntity<IdentityKeyPair>): IdentityKeyPair {
@ -4050,3 +4039,17 @@ export type RolloutGetOut = {
rollouts: Rollout[];
}
export const PatchListTypeRef: TypeRef<PatchList> = new TypeRef("sys", 2614)
export function createPatchList(values: StrippedEntity<PatchList>): PatchList {
return Object.assign(create(typeModels[PatchListTypeRef.typeId], PatchListTypeRef), values)
}
export type PatchList = {
_type: TypeRef<PatchList>;
_original?: PatchList
_id: Id;
patches: Patch[];
}


@ -1,6 +1,7 @@
import stream from "mithril/stream"
import type { ProgressMonitorId } from "../common/utils/ProgressMonitor"
import { ProgressMonitor } from "../common/utils/ProgressMonitor"
import { IProgressMonitor, ProgressMonitor, ProgressMonitorId } from "../common/utils/ProgressMonitor"
import { EstimatingProgressMonitor } from "../common/utils/EstimatingProgressMonitor"
export type ExposedProgressTracker = Pick<ProgressTracker, "registerMonitor" | "workDoneForMonitor">
@ -12,7 +13,7 @@ export type ExposedProgressTracker = Pick<ProgressTracker, "registerMonitor" | "
export class ProgressTracker {
// Will stream a number between 0 and 1
onProgressUpdate: stream<number>
private readonly monitors: Map<ProgressMonitorId, ProgressMonitor>
private readonly monitors: Map<ProgressMonitorId, EstimatingProgressMonitor>
private idCounter: ProgressMonitorId
constructor() {
@ -31,8 +32,8 @@ export class ProgressTracker {
*/
registerMonitorSync(work: number): ProgressMonitorId {
const id = this.idCounter++
const monitor = new ProgressMonitor(work, (percentage) => this.onProgress(id, percentage))
const monitor = new EstimatingProgressMonitor(work, (percentage) => this.onProgress(id, percentage))
monitor.continueEstimation()
this.monitors.set(id, monitor)
return id
@ -47,7 +48,7 @@ export class ProgressTracker {
this.getMonitor(id)?.workDone(amount)
}
getMonitor(id: ProgressMonitorId): ProgressMonitor | null {
getMonitor(id: ProgressMonitorId): IProgressMonitor | null {
return this.monitors.get(id) ?? null
}
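A small usage sketch of the tracker with the new estimating monitor
(method names as in this file; the EstimatingProgressMonitor internals
are assumed):

```ts
// Sketch: register expected work, report progress; the estimating monitor
// additionally advances the shown progress over time (continueEstimation).
const tracker = new ProgressTracker()
tracker.onProgressUpdate.map((p) => console.log(`progress: ${Math.round(p * 100)}%`))
const id = tracker.registerMonitorSync(100) // 100 units of expected work
tracker.workDoneForMonitor(id, 25) // reports 25% done
```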


@ -12,13 +12,14 @@ import {
createWebsocketLeaderStatus,
EntityEventBatch,
EntityEventBatchTypeRef,
EntityUpdate,
WebsocketCounterData,
WebsocketCounterDataTypeRef,
WebsocketEntityDataTypeRef,
WebsocketLeaderStatus,
WebsocketLeaderStatusTypeRef,
} from "../entities/sys/TypeRefs.js"
import { binarySearch, delay, identity, lastThrow, ofClass, promiseMap, randomIntFromInterval, TypeRef } from "@tutao/tutanota-utils"
import { AppName, assertNotNull, binarySearch, delay, identity, lastThrow, ofClass, promiseMap, randomIntFromInterval, TypeRef } from "@tutao/tutanota-utils"
import { OutOfSyncError } from "../common/error/OutOfSyncError"
import { CloseEventBusOption, GroupType, SECOND_MS } from "../common/TutanotaConstants"
import { CancelledError } from "../common/error/CancelledError"
@ -37,9 +38,14 @@ import { PhishingMarkerWebsocketDataTypeRef, ReportedMailFieldMarker } from "../
import { UserFacade } from "./facades/UserFacade"
import { ExposedProgressTracker } from "../main/ProgressTracker.js"
import { SyncTracker } from "../main/SyncTracker.js"
import { Entity, ServerModelUntypedInstance } from "../common/EntityTypes"
import { Entity, ServerModelParsedInstance, ServerModelUntypedInstance } from "../common/EntityTypes"
import { InstancePipeline } from "./crypto/InstancePipeline"
import { EntityUpdateData, entityUpdateToUpdateData } from "../common/utils/EntityUpdateUtils"
import { CryptoFacade } from "./crypto/CryptoFacade"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { EntityAdapter } from "./crypto/EntityAdapter"
import { EventInstancePrefetcher } from "./EventInstancePrefetcher"
import { AttributeModel } from "../common/AttributeModel"
assertWorkerOrNode()
@ -121,7 +127,7 @@ export class EventBusClient {
private lastAntiphishingMarkersId: Id | null = null
/** Queue to process all events. */
private readonly eventQueue: EventQueue
/** Queue that handles incoming websocket messages only. Caches them until we process downloaded ones and then adds them to eventQueue. */
@ -152,6 +158,8 @@ export class EventBusClient {
private readonly progressTracker: ExposedProgressTracker,
private readonly syncTracker: SyncTracker,
private readonly typeModelResolver: TypeModelResolver,
private readonly cryptoFacade: CryptoFacade,
private readonly eventInstancePrefetcher: EventInstancePrefetcher,
) {
// We are not connected by default and will not try to unless connect() is called
this.state = EventBusState.Terminated
@ -160,8 +168,8 @@ export class EventBusClient {
this.socket = null
this.reconnectTimer = null
this.connectTimer = null
this.eventQueue = new EventQueue("ws_opt", true, (modification) => this.eventQueueCallback(modification))
this.entityUpdateMessageQueue = new EventQueue("ws_msg", false, (batch) => this.entityUpdateMessageQueueCallback(batch))
this.eventQueue = new EventQueue("ws_opt", (modification) => this.eventQueueCallback(modification))
this.entityUpdateMessageQueue = new EventQueue("ws_msg", (batch) => this.entityUpdateMessageQueueCallback(batch))
this.reset()
}
@ -208,7 +216,8 @@ export class EventBusClient {
"&accessToken=" +
authHeaders.accessToken +
(this.lastAntiphishingMarkersId ? "&lastPhishingMarkersId=" + this.lastAntiphishingMarkersId : "") +
(env.clientName ? "&clientName=" + env.clientName : "")
(env.clientName ? "&clientName=" + env.clientName : "") +
(env.networkDebugging ? "&network-debugging=" + "enable-network-debugging" : "")
const path = "/event?" + authQuery
this.unsubscribeFromOldWebsocket()
@ -283,7 +292,8 @@ export class EventBusClient {
}
private async decodeEntityEventValue<E extends Entity>(messageType: TypeRef<E>, untypedInstance: ServerModelUntypedInstance): Promise<E> {
return await this.instancePipeline.decryptAndMap(messageType, untypedInstance, null)
const untypedInstanceSanitized = AttributeModel.removeNetworkDebuggingInfoIfNeeded(untypedInstance)
return await this.instancePipeline.decryptAndMap(messageType, untypedInstanceSanitized, null)
}
private onError(error: any) {
@ -297,7 +307,11 @@ export class EventBusClient {
case MessageType.EntityUpdate: {
const entityUpdateData = await this.decodeEntityEventValue(WebsocketEntityDataTypeRef, JSON.parse(value))
this.typeModelResolver.setServerApplicationTypesModelHash(entityUpdateData.applicationTypesHash)
const updates = await promiseMap(entityUpdateData.entityUpdates, (event) => entityUpdateToUpdateData(this.typeModelResolver, event))
const updates = await promiseMap(entityUpdateData.entityUpdates, async (event) => {
let instance = await this.getInstanceFromEntityEvent(event)
return entityUpdateToUpdateData(this.typeModelResolver, event, instance)
})
this.entityUpdateMessageQueue.add(entityUpdateData.eventBatchId, entityUpdateData.eventBatchOwner, updates)
break
}
@ -331,6 +345,25 @@ export class EventBusClient {
}
}
private async getInstanceFromEntityEvent(event: EntityUpdate): Promise<Nullable<ServerModelParsedInstance>> {
if (event.instance != null) {
const typeRef = new TypeRef<any>(event.application as AppName, parseInt(event.typeId!))
const serverTypeModel = await this.typeModelResolver.resolveServerTypeReference(typeRef)
const untypedInstance = JSON.parse(event.instance) as ServerModelUntypedInstance
const untypedInstanceSanitized = AttributeModel.removeNetworkDebuggingInfoIfNeeded(untypedInstance)
const encryptedParsedInstance = await this.instancePipeline.typeMapper.applyJsTypes(serverTypeModel, untypedInstanceSanitized)
const entityAdapter = await EntityAdapter.from(serverTypeModel, encryptedParsedInstance, this.instancePipeline)
if (this.userFacade.hasGroup(assertNotNull(entityAdapter._ownerGroup))) {
// if the user was just assigned to a new group, it might not yet be on the user facade,
// we can't decrypt the instance in that case.
const migratedEntity = await this.cryptoFacade.applyMigrations(typeRef, entityAdapter)
const sessionKey = await this.cryptoFacade.resolveSessionKey(migratedEntity)
return await this.instancePipeline.cryptoMapper.decryptParsedInstance(serverTypeModel, encryptedParsedInstance, sessionKey)
}
}
return null
}
private onClose(event: CloseEvent) {
this.failedConnectionAttempts++
console.log("ws close event:", event, "state:", this.state)
@ -518,8 +551,12 @@ export class EventBusClient {
// Count all batches that will actually be processed so that the progress is correct
let totalExpectedBatches = 0
for (const batch of timeSortedEventBatches) {
const updates = await promiseMap(batch.events, (event) => entityUpdateToUpdateData(this.typeModelResolver, event))
const updates = await promiseMap(batch.events, async (event) => {
return entityUpdateToUpdateData(this.typeModelResolver, event)
})
const batchWasAddedToQueue = this.addBatch(getElementId(batch), getListId(batch), updates, eventQueue)
if (batchWasAddedToQueue) {
// Set as last only if it was inserted with success
this.lastInitialEventBatch = getElementId(batch)
@ -527,10 +564,12 @@ export class EventBusClient {
}
}
const allEventsFlatMap = this.eventQueue.eventQueue.flatMap((eventQ) => eventQ.events)
// We only have the correct amount of total work after adding all entity event batches.
// The progress for processed batches is tracked inside the event queue.
const progressMonitor = new ProgressMonitorDelegate(this.progressTracker, totalExpectedBatches + 1)
console.log("ws", `progress monitor expects ${totalExpectedBatches} events`)
const progressMonitor = new ProgressMonitorDelegate(this.progressTracker, totalExpectedBatches + allEventsFlatMap.length + 1)
console.log("ws", `progress monitor expects ${totalExpectedBatches + allEventsFlatMap.length} events`)
await progressMonitor.workDone(1) // show progress right away
eventQueue.setProgressMonitor(progressMonitor)
@ -539,6 +578,8 @@ export class EventBusClient {
this.syncTracker.markSyncAsDone()
}
await this.eventInstancePrefetcher.preloadEntities(allEventsFlatMap, progressMonitor)
// We've loaded all the batches, we've added them to the queue, we can let the cache remember sync point for us to detect out of sync now.
// It is possible that we will record the time before the batch will be processed but the risk is low.
await this.cache.recordSyncTime()
@ -641,7 +682,7 @@ export class EventBusClient {
if (index < 0) {
lastForGroup.splice(-index, 0, batchId)
// only add the batch if it was not process before
// only add the batch if it was not processed before
wasAdded = eventQueue.add(batchId, groupId, events)
} else {
wasAdded = false
@ -651,7 +692,7 @@ export class EventBusClient {
lastForGroup.shift()
}
this.lastEntityEventIds.set(batchId, lastForGroup)
this.lastEntityEventIds.set(groupId, lastForGroup)
if (wasAdded) {
this.lastAddedBatchForGroup.set(groupId, batchId)
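The progress accounting above, spelled out as a sketch (numbers made up):

```ts
// Work units for the monitor: one per missed batch, one per individual
// event (prefetching marks each as done), plus one initial tick.
function expectedWorkUnits(batches: { events: unknown[] }[]): number {
	const totalEvents = batches.reduce((sum, b) => sum + b.events.length, 0)
	return batches.length + totalEvents + 1
}
// e.g. 3 batches with 5, 2 and 1 events: 3 + 8 + 1 = 12 units
```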


@ -0,0 +1,165 @@
import { EntityUpdateData } from "../common/utils/EntityUpdateUtils"
import { Mail, MailDetailsBlobTypeRef, MailTypeRef } from "../entities/tutanota/TypeRefs"
import { elementIdPart, ensureBase64Ext, isSameId, listIdPart } from "../common/utils/EntityUtils"
import { assertNotNull, getTypeString, groupBy, isNotNull, isSameTypeRef, parseTypeString, TypeRef } from "@tutao/tutanota-utils"
import { parseKeyVersion } from "./facades/KeyLoaderFacade"
import { VersionedEncryptedKey } from "./crypto/CryptoWrapper"
import { OperationType } from "../common/TutanotaConstants"
import { NotAuthorizedError, NotFoundError } from "../common/error/RestError"
import { CacheStorage, Range } from "./rest/DefaultEntityRestCache"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { ServerTypeModelResolver } from "../common/EntityFunctions"
import { ListElementEntity, SomeEntity } from "../common/EntityTypes"
import { CacheMode, type EntityRestInterface } from "./rest/EntityRestClient"
import { ProgressMonitorDelegate } from "./ProgressMonitorDelegate"
export class EventInstancePrefetcher {
constructor(private readonly entityCache: EntityRestInterface) {}
/**
* We preload list element entities in case we get updates for multiple instances of a single list.
* So that single item requests for those instances will be served from the cache.
*/
public async preloadEntities(allEventsFromAllBatch: Array<EntityUpdateData>, progressMonitor: ProgressMonitorDelegate): Promise<void> {
const start = new Date().getTime()
console.log("====== PREFETCH ============")
const preloadMap = await this.groupedListElementUpdatedInstances(allEventsFromAllBatch, progressMonitor)
await this.loadGroupedListElementEntities(allEventsFromAllBatch, preloadMap, progressMonitor)
console.log("====== PREFETCH END ============", new Date().getTime() - start, "ms")
}
private async loadGroupedListElementEntities(
allEventsFromAllBatch: Array<EntityUpdateData>,
preloadMap: Map<string, Map<Id, Map<Id, number[]>>>,
progressMonitor: ProgressMonitorDelegate,
): Promise<void> {
for (const [typeRefString, groupedListIds] of preloadMap.entries()) {
const typeRef = parseTypeString(typeRefString) as TypeRef<ListElementEntity>
for (const [listId, elementIdsAndIndexes] of groupedListIds.entries()) {
// This prevents requests to conversationEntries which were always singleRequests
if (elementIdsAndIndexes.size > 1) {
try {
const elementIds = Array.from(elementIdsAndIndexes.keys())
const instances = await this.entityCache.loadMultiple<ListElementEntity>(typeRef, listId, elementIds, undefined, {
cacheMode: CacheMode.WriteOnly,
})
if (isSameTypeRef(MailTypeRef, typeRef)) {
await this.fetchMailDetailsBlob(instances)
}
this.setEventsWithInstancesAsPrefetched(allEventsFromAllBatch, instances, elementIdsAndIndexes, progressMonitor)
} catch (e) {
if (isExpectedErrorForSynchronization(e)) {
console.log(`could not preload, probably lost group membership (or not added yet) for list ${typeRefString}/${listId}`)
} else {
console.warn(`failed to preload ${typeRefString}/${listId}`, e)
}
}
}
}
}
}
private async fetchMailDetailsBlob(instances: Array<SomeEntity>) {
const mailsWithMailDetails = instances.filter((mail: Mail) => isNotNull(mail.mailDetails)) as Array<Mail>
const mailDetailsByList = groupBy(mailsWithMailDetails, (m) => listIdPart(assertNotNull(m.mailDetails)))
for (const [listId, mails] of mailDetailsByList.entries()) {
const mailDetailsElementIds = mails.map((m) => elementIdPart(assertNotNull(m.mailDetails)))
const initialMap: Map<Id, Mail> = new Map()
const mailDetailsElementIdToMail = mails.reduce((previous: Map<Id, Mail>, current) => {
previous.set(elementIdPart(assertNotNull(current.mailDetails)), current)
return previous
}, initialMap)
await this.entityCache.loadMultiple(
MailDetailsBlobTypeRef,
listId,
mailDetailsElementIds,
async (mailDetailsElementId: Id) => {
const mail = assertNotNull(mailDetailsElementIdToMail.get(mailDetailsElementId))
return {
key: mail._ownerEncSessionKey,
encryptingKeyVersion: parseKeyVersion(mail._ownerKeyVersion ?? "0"),
} as VersionedEncryptedKey
},
{ cacheMode: CacheMode.ReadAndWrite },
)
}
}
private setEventsWithInstancesAsPrefetched(
allEventsFromAllBatch: Array<EntityUpdateData>,
instances: Array<ListElementEntity>,
elementIdsAndIndexes: Map<Id, number[]>,
progressMonitor: ProgressMonitorDelegate,
) {
for (const { _id } of instances) {
const elementId = elementIdPart(_id)
const elementEventBatchIndexes = elementIdsAndIndexes.get(elementId) || []
for (const index of elementEventBatchIndexes) {
allEventsFromAllBatch[index].isPrefetched = true
progressMonitor.workDone(1)
}
}
}
// @VisibleForTesting
public async groupedListElementUpdatedInstances(
allEventsFromAllBatch: Array<EntityUpdateData>,
progressMonitor: ProgressMonitorDelegate,
): Promise<Map<string, Map<Id, Map<Id, number[]>>>> {
const prefetchMap: Map<string, Map<Id, Map<Id, number[]>>> = new Map()
let total = 0
for (const [eventIndexInList, entityUpdateData] of allEventsFromAllBatch.entries()) {
const typeIdentifier = getTypeString(entityUpdateData.typeRef)
// if the CREATE update itself carries an instance, we don't need to fetch it:
// the EntityRestCache will update the database.
// or,
// if we have an UPDATE event with patches, we can also re-create the server state locally (happens in the EntityRestCache)
// if we don't have this instance in the database, we don't need this event anyway
const isCreateWithInstance = entityUpdateData.operation === OperationType.CREATE && entityUpdateData.instance != null
const isUpdateWithPatches = entityUpdateData.operation === OperationType.UPDATE && entityUpdateData.patches != null
const isListElement = entityUpdateData.instanceListId != ""
if (isCreateWithInstance || isUpdateWithPatches || !isListElement) {
progressMonitor.workDone(1)
total += 1
continue
}
if (entityUpdateData.operation === OperationType.DELETE) {
progressMonitor.workDone(1)
total += 1
continue
} else {
const isTypeIdentifierInitialized = prefetchMap.has(typeIdentifier)
if (!isTypeIdentifierInitialized) {
prefetchMap.set(typeIdentifier, new Map().set(entityUpdateData.instanceListId, new Map()))
}
const isInstanceListInitialized = prefetchMap?.get(typeIdentifier)?.has(entityUpdateData.instanceListId)
if (!isInstanceListInitialized) {
prefetchMap.get(typeIdentifier)?.set(entityUpdateData.instanceListId, new Map())
}
const isInstanceIdInitialized = prefetchMap?.get(typeIdentifier)?.get(entityUpdateData.instanceListId)?.has(entityUpdateData.instanceId)
if (!isTypeIdentifierInitialized || !isInstanceListInitialized || !isInstanceIdInitialized) {
prefetchMap.get(typeIdentifier)!.get(entityUpdateData.instanceListId)!.set(entityUpdateData.instanceId, [])
}
}
const singleEntityUpdateEventIndexes = prefetchMap.get(typeIdentifier)!.get(entityUpdateData.instanceListId)!.get(entityUpdateData.instanceId)!
singleEntityUpdateEventIndexes.push(eventIndexInList)
total += 1
}
return prefetchMap
}
}
/**
* Returns whether the error is expected for the cases where our local state might not be up-to-date with the server yet. E.g. we might be processing an update
* for the instance that was already deleted. Normally this would be optimized away but it might still happen due to timing.
*/
function isExpectedErrorForSynchronization(e: Error): boolean {
return e instanceof NotFoundError || e instanceof NotAuthorizedError
}
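To illustrate the nested map the prefetcher builds (a sketch with
hypothetical type strings and ids):

```ts
// typeString -> listId -> elementId -> indexes of the matching events in allEventsFromAllBatch
const examplePrefetchMap = new Map([
	["tutanota/Mail", new Map([["mailListA", new Map([["mailId1", [0, 3]], ["mailId2", [1]]])]])],
	["tutanota/Contact", new Map([["contactListB", new Map([["contactId1", [2]]])]])],
])
// Only lists with more than one element id are batch-loaded via loadMultiple;
// a single-element list would not save anything over a single GET.
```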


@ -1,7 +1,4 @@
import { OperationType } from "../common/TutanotaConstants.js"
import { findAllAndRemove, isSameTypeRef } from "@tutao/tutanota-utils"
import { ConnectionError, ServiceUnavailableError } from "../common/error/RestError.js"
import { ProgrammingError } from "../common/error/ProgrammingError.js"
import { ProgressMonitorDelegate } from "./ProgressMonitorDelegate.js"
import { EntityUpdateData } from "../common/utils/EntityUpdateUtils"
@ -13,66 +10,11 @@ export type QueuedBatch = {
type WritableQueuedBatch = QueuedBatch & { events: EntityUpdateData[] }
export const enum EntityModificationType {
CREATE = "CREATE",
UPDATE = "UPDATE",
DELETE = "DELETE",
}
type QueueAction = (nextElement: QueuedBatch) => Promise<void>
/**
* Checks which modification is applied in the given batch for the entity id.
* @param batch entity updates of the batch.
* @private visibleForTests
*/
export function batchMod(batchId: Id, batch: ReadonlyArray<EntityUpdateData>, entityUpdate: EntityUpdateData): EntityModificationType {
for (const batchEvent of batch) {
if (
entityUpdate.instanceId === batchEvent.instanceId &&
entityUpdate.instanceListId === batchEvent.instanceListId &&
isSameTypeRef(entityUpdate.typeRef, batchEvent.typeRef)
) {
switch (batchEvent.operation) {
case OperationType.CREATE:
return EntityModificationType.CREATE
case OperationType.UPDATE:
return EntityModificationType.UPDATE
case OperationType.DELETE:
return EntityModificationType.DELETE
default:
throw new ProgrammingError(`Unknown operation: ${batchEvent.operation}`)
}
}
}
throw new ProgrammingError(
`Batch does not have events for ${entityUpdate.typeRef.app}/${entityUpdate.typeRef.typeId} ${lastOperationKey(entityUpdate)}, batchId: ${batchId}`,
)
}
// A key for _lastOperationForEntity.
// At runtime just an element id or listId/elementId.
// Adding brand for type safety.
type LastOperationKey = string & { __brand: "lastOpeKey" }
function lastOperationKey(update: EntityUpdateData): LastOperationKey {
const typeIdentifier = `${update.typeRef.app}/${update.typeRef.typeId}`
if (update.instanceListId) {
return `${typeIdentifier}/${update.instanceListId}/${update.instanceId}` as LastOperationKey
} else {
return `${typeIdentifier}/${update.instanceId}` as LastOperationKey
}
}
export class EventQueue {
/** Batches to process. Oldest first. */
private readonly eventQueue: Array<WritableQueuedBatch>
// the last processed operation for a given entity id
private readonly lastOperationForEntity: Map<LastOperationKey, QueuedBatch>
public readonly eventQueue: Array<WritableQueuedBatch>
private processingBatch: QueuedBatch | null
private paused: boolean
private progressMonitor: ProgressMonitorDelegate | null
@ -80,12 +22,10 @@ export class EventQueue {
/**
* @param tag identifier for better log messages
* @param optimizationEnabled whether the queue should try to optimize events and remove unnecessary ones with the knowledge of newer ones
* @param queueAction the action that is executed for each batch. Must *never* throw.
*/
constructor(private readonly tag: string, private readonly optimizationEnabled: boolean, private readonly queueAction: QueueAction) {
constructor(private readonly tag: string, private readonly queueAction: QueueAction) {
this.eventQueue = []
this.lastOperationForEntity = new Map()
this.processingBatch = null
this.paused = false
this.progressMonitor = null
@ -113,97 +53,17 @@ export class EventQueue {
batchId,
}
if (!this.optimizationEnabled) {
newBatch.events.push(...newEvents)
} else {
this.optimizingAddEvents(newBatch, batchId, groupId, newEvents)
}
if (newBatch.events.length !== 0) {
this.eventQueue.push(newBatch)
for (const update of newBatch.events) {
this.lastOperationForEntity.set(lastOperationKey(update), newBatch)
}
}
// ensures that events are processed when not paused
this.start()
return newBatch.events.length > 0
}
private optimizingAddEvents(newBatch: WritableQueuedBatch, batchId: Id, groupId: Id, newEvents: ReadonlyArray<EntityUpdateData>): void {
for (const newEvent of newEvents) {
const lastOpKey = lastOperationKey(newEvent)
const lastBatchForEntity = this.lastOperationForEntity.get(lastOpKey)
if (
lastBatchForEntity == null ||
(this.processingBatch != null && this.processingBatch === lastBatchForEntity) ||
groupId !== lastBatchForEntity.groupId
) {
// If there's no current operation, there's nothing to merge, just add
// If current operation is already being processed, don't modify it, we cannot merge anymore and should just append.
newBatch.events.push(newEvent)
} else {
const newEntityModification = batchMod(batchId, newEvents, newEvent)
const lastEntityModification = batchMod(lastBatchForEntity.batchId, lastBatchForEntity.events, newEvent)
if (newEntityModification === EntityModificationType.UPDATE) {
switch (lastEntityModification) {
case EntityModificationType.CREATE:
// Skip create because the create was not processed yet and we will download the updated version already
break
case EntityModificationType.UPDATE:
// Skip update because the previous update was not processed yet and we will download the updated version already
break
case EntityModificationType.DELETE:
throw new ProgrammingError(
`UPDATE not allowed after DELETE. Last batch: ${lastBatchForEntity.batchId}, new batch: ${batchId}, ${newEvent.typeRef.typeId} ${lastOpKey}`,
)
}
} else if (newEntityModification === EntityModificationType.DELETE) {
// delete all other events because they don't matter if the entity is already gone
this.removeEventsForInstance(lastOpKey)
// set last operation early to make sure that it's not some empty batch that is the last operation, otherwise batchMod will fail.
// this shouldn't happen (because delete + create for the same entity in the same batch is not really a thing) and is a bit hacky,
// but it works?
this.lastOperationForEntity.set(lastOpKey, newBatch)
// add delete event
newBatch.events.push(newEvent)
} else if (newEntityModification === EntityModificationType.CREATE) {
if (lastEntityModification === EntityModificationType.DELETE || lastEntityModification === EntityModificationType.CREATE) {
// It is likely custom id instance which got re-created
newBatch.events.push(newEvent)
} else {
throw new ProgrammingError(
`Impossible modification combination ${lastEntityModification} ${newEntityModification} ${JSON.stringify(newEvent)}`,
)
}
} else {
throw new ProgrammingError(
`Impossible modification combination ${lastEntityModification} ${newEntityModification} ${JSON.stringify(newEvent)}`,
)
}
}
}
}
private removeEventsForInstance(operationKey: LastOperationKey, startIndex: number = 0): void {
// We keep empty batches because we expect certain number of batches to be processed and it's easier to just keep them.
for (let i = startIndex; i < this.eventQueue.length; i++) {
const batchInThePast = this.eventQueue[i]
if (this.processingBatch === batchInThePast) {
continue
}
// this will remove all events for the element id from the batch
// we keep delete events because they don't hurt generally and we also want things to be timely deleted
findAllAndRemove(batchInThePast.events, (event) => event.operation !== OperationType.DELETE && lastOperationKey(event) === operationKey)
}
}
start() {
if (this.processingBatch) {
return
@ -232,18 +92,11 @@ export class EventQueue {
this.progressMonitor?.workDone(1)
this.processingBatch = null
// When we are done with the batch, we don't want to merge with it anymore
for (const event of next.events) {
const concatenatedId = lastOperationKey(event)
if (this.lastOperationForEntity.get(concatenatedId) === next) {
this.lastOperationForEntity.delete(concatenatedId)
}
}
// do this *before* processNext() is called
this.processNext()
})
.catch((e) => {
console.log("EventQueue", this.tag, this.optimizationEnabled, "error", next, e)
console.log("EventQueue", this.tag, "error", next, e)
// processing continues if the event bus receives a new event
this.processingBatch = null
@ -260,10 +113,6 @@ export class EventQueue {
this.eventQueue.splice(0)
this.processingBatch = null
for (const k of this.lastOperationForEntity.keys()) {
this.lastOperationForEntity.delete(k)
}
}
pause() {
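After this simplification the queue is a plain FIFO; a minimal usage
sketch (hypothetical tag and handler):

```ts
// The queue now processes batches strictly in order; event merging and
// dropping are gone, prefetching makes the extra requests cheap instead.
const queue = new EventQueue("example", async (batch) => {
	console.log(`processing ${batch.batchId}: ${batch.events.length} events`)
})
const entityUpdates: EntityUpdateData[] = [] // would contain real updates
queue.add("batch1", "group1", entityUpdates) // triggers start() unless paused
```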


@ -25,8 +25,16 @@ import {
PublicKeyIdentifierType,
SYSTEM_GROUP_MAIL_ADDRESS,
} from "../../common/TutanotaConstants"
import { HttpMethod, TypeModelResolver } from "../../common/EntityFunctions"
import { BucketPermission, GroupMembership, InstanceSessionKey, PatchListTypeRef, Permission } from "../../entities/sys/TypeRefs.js"
import { HttpMethod, PatchOperationType, TypeModelResolver } from "../../common/EntityFunctions"
import {
BucketPermission,
createPatch,
createPatchList,
GroupMembership,
InstanceSessionKey,
PatchListTypeRef,
Permission,
} from "../../entities/sys/TypeRefs.js"
import {
BucketPermissionTypeRef,
createInstanceSessionKey,
@ -87,6 +95,9 @@ import { KeyRotationFacade } from "../facades/KeyRotationFacade.js"
import { InstancePipeline } from "./InstancePipeline"
import { EntityAdapter } from "./EntityAdapter"
import { typeModelToRestPath } from "../rest/EntityRestClient"
import { convertJsToDbType } from "./ModelMapper"
import { ValueType } from "../../common/EntityConstants"
import { AttributeModel } from "../../common/AttributeModel"
assertWorkerOrNode()
@ -774,7 +785,6 @@ export class CryptoFacade {
private async updateOwnerEncSessionKey(instance: EntityAdapter, ownerGroupKey: VersionedKey, resolvedSessionKey: AesKey) {
const newOwnerEncSessionKey = encryptKeyWithVersionedKey(ownerGroupKey, resolvedSessionKey)
const oldInstance = structuredClone(instance)
this.setOwnerEncSessionKey(instance, newOwnerEncSessionKey)
const id = instance._id
@ -783,19 +793,27 @@ export class CryptoFacade {
const headers = this.userFacade.createAuthHeaders()
headers.v = String(instance.typeModel.version)
const untypedInstance = await this.instancePipeline.typeMapper.applyDbTypes(
instance.typeModel as ClientTypeModel,
instance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
)
let ownerEncSessionKeyAttributeIdStr = assertNotNull(AttributeModel.getAttributeId(typeModel, "_ownerEncSessionKey")).toString()
let ownerKeyVersionAttributeIdStr = assertNotNull(AttributeModel.getAttributeId(typeModel, "_ownerKeyVersion")).toString()
if (env.networkDebugging) {
ownerEncSessionKeyAttributeIdStr += ":_ownerEncSessionKey"
ownerKeyVersionAttributeIdStr += ":_ownerKeyVersion"
}
const patchList = await computePatchPayload(
oldInstance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
instance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
untypedInstance,
instance.typeModel,
this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver),
env.networkDebugging,
)
const patchList = createPatchList({
patches: [
createPatch({
patchOperation: PatchOperationType.REPLACE,
value: uint8ArrayToBase64(newOwnerEncSessionKey.key),
attributePath: ownerEncSessionKeyAttributeIdStr,
}),
createPatch({
patchOperation: PatchOperationType.REPLACE,
value: newOwnerEncSessionKey.encryptingKeyVersion.toString(),
attributePath: ownerKeyVersionAttributeIdStr,
}),
],
})
const patchPayload = await this.instancePipeline.mapAndEncrypt(PatchListTypeRef, patchList, null)


@ -137,7 +137,7 @@ export class CryptoMapper {
return decrypted
}
private async decryptAggregateAssociation(
public async decryptAggregateAssociation(
associationServerTypeModel: ServerTypeModel | ClientTypeModel,
encryptedInstanceValues: Array<ServerModelEncryptedParsedInstance>,
sk: Nullable<AesKey>,


@ -10,7 +10,7 @@ import { TypeModelResolver } from "../../common/EntityFunctions"
assertWorkerOrNode()
export const UPDATE_SESSION_KEYS_SERVICE_DEBOUNCE_MS = 50
export const UPDATE_SESSION_KEYS_SERVICE_DEBOUNCE_MS = 2500
/**
* This queue collects updates for ownerEncSessionKeys and debounces the update request to the UpdateSessionKeysService,


@ -0,0 +1,380 @@
// read from the offline db according to the list and element id on the entityUpdate
// decrypt encrypted fields using the OwnerEncSessionKey on the entry from the offline db
// apply patch operations using logic similar to the server's
// update the instance in the offline db
import {
EncryptedParsedAssociation,
EncryptedParsedValue,
Entity,
ModelValue,
ParsedAssociation,
ParsedInstance,
ParsedValue,
ServerModelEncryptedParsedInstance,
ServerModelParsedInstance,
ServerModelUntypedInstance,
ServerTypeModel,
} from "../../common/EntityTypes"
import { Patch } from "../../entities/sys/TypeRefs"
import { assertNotNull, Base64, deepEqual, isEmpty, lazy, promiseMap, TypeRef } from "@tutao/tutanota-utils"
import { AttributeModel } from "../../common/AttributeModel"
import { CacheStorage } from "../rest/DefaultEntityRestCache"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { PatchOperationError } from "../../common/error/PatchOperationError"
import { AssociationType, Cardinality } from "../../common/EntityConstants"
import { PatchOperationType, ServerTypeModelResolver } from "../../common/EntityFunctions"
import { InstancePipeline } from "../crypto/InstancePipeline"
import { isSameId, removeTechnicalFields } from "../../common/utils/EntityUtils"
import { convertDbToJsType } from "../crypto/ModelMapper"
import { decryptValue } from "../crypto/CryptoMapper"
import { VersionedEncryptedKey } from "../crypto/CryptoWrapper"
import { AesKey, extractIvFromCipherText } from "@tutao/tutanota-crypto"
import { CryptoFacade } from "../crypto/CryptoFacade"
import { parseKeyVersion } from "../facades/KeyLoaderFacade"
export class PatchMerger {
constructor(
private readonly cacheStorage: CacheStorage,
public readonly instancePipeline: InstancePipeline,
private readonly serverTypeResolver: ServerTypeModelResolver,
private readonly cryptoFacade: lazy<CryptoFacade>,
) {}
// visible for testing
public async getPatchedInstanceParsed(
instanceType: TypeRef<Entity>,
listId: Nullable<Id>,
elementId: Id,
patches: Array<Patch>,
): Promise<ServerModelParsedInstance | null> {
const parsedInstance = await this.cacheStorage.getParsed(instanceType, listId, elementId)
if (parsedInstance != null) {
const typeModel = await this.serverTypeResolver.resolveServerTypeReference(instanceType)
// We need to preserve the order of patches, so no promiseMap here
for (const patch of patches) {
await this.applySinglePatch(parsedInstance, typeModel, patch)
}
return parsedInstance
}
return null
}
public async patchAndStoreInstance(
instanceType: TypeRef<Entity>,
listId: Nullable<Id>,
elementId: Id,
patches: Array<Patch>,
): Promise<Nullable<ServerModelParsedInstance>> {
const patchAppliedInstance = await this.getPatchedInstanceParsed(instanceType, listId, elementId, patches)
if (patchAppliedInstance == null) {
return null
}
await this.cacheStorage.put(instanceType, patchAppliedInstance)
return patchAppliedInstance
}
private async applySinglePatch(parsedInstance: ServerModelParsedInstance, typeModel: ServerTypeModel, patch: Patch) {
try {
const pathList: Array<string> = patch.attributePath.split("/") // e.g. $mailId/$attrIdRecipient/$aggregateIdRecipient/$attrIdName
const pathResult: PathResult = await this.traversePath(parsedInstance, typeModel, pathList)
const attributeId = pathResult.attributeId
const pathResultTypeModel = pathResult.typeModel
// We need to map and decrypt for REPLACE and ADD_ITEM as the payloads are encrypted; REMOVE_ITEM only has aggregate ids, generated ids, or id tuples
if (patch.patchOperation !== PatchOperationType.REMOVE_ITEM) {
const encryptedParsedValue: Nullable<EncryptedParsedValue | EncryptedParsedAssociation> = await this.parseValueOnPatch(pathResult, patch.value)
const isAggregation = pathResultTypeModel.associations[attributeId]?.type === AssociationType.Aggregation
const isEncryptedValue = pathResultTypeModel.values[attributeId]?.encrypted
let value: Nullable<ParsedValue | ParsedAssociation>
if ((isAggregation && typeModel.encrypted) || isEncryptedValue) {
const sk = await this.getSessionKey(parsedInstance, typeModel)
value = await this.decryptValueOnPatchIfNeeded(pathResult, encryptedParsedValue, sk)
} else {
value = await this.decryptValueOnPatchIfNeeded(pathResult, encryptedParsedValue, null)
}
await this.applyPatchOperation(patch.patchOperation, pathResult, value)
} else {
let idArray = JSON.parse(patch.value!) as Array<any>
await this.applyPatchOperation(patch.patchOperation, pathResult, idArray)
}
} catch (e) {
throw new PatchOperationError(e)
}
}
public async getSessionKey(parsedInstance: ServerModelParsedInstance, typeModel: ServerTypeModel) {
const _ownerEncSessionKey = AttributeModel.getAttribute<Uint8Array>(parsedInstance, "_ownerEncSessionKey", typeModel)
const _ownerKeyVersion = parseKeyVersion(AttributeModel.getAttribute<string>(parsedInstance, "_ownerKeyVersion", typeModel))
const _ownerGroup = AttributeModel.getAttribute<Id>(parsedInstance, "_ownerGroup", typeModel)
const versionedEncryptedKey = {
encryptingKeyVersion: _ownerKeyVersion,
key: _ownerEncSessionKey,
} as VersionedEncryptedKey
return await this.cryptoFacade().decryptSessionKey(_ownerGroup, versionedEncryptedKey)
}
private async applyPatchOperation(
patchOperation: Values<PatchOperationType>,
pathResult: PathResult,
value: Nullable<ParsedValue | ParsedAssociation> | Array<Id | IdTuple>,
) {
const { attributeId, instanceToChange, typeModel } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAssociation = typeModel.associations[attributeId] !== undefined
const isAggregationAssociation = isAssociation && typeModel.associations[attributeId].type === AssociationType.Aggregation
switch (patchOperation) {
case PatchOperationType.ADD_ITEM: {
if (isValue) {
throw new PatchOperationError(
"AddItem operation is supported for associations only, but the operation was called on value with id " + attributeId,
)
}
let associationArray = instanceToChange[attributeId] as ParsedAssociation
const valuesToAdd = value as ParsedAssociation
const commonAssociationItems = associationArray.filter((association) => valuesToAdd.some((item) => deepEqual(item, association)))
if (!isEmpty(commonAssociationItems)) {
console.log(
`PatchMerger attempted to add an already existing item to an association. Common items: ${JSON.stringify(commonAssociationItems)}`,
)
}
if (isAggregationAssociation) {
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const aggregationsWithCommonIdsButDifferentValues = associationArray.filter((aggregate: ParsedInstance) =>
valuesToAdd.some((item: ParsedInstance) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
const itemWithoutFinalIvs = removeTechnicalFields(structuredClone(item))
const aggregateWithoutFinalIvs = removeTechnicalFields(structuredClone(aggregate))
return (
aggregate[aggregateIdAttributeId] === item[aggregateIdAttributeId] && !deepEqual(itemWithoutFinalIvs, aggregateWithoutFinalIvs)
)
}),
)
if (!isEmpty(aggregationsWithCommonIdsButDifferentValues)) {
throw new PatchOperationError(
`PatchMerger attempted to add an existing aggregate with different values.
existing items: ${JSON.stringify(associationArray)},
values attempted to be added: ${JSON.stringify(valuesToAdd)}`,
)
}
}
const newAssociationValue = associationArray.concat(valuesToAdd)
instanceToChange[attributeId] = distinctAssociations(newAssociationValue)
break
}
case PatchOperationType.REMOVE_ITEM: {
if (isValue) {
throw new PatchOperationError(
"AddItem operation is supported for associations only, but the operation was called on value with id " + attributeId,
)
}
if (!isAggregationAssociation) {
const associationArray = instanceToChange[attributeId] as Array<Id | IdTuple>
const idsToRemove = value as Array<Id | IdTuple>
const remainingAssociations = associationArray.filter(
(element) =>
!idsToRemove.some((item) => {
return isSameId(element, item) // use is same id on the ids instead
}),
)
instanceToChange[attributeId] = distinctAssociations(remainingAssociations)
} else {
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const aggregationArray = instanceToChange[attributeId] as Array<ParsedInstance>
const idsToRemove = value as Array<Id>
const remainingAggregations = aggregationArray.filter(
(element) =>
!idsToRemove.some((item) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
return isSameId(item as Id, element[aggregateIdAttributeId] as Id)
}),
)
instanceToChange[attributeId] = distinctAssociations(remainingAggregations)
}
break
}
case PatchOperationType.REPLACE: {
if (isValue) {
instanceToChange[attributeId] = value as ParsedValue
} else if (!isAggregationAssociation) {
instanceToChange[attributeId] = value as ParsedAssociation
} else {
throw new PatchOperationError("attempted to replace aggregation " + typeModel.associations[attributeId].name + " on " + typeModel.name)
}
break
}
}
}
private async parseValueOnPatch(
pathResult: PathResult,
value: string | null,
): Promise<Nullable<EncryptedParsedValue> | Nullable<EncryptedParsedAssociation>> {
const { typeModel, attributeId } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAssociation = typeModel.associations[attributeId] !== undefined
const isAggregation = isAssociation && typeModel.associations[attributeId].type === AssociationType.Aggregation
const isNonAggregateAssociation = isAssociation && !isAggregation
if (isValue) {
const valueInfo = typeModel.values[attributeId]
const valueType = valueInfo.type
if (value == null || value === "" || valueInfo.encrypted) {
return value
} else {
return convertDbToJsType(valueType, value)
}
} else if (isAssociation) {
if (isNonAggregateAssociation) {
return JSON.parse(value!)
} else {
const aggregatedEntities = JSON.parse(value!) as Array<ServerModelUntypedInstance>
aggregatedEntities.map(AttributeModel.removeNetworkDebuggingInfoIfNeeded)
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
return await promiseMap(
aggregatedEntities,
async (entity: ServerModelUntypedInstance) => await this.instancePipeline.typeMapper.applyJsTypes(aggregationTypeModel, entity),
)
}
}
return null
}
private async decryptValueOnPatchIfNeeded(
pathResult: PathResult,
value: Nullable<EncryptedParsedValue | EncryptedParsedAssociation>,
sk: Nullable<AesKey>,
): Promise<Nullable<ParsedValue> | Nullable<ParsedAssociation>> {
const { typeModel, attributeId } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAggregation = typeModel.associations[attributeId] !== undefined && typeModel.associations[attributeId].type === AssociationType.Aggregation
if (isValue) {
if (sk !== null) {
const encryptedValueInfo = typeModel.values[attributeId] as ModelValue & { encrypted: true }
const encryptedValue = value
if (encryptedValue == null) {
delete pathResult.instanceToChange._finalIvs[attributeId]
} else if (encryptedValue === "") {
// the encrypted value is "" if the decrypted value is the default value
// storing this marker lets us restore that empty string when we re-encrypt the instance.
// check out encrypt in CryptoMapper to see the other side of this.
pathResult.instanceToChange._finalIvs[attributeId] = null
} else if (encryptedValueInfo.final && encryptedValue) {
// the server needs to be able to check if an encrypted final field changed.
// that's only possible if we re-encrypt using a deterministic IV, because the ciphertext changes if
// the IV or the value changes.
// storing the IV we used for the initial encryption lets us reuse it later.
pathResult.instanceToChange._finalIvs[attributeId] = extractIvFromCipherText(encryptedValue as Base64)
}
return decryptValue(encryptedValueInfo, encryptedValue as Base64, sk)
}
return value
} else if (isAggregation) {
const encryptedAggregatedEntities = value as Array<ServerModelEncryptedParsedInstance>
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
return await this.instancePipeline.cryptoMapper.decryptAggregateAssociation(aggregationTypeModel, encryptedAggregatedEntities, sk)
}
return value // id and idTuple associations are never encrypted
}
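/**
 * Resolves an attribute path to the instance that owns the addressed attribute.
 * A path is a sequence of attribute ids, with aggregate ids interleaved for nested aggregates:
 * e.g. the path 3/aggId/10 addresses attribute 10 on the aggregate with id "aggId" inside
 * the aggregation with attribute id 3.
 */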
private async traversePath(parsedInstance: ServerModelParsedInstance, serverTypeModel: ServerTypeModel, path: Array<string>): Promise<PathResult> {
if (path.length == 0) {
throw new PatchOperationError("Invalid attributePath, expected non-empty attributePath")
}
const pathItem = path.shift()!
try {
let attributeId: number
if (env.networkDebugging) {
attributeId = parseInt(pathItem.split(":")[0])
} else {
attributeId = parseInt(pathItem)
}
if (!Object.keys(parsedInstance).some((attribute) => attribute == attributeId.toString())) {
throw new PatchOperationError("attribute id " + attributeId + " not found on the parsed instance. Type: " + serverTypeModel.name)
}
if (path.length == 0) {
return {
attributeId: attributeId,
instanceToChange: parsedInstance,
typeModel: serverTypeModel,
} as PathResult
}
const isAggregation = serverTypeModel.associations[attributeId].type === AssociationType.Aggregation
if (!isAggregation) {
throw new PatchOperationError("Expected the attribute id " + attributeId + " to be an aggregate on the type: " + serverTypeModel.name)
}
const modelAssociation = serverTypeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? serverTypeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const maybeAggregateIdPathItem = path.shift()!
const aggregateArray = parsedInstance[attributeId] as Array<ServerModelParsedInstance>
const aggregatedEntity = assertNotNull(
aggregateArray.find((entity) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
return isSameId(maybeAggregateIdPathItem, entity[aggregateIdAttributeId] as Id)
}),
)
return this.traversePath(aggregatedEntity, aggregationTypeModel, path)
} catch (e) {
throw new PatchOperationError("An error occurred while traversing path " + path + ": " + e.message)
}
}
private assertCorrectAssociationCardinality(pathResult: PathResult, valuesToAdd: ParsedAssociation): void {
const modelAssociation = pathResult.typeModel.associations[pathResult.attributeId]!
const cardinality = modelAssociation.cardinality
if ((cardinality == Cardinality.ZeroOrOne && valuesToAdd.length > 1) || (cardinality == Cardinality.One && valuesToAdd.length != 1)) {
throw new PatchOperationError(
`invalid value / cardinality combination for value ${pathResult.attributeId} on association ${modelAssociation.name}: ${cardinality}, val.len: ${valuesToAdd.length}`,
)
}
}
private assertCorrectValueCardinality(pathResult: PathResult, valueToAdd: Nullable<ParsedValue>): void {
const modelValue = pathResult.typeModel.values[pathResult.attributeId]
const cardinality = modelValue.cardinality
if (cardinality == Cardinality.One && valueToAdd === null) {
throw new PatchOperationError(
`invalid value / cardinality combination for value ${pathResult.attributeId} on value ${modelValue.name}: ${cardinality}, isNull: ${true}`,
)
}
}
}
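/**
 * Removes duplicates from an association array. For aggregates (recognized by the presence of
 * _finalIvs), technical fields are stripped before comparison, so two aggregates differing only
 * in their IVs count as the same item.
 */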
export function distinctAssociations(associationArray: ParsedAssociation) {
return associationArray.reduce((acc: Array<any>, current) => {
if (
!acc.some((item) => {
if (item._finalIvs !== undefined) {
const itemWithoutFinalIvs = removeTechnicalFields(structuredClone(item) as ParsedInstance)
const currentWithoutFinalIvs = removeTechnicalFields(structuredClone(current) as ParsedInstance)
return deepEqual(itemWithoutFinalIvs, currentWithoutFinalIvs)
}
return deepEqual(item, current)
})
) {
acc.push(current)
}
return acc
}, [])
}
export type PathResult = {
instanceToChange: ServerModelParsedInstance
attributeId: number
typeModel: ServerTypeModel
}
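For illustration, a minimal sketch of how a single patch flows through the merger; the type ref, ids and attribute id below are made up, only createPatch and patchAndStoreInstance are taken from this change:
// hypothetical: attribute id "5" is an unencrypted string value on SomeTypeRef
const patch = createPatch({
attributePath: "5",
value: "new subject",
patchOperation: PatchOperationType.REPLACE,
})
// loads the cached parsed instance, traverses the path, parses/decrypts the value if needed,
// applies the REPLACE operation and writes the result back to the cache storage
const patched = await patchMerger.patchAndStoreInstance(SomeTypeRef, listId, elementId, [patch])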

View file

@ -1,5 +1,4 @@
import { ListElementEntity, SomeEntity } from "../../common/EntityTypes"
import { QueuedBatch } from "../EventQueue.js"
import { ProgrammingError } from "../../common/error/ProgrammingError"
import { TypeRef } from "@tutao/tutanota-utils"
import { EntityRestCache } from "./DefaultEntityRestCache.js"
@ -15,6 +14,10 @@ export class AdminClientDummyEntityRestCache implements EntityRestCache {
throw new ProgrammingError("erase not implemented")
}
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void> {
throw new ProgrammingError("deleteFromCacheIfExists not implemented")
}
async eraseMultiple<T extends SomeEntity>(listId: Id, instances: Array<T>): Promise<void> {
throw new ProgrammingError("eraseMultiple not implemented")
}

View file

@ -1,5 +1,4 @@
import {
CacheMode,
EntityRestClient,
EntityRestClientEraseOptions,
EntityRestClientLoadOptions,
@ -9,7 +8,7 @@ import {
OwnerEncSessionKeyProvider,
} from "./EntityRestClient"
import { OperationType } from "../../common/TutanotaConstants"
import { assertNotNull, getFirstOrThrow, getTypeString, groupBy, isSameTypeRef, lastThrow, TypeRef } from "@tutao/tutanota-utils"
import { assertNotNull, deepEqual, getFirstOrThrow, getTypeString, isEmpty, isSameTypeRef, lastThrow, TypeRef } from "@tutao/tutanota-utils"
import {
AuditLogEntryTypeRef,
BucketPermissionTypeRef,
@ -26,9 +25,9 @@ import {
UserGroupRootTypeRef,
} from "../../entities/sys/TypeRefs.js"
import { ValueType } from "../../common/EntityConstants.js"
import { NotAuthorizedError, NotFoundError } from "../../common/error/RestError"
import { CalendarEventUidIndexTypeRef, MailDetailsBlobTypeRef, MailSetEntryTypeRef, MailTypeRef } from "../../entities/tutanota/TypeRefs.js"
import {
computePatches,
CUSTOM_MAX_ID,
CUSTOM_MIN_ID,
elementIdPart,
@ -36,19 +35,30 @@ import {
GENERATED_MAX_ID,
GENERATED_MIN_ID,
get_IdValue,
getElementId,
isCustomIdType,
listIdPart,
} from "../../common/utils/EntityUtils"
import { ProgrammingError } from "../../common/error/ProgrammingError"
import { assertWorkerOrNode } from "../../common/Env"
import type { Entity, ListElementEntity, ServerModelParsedInstance, SomeEntity, TypeModel } from "../../common/EntityTypes"
import type {
ClientModelParsedInstance,
ClientTypeModel,
Entity,
ListElementEntity,
ServerModelParsedInstance,
SomeEntity,
TypeModel,
} from "../../common/EntityTypes"
import { ENTITY_EVENT_BATCH_EXPIRE_MS } from "../EventBusClient"
import { CustomCacheHandlerMap } from "./cacheHandler/CustomCacheHandler.js"
import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../common/utils/EntityUpdateUtils.js"
import { EntityUpdateData } from "../../common/utils/EntityUpdateUtils.js"
import { TypeModelResolver } from "../../common/EntityFunctions"
import { AttributeModel } from "../../common/AttributeModel"
import { collapseId, expandId } from "./RestClientIdUtils"
import { PatchMerger } from "../offline/PatchMerger"
import { NotAuthorizedError, NotFoundError } from "../../common/error/RestError"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { BitArray } from "@tutao/tutanota-crypto"
assertWorkerOrNode()
@ -120,8 +130,14 @@ export interface EntityRestCache extends EntityRestInterface {
* Detect if out of sync based on stored "lastUpdateTime" and the current server time
*/
isOutOfSync(): Promise<boolean>
/**
* Delete a cached entity. Sometimes this is necessary to ensure that you always load the newest version.
*/
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void>
}
// todo: remove this and use from offlineStorage.ts/Range
export type Range = { lower: Id; upper: Id }
export type LastUpdateTime = { type: "recorded"; time: number } | { type: "never" } | { type: "uninitialized" }
@ -275,6 +291,7 @@ export class DefaultEntityRestCache implements EntityRestCache {
private readonly entityRestClient: EntityRestClient,
private readonly storage: CacheStorage,
private readonly typeModelResolver: TypeModelResolver,
private readonly patchMerger: PatchMerger,
) {}
async load<T extends SomeEntity>(typeRef: TypeRef<T>, id: PropertyType<T, "_id">, opts: EntityRestClientLoadOptions = {}): Promise<T> {
@ -376,11 +393,10 @@ export class DefaultEntityRestCache implements EntityRestCache {
return this.entityRestClient.getRestClient().getServerTimestampMs()
}
/**
* Delete a cached entity. Sometimes this is necessary to do to ensure you always load the new version
*/
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Id): Promise<void> {
return this.storage.deleteIfExists(typeRef, listId, elementId)
async deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void> {
for (const eId of elementId) {
await this.storage.deleteIfExists(typeRef, listId, eId)
}
}
private async _loadMultiple<T extends SomeEntity>(
@ -726,90 +742,32 @@ export class DefaultEntityRestCache implements EntityRestCache {
*
* @return Promise, which resolves to the array of valid events (events whose response was NotFound or NotAuthorized are filtered out)
*/
// discuss:
// this function no longer makes use of the network client, so it is probably better to move it outside. We also had the idea of a new call that combines
// prefetching of instances with this processing.
// if we do that, we can also remove this from the EntityRestCache interface
async entityEventsReceived(events: readonly EntityUpdateData[], batchId: Id, groupId: Id): Promise<readonly EntityUpdateData[]> {
await this.recordSyncTime()
// we handle post multiple create operations separately to optimize the number of requests with getMultiple
const createUpdatesForLETs: EntityUpdateData[] = []
const regularUpdates: EntityUpdateData[] = [] // all updates not resulting from post multiple requests
for (const update of events) {
// monitor application is ignored
if (update.typeRef.app === "monitor") continue
// mailSetEntries are ignored because move operations are handled as a special event (and no post multiple is possible)
if (
update.operation === OperationType.CREATE &&
getUpdateInstanceId(update).instanceListId != null &&
!isUpdateForTypeRef(MailTypeRef, update) &&
!isUpdateForTypeRef(MailSetEntryTypeRef, update)
) {
createUpdatesForLETs.push(update)
} else {
regularUpdates.push(update)
}
}
const createUpdatesForLETsPerList = groupBy(createUpdatesForLETs, (update) => update.instanceListId)
const postMultipleEventUpdates: EntityUpdateData[][] = []
// we first handle potential post multiple updates in get multiple requests
for (let [instanceListId, updates] of createUpdatesForLETsPerList) {
const firstUpdate = updates[0]
const typeRef = firstUpdate.typeRef
const ids = updates.map((update) => update.instanceId)
// We only want to load the instances that are in cache range
const customHandler = this.storage.getCustomCacheHandlerMap().get(typeRef)
const idsInCacheRange =
customHandler && customHandler.getElementIdsInCacheRange
? await customHandler.getElementIdsInCacheRange(this.storage, instanceListId, ids)
: await this.getElementIdsInCacheRange(typeRef, instanceListId, ids)
if (idsInCacheRange.length === 0) {
postMultipleEventUpdates.push(updates)
} else {
const updatesNotInCacheRange =
idsInCacheRange.length === updates.length ? [] : updates.filter((update) => !idsInCacheRange.includes(update.instanceId))
try {
// loadMultiple is only called to cache the elements and check which ones return errors
const returnedInstances = await this._loadMultiple(typeRef, instanceListId, idsInCacheRange, undefined, { cacheMode: CacheMode.WriteOnly })
// We do not want to pass updates that caused an error
if (returnedInstances.length !== idsInCacheRange.length) {
const returnedIds = returnedInstances.map((instance) => getElementId(instance))
postMultipleEventUpdates.push(updates.filter((update) => returnedIds.includes(update.instanceId)).concat(updatesNotInCacheRange))
} else {
postMultipleEventUpdates.push(updates)
}
} catch (e) {
if (e instanceof NotAuthorizedError) {
// return updates that are not in cache Range if NotAuthorizedError (for those updates that are in cache range)
postMultipleEventUpdates.push(updatesNotInCacheRange)
} else {
throw e
}
}
}
}
const regularUpdates = events.filter((u) => u.typeRef.app !== "monitor")
// we need an array of UpdateEntityData
const otherEventUpdates: EntityUpdateData[] = []
const filteredUpdateEvents: EntityUpdateData[] = []
for (let update of regularUpdates) {
const { operation, typeRef } = update
const { instanceListId, instanceId } = getUpdateInstanceId(update)
switch (operation) {
case OperationType.UPDATE: {
const handledUpdate = await this.processUpdateEvent(typeRef, update)
const handledUpdate = await this.processUpdateEvent(update)
if (handledUpdate) {
otherEventUpdates.push(handledUpdate)
filteredUpdateEvents.push(handledUpdate)
}
break // do break instead of continue to avoid ide warnings
}
case OperationType.DELETE: {
if (isSameTypeRef(MailSetEntryTypeRef, typeRef) && containsEventOfType(events, OperationType.CREATE, instanceId)) {
// move for mail is handled in create event.
} else if (isSameTypeRef(MailTypeRef, typeRef)) {
if (isSameTypeRef(MailTypeRef, typeRef)) {
// delete mailDetails if they are available (as we don't send an event for this type)
const mail = await this.storage.get(typeRef, instanceListId, instanceId)
if (mail) {
@ -822,13 +780,13 @@ export class DefaultEntityRestCache implements EntityRestCache {
} else {
await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
}
otherEventUpdates.push(update)
filteredUpdateEvents.push(update)
break // do break instead of continue to avoid ide warnings
}
case OperationType.CREATE: {
const handledUpdate = await this.processCreateEvent(typeRef, update, events)
const handledUpdate = await this.processCreateEvent(typeRef, update)
if (handledUpdate) {
otherEventUpdates.push(handledUpdate)
filteredUpdateEvents.push(handledUpdate)
}
break // do break instead of continue to avoid ide warnings
}
@ -865,112 +823,121 @@ export class DefaultEntityRestCache implements EntityRestCache {
// the whole batch has been written successfully
await this.storage.putLastBatchIdForGroup(groupId, batchId)
// merge the results
return otherEventUpdates.concat(postMultipleEventUpdates.flat())
return filteredUpdateEvents
}
/** Returns {null} when the update should be skipped. */
private async processCreateEvent(
typeRef: TypeRef<any>,
update: EntityUpdateData,
batch: ReadonlyArray<EntityUpdateData>,
): Promise<EntityUpdateData | null> {
private async processCreateEvent(typeRef: TypeRef<any>, update: EntityUpdateData): Promise<EntityUpdateData | null> {
// do not return undefined to avoid implicit returns
const { instanceId, instanceListId } = getUpdateInstanceId(update)
// We put new instances into cache only when it's a new instance in the cached range which is only for the list instances.
if (instanceListId != null) {
const deleteEvent = getEventOfType(batch, OperationType.DELETE, instanceId)
// TODO This is basically a patch for the mailSetEntry
const mailSetEntry =
deleteEvent && isSameTypeRef(MailSetEntryTypeRef, typeRef)
? await this.storage.getParsed(typeRef, deleteEvent.instanceListId, instanceId)
: null
// avoid downloading new mailSetEntry in case of move event (DELETE + CREATE)
if (deleteEvent != null && mailSetEntry != null) {
// It is a move event for cached mailSetEntry
await this.storage.deleteIfExists(typeRef, deleteEvent.instanceListId, instanceId)
await this.updateListIdOfMailSetEntryAndUpdateCache(mailSetEntry, instanceListId, instanceId)
return update
} else {
// If there is a custom handler we follow its decision.
// Otherwise, we do a range check to see if we need to keep the range up-to-date.
const shouldLoad =
this.storage.getCustomCacheHandlerMap().get(typeRef)?.shouldLoadOnCreateEvent?.(update) ??
(await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId))
if (shouldLoad) {
// No need to try to download something that's not there anymore
// We do not consult custom handlers here because they are only needed for list elements.
let shouldUpdateDb = !update.isPrefetched && this.storage.getCustomCacheHandlerMap().get(typeRef)?.shouldLoadOnCreateEvent?.(update)
// Otherwise, we do a range check to see if we need to keep the range up-to-date. No need to load anything out of range
shouldUpdateDb = shouldUpdateDb ?? (await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId))
// if we have an instance attached, just update with it
// else we assume eventBusClient already did the pre-fetching, so no need to do anything
if (shouldUpdateDb && update.instance != null) {
console.log("putting the entity on the create event for ", getTypeString(typeRef), instanceListId, instanceId, " to the storage")
await this.storage.put(update.typeRef, update.instance)
} else if (shouldUpdateDb) {
console.log("downloading create event for", getTypeString(typeRef), instanceListId, instanceId)
return this.entityRestClient
.loadParsedInstance(typeRef, [instanceListId, instanceId])
.then((entity) => this.storage.put(typeRef, entity))
.then(() => update)
.catch((e) => {
try {
const parsedInstance = await this.entityRestClient.loadParsedInstance(typeRef, [instanceListId, instanceId])
await this.storage.put(update.typeRef, parsedInstance)
} catch (e) {
if (isExpectedErrorForSynchronization(e)) {
return null
} else {
throw e
}
})
}
}
return update
} else {
return update
}
}
} else {
return update
}
}
/**
* Updates the given mailSetEntry with the new list id and adds it to the cache.
*/
private async updateListIdOfMailSetEntryAndUpdateCache(mailSetEntry: ServerModelParsedInstance, newListId: Id, elementId: Id) {
// In case of a move operation we have to replace the list id always, as the mailSetEntry is stored in another folder.
const typeModel = await this.typeModelResolver.resolveServerTypeReference(MailSetEntryTypeRef)
const attributeId = AttributeModel.getAttributeId(typeModel, "_id")
if (attributeId == null) {
throw new ProgrammingError("no _id for mail set entry in type model ")
}
mailSetEntry[attributeId] = [newListId, elementId]
await this.storage.put(MailSetEntryTypeRef, mailSetEntry)
}
/** Returns {null} when the update should be skipped. */
private async processUpdateEvent(typeRef: TypeRef<SomeEntity>, update: EntityUpdateData): Promise<EntityUpdateData | null> {
const { instanceId, instanceListId } = getUpdateInstanceId(update)
const cached = await this.storage.getParsed(typeRef, instanceListId, instanceId)
// No need to try to download something that's not there anymore
private async processUpdateEvent(update: EntityUpdateData): Promise<EntityUpdateData | null> {
if (update.patches) {
const patchAppliedInstance = await this.patchMerger.patchAndStoreInstance(update.typeRef, update.instanceListId, update.instanceId, update.patches)
if (patchAppliedInstance == null) {
const newEntity = await this.entityRestClient.loadParsedInstance(update.typeRef, collapseId(update.instanceListId, update.instanceId))
await this.storage.put(update.typeRef, newEntity)
} else {
await this.assertInstanceOnUpdateIsSameAsPatched(update, patchAppliedInstance)
}
} else if (!update.isPrefetched) {
const cached = await this.storage.getParsed(update.typeRef, update.instanceListId, update.instanceId)
if (cached != null) {
try {
// in case this is an update for the user instance: if the password changed we'll be logged out at this point
// if we don't catch the expected NotAuthenticated Error that results from trying to load anything with
// the old user.
// Letting the NotAuthenticatedError propagate to the main thread instead of trying to handle it ourselves
// or throwing out the update drops us onto the login page and into the session recovery flow if the user
// clicks their saved credentials again, but lets them still use offline login if they try to use the
// outdated credentials while not connected to the internet.
if (isSameTypeRef(typeRef, GroupTypeRef)) {
console.log("DefaultEntityRestCache - processUpdateEvent of type Group:" + instanceId)
if (isSameTypeRef(update.typeRef, GroupTypeRef)) {
console.log("DefaultEntityRestCache - processUpdateEvent of type Group:" + update.instanceId)
}
const newEntity = await this.entityRestClient.loadParsedInstance(typeRef, collapseId(instanceListId, instanceId))
await this.storage.put(typeRef, newEntity)
const newEntity = await this.entityRestClient.loadParsedInstance(update.typeRef, collapseId(update.instanceListId, update.instanceId))
await this.storage.put(update.typeRef, newEntity)
return update
} catch (e) {
// If the entity is not there anymore we should evict it from the cache and not keep the outdated/nonexisting instance around.
// Even for list elements this should be safe as the instance is not there anymore and is definitely not in this version
if (isExpectedErrorForSynchronization(e)) {
console.log(`Instance not found when processing update for ${JSON.stringify(update)}, deleting from the cache.`)
await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
await this.storage.deleteIfExists(update.typeRef, update.instanceListId, update.instanceId)
return null
} else {
throw e
}
}
}
}
return update
}
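/**
 * Sanity check for updates that carry both the full instance and the patch list: after the patches
 * have been applied, computePatches between the instance from the update and the patched instance
 * must be empty, otherwise the cached entity diverged from the server and we fail loudly.
 */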
private async assertInstanceOnUpdateIsSameAsPatched(update: EntityUpdateData, patchAppliedInstance: Nullable<ServerModelParsedInstance>) {
if (update.instance != null && update.patches != null && !deepEqual(update.instance, patchAppliedInstance)) {
const instancePipeline = this.patchMerger.instancePipeline
const typeModel = await this.typeModelResolver.resolveServerTypeReference(update.typeRef)
const typeReferenceResolver = this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver)
let sk: Nullable<BitArray> = null
if (typeModel.encrypted) {
sk = await this.patchMerger.getSessionKey(assertNotNull(patchAppliedInstance), typeModel)
}
const patchedEncryptedParsedInstance = await instancePipeline.cryptoMapper.encryptParsedInstance(
typeModel as unknown as ClientTypeModel,
assertNotNull(patchAppliedInstance) as unknown as ClientModelParsedInstance,
sk,
)
const patchedUntypedInstance = await instancePipeline.typeMapper.applyDbTypes(
typeModel as unknown as ClientTypeModel,
patchedEncryptedParsedInstance,
)
const patchDiff = await computePatches(
update.instance as unknown as ClientModelParsedInstance,
assertNotNull(patchAppliedInstance) as unknown as ClientModelParsedInstance,
patchedUntypedInstance,
typeModel,
typeReferenceResolver,
true,
)
if (!isEmpty(patchDiff)) {
console.log("instance on the update: ", update.instance)
console.log("patched instance: ", patchAppliedInstance)
console.log("patches on the update: ", update.patches)
throw new ProgrammingError(
"instance with id [" +
update.instanceListId +
", " +
update.instanceId +
`] has not been successfully patched. Type: ${getTypeString(update.typeRef)}, computePatches: ${JSON.stringify(patchDiff)}`,
)
}
}
}
/**
*
* @returns {Array<Id>} the ids that are in cache range and therefore should be cached

View file

@ -217,7 +217,7 @@ export class EntityRestClient implements EntityRestInterface {
private readonly authDataProvider: AuthDataProvider,
private readonly restClient: RestClient,
private readonly lazyCrypto: lazy<CryptoFacade>,
private readonly instancePipeline: InstancePipeline,
public readonly instancePipeline: InstancePipeline,
private readonly blobAccessTokenFacade: BlobAccessTokenFacade,
private readonly typeModelResolver: TypeModelResolver,
) {}
@ -600,7 +600,8 @@ export class EntityRestClient implements EntityRestInterface {
const parsedInstance = await this.instancePipeline.modelMapper.mapToClientModelParsedInstance(instance._type as TypeRef<any>, instance)
const typeModel = await this.typeModelResolver.resolveClientTypeReference(instance._type)
const typeReferenceResolver = this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver)
const untypedInstance = await this.instancePipeline.mapAndEncrypt(downcast(instance._type), instance, sessionKey)
const encryptedParsedInstance = await this.instancePipeline.cryptoMapper.encryptParsedInstance(typeModel, parsedInstance, sessionKey)
const untypedInstance = await this.instancePipeline.typeMapper.applyDbTypes(typeModel, encryptedParsedInstance)
// figure out differing fields and build the PATCH request payload
const patchList = await computePatchPayload(
originalParsedInstance,

View file

@ -14,6 +14,7 @@ export class CustomMailEventCacheHandler implements CustomCacheHandler<Mail> {
// - we need them to display the folder contents
// - will very likely be loaded by indexer later
// - we might have the instance in offline cache already because of notification process
// however, they are already preloaded by the EventBusClient
return true
}

View file

@ -5,7 +5,7 @@ import { LanguageViewModel } from "../../misc/LanguageViewModel"
import { IdTupleWrapper, NotificationInfo } from "../../api/entities/sys/TypeRefs"
import { CredentialEncryptionMode } from "../../misc/credentials/CredentialEncryptionMode.js"
import { ExtendedNotificationMode } from "../../native/common/generatedipc/ExtendedNotificationMode"
import { assertNotNull, base64ToBase64Url, neverNull } from "@tutao/tutanota-utils"
import { assertNotNull, base64ToBase64Url, getFirstOrThrow, groupBy, neverNull } from "@tutao/tutanota-utils"
import { log } from "../DesktopLog"
import tutanotaModelInfo from "../../api/entities/tutanota/ModelInfo"
import { handleRestError } from "../../api/common/error/RestError"
@ -19,7 +19,8 @@ import { StrippedEntity } from "../../api/common/utils/EntityUtils"
import { EncryptedParsedInstance, ServerModelUntypedInstance, TypeModel } from "../../api/common/EntityTypes"
import { AttributeModel } from "../../api/common/AttributeModel"
import { InstancePipeline } from "../../api/worker/crypto/InstancePipeline"
import { ClientTypeModelResolver, TypeModelResolver } from "../../api/common/EntityFunctions"
import { ClientTypeModelResolver } from "../../api/common/EntityFunctions"
import { UnencryptedCredentials } from "../../native/common/generatedipc/UnencryptedCredentials"
const TAG = "[notifications]"
@ -27,6 +28,7 @@ export type MailMetadata = {
senderAddress: string
firstRecipientAddress: string | null
id: IdTuple
notificationInfo: StrippedEntity<NotificationInfo>
}
export class TutaNotificationHandler {
@ -44,33 +46,45 @@ export class TutaNotificationHandler {
private readonly typeModelResolver: ClientTypeModelResolver,
) {}
async onMailNotification(sseInfo: SseInfo, notificationInfo: StrippedEntity<NotificationInfo>) {
const appWindow = this.windowManager.getAll().find((window) => window.getUserId() === notificationInfo.userId)
async onMailNotification(sseInfo: SseInfo, notificationInfos: Array<StrippedEntity<NotificationInfo>>) {
const infosByListId = groupBy(notificationInfos, (ni) => assertNotNull(ni.mailId).listId)
for (const [listId, infos] of infosByListId.entries()) {
const firstNotificationInfo = getFirstOrThrow(infos)
const appWindow = this.windowManager.getAll().find((window) => window.getUserId() === firstNotificationInfo.userId)
if (appWindow && appWindow.isFocused()) {
// no need for notification if user is looking right at the window
return
continue
}
// we can't download the email if we don't have access to credentials
const canShowExtendedNotification =
(await this.nativeCredentialFacade.getCredentialEncryptionMode()) === CredentialEncryptionMode.DEVICE_LOCK &&
(await this.sseStorage.getExtendedNotificationConfig(notificationInfo.userId)) !== ExtendedNotificationMode.NoSenderOrSubject
(await this.sseStorage.getExtendedNotificationConfig(firstNotificationInfo.userId)) !== ExtendedNotificationMode.NoSenderOrSubject
if (!canShowExtendedNotification) {
const notificationId = notificationInfo.mailId
? `${notificationInfo.mailId.listId},${notificationInfo.mailId?.listElementId}`
: notificationInfo.userId
this.notifier.submitGroupedNotification(this.lang.get("pushNewMail_msg"), notificationInfo.mailAddress, notificationId, (res) =>
this.onMailNotificationClick(res, notificationInfo),
const notificationId = firstNotificationInfo.mailId
? `${firstNotificationInfo.mailId.listId},${firstNotificationInfo.mailId?.listElementId}`
: firstNotificationInfo.userId
this.notifier.submitGroupedNotification(this.lang.get("pushNewMail_msg"), firstNotificationInfo.mailAddress, notificationId, (res) =>
this.onMailNotificationClick(res, firstNotificationInfo),
)
return
} else {
const credentials = await this.nativeCredentialFacade.loadByUserId(firstNotificationInfo.userId)
if (credentials == null) {
log.warn(`Could not find credentials to download notification, userId ${firstNotificationInfo.userId}`)
continue
}
const mailMetadata = await this.downloadMailMetadata(sseInfo, notificationInfo)
if (mailMetadata == null) return
this.notifier.submitGroupedNotification(mailMetadata.senderAddress, mailMetadata.firstRecipientAddress ?? "", mailMetadata.id.join(","), (res) =>
this.onMailNotificationClick(res, notificationInfo),
const infosToFetch = infos.slice(0, 5) // don't show notifications for more than five mails at a time
const mailMetadata = await this.downloadMailMetadata(sseInfo, listId, infosToFetch, credentials)
for (const mailMeta of mailMetadata) {
this.notifier.submitGroupedNotification(mailMeta.senderAddress, mailMeta.firstRecipientAddress ?? "", mailMeta.id.join(","), (res) =>
this.onMailNotificationClick(res, mailMeta.notificationInfo),
)
}
}
}
}
private onMailNotificationClick(res: NotificationResult, notificationInfo: StrippedEntity<NotificationInfo>) {
if (res === NotificationResult.Click) {
@ -91,15 +105,21 @@ export class TutaNotificationHandler {
}
}
private async downloadMailMetadata(sseInfo: SseInfo, ni: StrippedEntity<NotificationInfo>): Promise<MailMetadata | null> {
const url = this.makeMailMetadataUrl(sseInfo, assertNotNull(ni.mailId))
private async downloadMailMetadata(
sseInfo: SseInfo,
listId: Id,
notificationInfos: Array<StrippedEntity<NotificationInfo>>,
credentials: UnencryptedCredentials,
): Promise<Array<MailMetadata>> {
const result: Array<MailMetadata> = []
// decrypt access token
const credentials = await this.nativeCredentialFacade.loadByUserId(ni.userId)
if (credentials == null) {
log.warn(`Not found credentials to download notification, userId ${ni.userId}`)
return null
}
const first = notificationInfos[0]
const url = this.makeMailMetadataUrl(
sseInfo,
assertNotNull(listId),
notificationInfos.map((ni) => assertNotNull(ni.mailId)),
)
log.debug(TAG, "downloading mail notification metadata")
const headers: Record<string, string> = {
@ -114,22 +134,39 @@ export class TutaNotificationHandler {
throw handleRestError(neverNull(response.status), url.toString(), response.headers.get("Error-Id"), null)
}
const parsedResponse = await response.json()
const untypedInstances = (await response.json()) as Array<ServerModelUntypedInstance>
const mailModel = await this.typeModelResolver.resolveClientTypeReference(MailTypeRef)
const mailAddressModel = await this.typeModelResolver.resolveClientTypeReference(MailAddressTypeRef)
result.push(
...(await Promise.all(
untypedInstances.map(async (untypedInstance) => {
const mailEncryptedParsedInstance: EncryptedParsedInstance = await this.nativeInstancePipeline.typeMapper.applyJsTypes(
mailModel,
parsedResponse as ServerModelUntypedInstance,
untypedInstance,
)
const notificationInfo = notificationInfos.filter(
(info) =>
assertNotNull(info.mailId).listElementId ===
AttributeModel.getAttribute<IdTuple>(mailEncryptedParsedInstance, "_id", mailModel)[1],
)[0]
return this.encryptedMailToMailMetaData(mailModel, mailAddressModel, mailEncryptedParsedInstance, notificationInfo)
}),
)),
)
return this.encryptedMailToMailMetaData(mailModel, mailAddressModel, mailEncryptedParsedInstance)
} catch (e) {
log.debug(TAG, "Error fetching mail metadata, " + (e as Error).message)
return null
}
return result
}
private encryptedMailToMailMetaData(mailModel: TypeModel, mailAddressModel: TypeModel, mi: EncryptedParsedInstance): MailMetadata {
private encryptedMailToMailMetaData(
mailModel: TypeModel,
mailAddressModel: TypeModel,
mi: EncryptedParsedInstance,
notificationInfo: StrippedEntity<NotificationInfo>,
): MailMetadata {
const mailId = AttributeModel.getAttribute<IdTuple>(mi, "_id", mailModel)
const firstRecipient = AttributeModel.getAttributeorNull<EncryptedParsedInstance[] | null>(mi, "firstRecipient", mailModel)
@ -140,12 +177,15 @@ export class TutaNotificationHandler {
id: mailId,
senderAddress: senderAddress,
firstRecipientAddress: firstRecipient ? AttributeModel.getAttribute(firstRecipient[0], "address", mailAddressModel) : null,
notificationInfo,
}
}
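/**
 * Builds the batched mail metadata URL, e.g. rest/tutanota/mail/<listId>?ids=<elementId1>,<elementId2>
 * where the list id and element ids are base64url-encoded.
 */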
private makeMailMetadataUrl(sseInfo: SseInfo, mailId: IdTupleWrapper): URL {
private makeMailMetadataUrl(sseInfo: SseInfo, listId: Id, mailIds: Array<IdTupleWrapper>): URL {
const url = new URL(sseInfo.sseOrigin)
url.pathname = `rest/tutanota/mail/${base64ToBase64Url(mailId.listId)}/${base64ToBase64Url(mailId.listElementId)}`
const listElementIds = mailIds.map((mailId) => base64ToBase64Url(mailId.listElementId)).join(",")
url.pathname = `rest/tutanota/mail/${base64ToBase64Url(listId)}`
url.searchParams.set("ids", listElementIds)
return url
}

View file

@ -136,10 +136,12 @@ export class TutaSseFacade implements SseEventHandler {
await this.sseStorage.recordMissedNotificationCheckTime()
const sseInfo = this.currentSseInfo
if (sseInfo == null) return
for (const notificationInfoUntyped of encryptedMissedNotification.notificationInfos) {
const notificationInfo = await this.nativeInstancePipeline.decryptAndMap(NotificationInfoTypeRef, notificationInfoUntyped, null)
await this.notificationHandler.onMailNotification(sseInfo, notificationInfo)
}
const notificationInfos = await Promise.all(
encryptedMissedNotification.notificationInfos.map(
async (notificationInfoUntyped) => await this.nativeInstancePipeline.decryptAndMap(NotificationInfoTypeRef, notificationInfoUntyped, null),
),
)
await this.notificationHandler.onMailNotification(sseInfo, notificationInfos)
await this.handleAlarmNotification(encryptedMissedNotification)
}
@ -187,7 +189,7 @@ export class TutaSseFacade implements SseEventHandler {
const sseInfo = assertNotNull(this.currentSseInfo)
const url = this.makeMissedNotificationUrl(sseInfo)
log.debug("downloading missed notification")
log.debug("downloading missed notification", url)
const headers: Record<string, string> = {
userIds: sseInfo.userIds[0],
v: typeModels[MissedNotificationTypeRef.typeId].version,

View file

@ -9,7 +9,7 @@ export const enum NotificationType {
export class Notifications {
showNotification(type: NotificationType, title: string, options?: NotificationOptions, onclick: Notification["onclick"] = noOp): Notification | null {
if (!isApp() && typeof window.Notification !== "undefined" && window.Notification.permission === "granted") {
if (!isApp() && !isDesktop() && typeof window.Notification !== "undefined" && window.Notification.permission === "granted") {
try {
const actualOptions: NotificationOptions = Object.assign(
{},

View file

@ -18,8 +18,6 @@ import {
splitInChunks,
} from "@tutao/tutanota-utils"
import {
ConversationEntry,
ConversationEntryTypeRef,
Mail,
MailboxGroupRoot,
MailboxProperties,
@ -39,13 +37,13 @@ import {
SimpleMoveMailTarget,
} from "../../../common/api/common/TutanotaConstants.js"
import { CUSTOM_MIN_ID, elementIdPart, getElementId, listIdPart } from "../../../common/api/common/utils/EntityUtils.js"
import { containsEventOfType, EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import { EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import m from "mithril"
import { WebsocketCounterData } from "../../../common/api/entities/sys/TypeRefs.js"
import { Notifications, NotificationType } from "../../../common/gui/Notifications.js"
import { lang } from "../../../common/misc/LanguageViewModel.js"
import { ProgrammingError } from "../../../common/api/common/error/ProgrammingError.js"
import { NotAuthorizedError, NotFoundError, PreconditionFailedError } from "../../../common/api/common/error/RestError.js"
import { NotFoundError, PreconditionFailedError } from "../../../common/api/common/error/RestError.js"
import { UserError } from "../../../common/api/main/UserError.js"
import { EventController } from "../../../common/api/main/EventController.js"
import { InboxRuleHandler } from "./InboxRuleHandler.js"
@ -156,11 +154,7 @@ export class MailModel {
if (isUpdateForTypeRef(MailFolderTypeRef, update)) {
await this.init()
m.redraw()
} else if (
isUpdateForTypeRef(MailTypeRef, update) &&
update.operation === OperationType.CREATE &&
!containsEventOfType(updates, OperationType.DELETE, update.instanceId)
) {
} else if (isUpdateForTypeRef(MailTypeRef, update) && update.operation === OperationType.CREATE) {
if (this.inboxRuleHandler && this.connectivityModel) {
const mailId: IdTuple = [update.instanceListId, update.instanceId]
try {
@ -593,45 +587,6 @@ export class MailModel {
return [...this.mailSets.values()].filter((f) => f.folders.importedMailSet).map((f) => f.folders.importedMailSet!)
}
async loadConversationsForAllMails(mails: ReadonlyArray<Mail>): Promise<ReadonlyArray<Mail>> {
let conversationEntries: ConversationEntry[] = []
for (const mail of mails) {
await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(mail.conversationEntry)).then(
async (entries) => {
conversationEntries.push(...entries)
},
async (e) => {
// Most likely the conversation entry list does not exist anymore. The server does not distinguish between the case when the
// list does not exist and when we have no permission on it (and for good reasons, it prevents enumeration).
// Most often it happens when we are not fully synced with the server yet and the primary mail does not even exist.
if (!(e instanceof NotAuthorizedError)) {
throw e
}
},
)
}
// If there are no conversationEntries (somehow they didn't load), just return the mails back
if (conversationEntries.length === 0) {
return mails
}
const byList = groupBy(conversationEntries, (c) => c.mail && listIdPart(c.mail))
const allMails: Mail[] = []
for (const [listId, conversations] of byList.entries()) {
if (!listId) continue
const loaded = await this.entityClient.loadMultiple(
MailTypeRef,
listId,
conversations.map((c) => elementIdPart(assertNotNull(c.mail))),
)
allMails.push(...loaded)
}
return allMails
}
/** Resolve conversation list ids to the IDs of mails in those conversations. */
async resolveConversationsForMails(mails: readonly Mail[]): Promise<IdTuple[]> {
return await this.mailFacade.resolveConversations(mails.map((m) => listIdPart(m.conversationEntry)))

View file

@ -203,35 +203,33 @@ export class ConversationViewModel {
return listIdPart(this._primaryViewModel.mail.conversationEntry)
}
private async loadConversation() {
private async loadConversation(): Promise<void> {
try {
// Catch errors but only for loading conversation entries.
// if success, proceed with loading mails
// otherwise do the error handling
try {
if (!this.showFullConversation()) {
this.conversation = this.conversationItemsForSelectedMailOnly()
} else {
// Catch errors but only for loading conversation entries.
// if success, proceed with loading mails
// otherwise do the error handling
this.conversation = await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(this.primaryMail.conversationEntry)).then(
async (entries) => {
const entries = await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(this.primaryMail.conversationEntry))
// if the primary mail is not along conversation then only display the primary mail
if (!entries.some((entry) => isSameId(entry.mail, this.primaryMail._id))) {
return this.conversationItemsForSelectedMailOnly()
this.conversation = this.conversationItemsForSelectedMailOnly()
} else {
const allMails = await this.loadMails(entries)
return this.createConversationItems(entries, allMails)
this.conversation = this.createConversationItems(entries, allMails)
}
},
async (e) => {
}
} catch (e) {
if (e instanceof NotAuthorizedError) {
// Most likely the conversation entry list does not exist anymore. The server does not distinguish between the case when the
// list does not exist and when we have no permission on it (and for good reasons, it prevents enumeration).
// Most often it happens when we are not fully synced with the server yet and the primary mail does not even exist.
return this.conversationItemsForSelectedMailOnly()
this.conversation = this.conversationItemsForSelectedMailOnly()
} else {
throw e
}
},
)
}
} finally {
this.onUiUpdate()

View file

@ -677,6 +677,9 @@ export class MailViewModel {
instanceListId: importedFolder.entries,
operation: OperationType.CREATE,
typeRef: MailSetEntryTypeRef,
instance: null,
patches: null,
isPrefetched: false,
})
})
}

View file

@ -259,19 +259,6 @@ export class MailViewerViewModel {
}
m.redraw()
// We need the conversation entry in order to reply to the message.
// We don't want the user to have to wait for it to load when they click reply,
// So we load it here pre-emptively to make sure it is in the cache.
this.entityClient.load(ConversationEntryTypeRef, this.mail.conversationEntry).catch((e) => {
if (e instanceof NotFoundError) {
console.log("could not load conversation entry as it has been moved/deleted already", e)
} else if (isOfflineError(e)) {
console.log("failed to load conversation entry, because of a lost connection", e)
} else {
throw e
}
})
} finally {
this.renderIsDelayed = false
}

View file

@ -59,7 +59,7 @@ import { LoginController } from "../../../common/api/main/LoginController.js"
import { EntityClient, loadMultipleFromLists } from "../../../common/api/common/EntityClient.js"
import { SearchRouter } from "../../../common/search/view/SearchRouter.js"
import { MailOpenedListener } from "../../mail/view/MailViewModel.js"
import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import { EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import { CalendarInfo } from "../../../calendar-app/calendar/model/CalendarModel.js"
import { locator } from "../../../common/api/main/CommonLocator.js"
import m from "mithril"
@ -309,51 +309,10 @@ export class SearchViewModel {
private readonly entityEventsListener: EntityEventsListener = async (updates) => {
for (const update of updates) {
const mergedUpdate = this.mergeOperationsIfNeeded(update, updates)
if (mergedUpdate == null) continue
await this.entityEventReceived(mergedUpdate)
await this.entityEventReceived(update)
}
}
private mergeOperationsIfNeeded(update: EntityUpdateData, updates: readonly EntityUpdateData[]): EntityUpdateData | null {
// We are trying to keep the mails that are moved and would match the search criteria displayed.
// This is a bit hacky as we reimplement part of the filtering by list.
// Ideally search result would update by itself and we would only need to reconcile the changes.
if (!isUpdateForTypeRef(MailTypeRef, update) || this.searchResult == null) {
return update
}
if (update.operation === OperationType.CREATE && containsEventOfType(updates, OperationType.DELETE, update.instanceId)) {
// This is a move operation, is destination list included in the restrictions?
if (this.listIdMatchesRestriction(update.instanceListId, this.searchResult.restriction)) {
// If it's included, we want to keep showing the item but we will simulate the UPDATE
return { ...update, operation: OperationType.UPDATE }
} else {
// If it's not going to be included we might as well skip the create operation
return null
}
} else if (update.operation === OperationType.DELETE && containsEventOfType(updates, OperationType.CREATE, update.instanceId)) {
// This is a move operation and we are in the delete part of it.
// Grab the other part to check the move destination.
const createOperation = assertNotNull(getEventOfType(updates, OperationType.CREATE, update.instanceId))
// Is destination included in the search?
if (this.listIdMatchesRestriction(createOperation.instanceListId, this.searchResult.restriction)) {
// If so, skip the delete.
return null
} else {
// Otherwise delete
return update
}
} else {
return update
}
}
private listIdMatchesRestriction(listId: string, restriction: SearchRestriction): boolean {
return restriction.folderIds.length === 0 || restriction.folderIds.includes(listId)
}
onNewUrl(args: Record<string, any>, requestedPath: string) {
const query = args.query ?? ""
let restriction

View file

@ -110,7 +110,7 @@ export class IndexedDbIndexer implements Indexer {
_indexedGroupIds: Array<Id>
/** @private visibleForTesting */
readonly eventQueue = new EventQueue("indexer", true, (batch) => this._processEntityEvents(batch))
readonly eventQueue = new EventQueue("indexer", (batch) => this._processEntityEvents(batch))
constructor(
private readonly serverDateProvider: DateProvider,
@ -127,7 +127,7 @@ export class IndexedDbIndexer implements Indexer {
// correctly initialized during init()
this._indexedGroupIds = []
this.initiallyLoadedBatchIdsPerGroup = new Map()
this._realtimeEventQueue = new EventQueue("indexer_realtime", false, (nextElement: QueuedBatch) => {
this._realtimeEventQueue = new EventQueue("indexer_realtime", (nextElement: QueuedBatch) => {
// During initial loading we remember the last batch we loaded
// so if we get updates from EventBusClient here for things that are already loaded we discard them
const loadedIdForGroup = this.initiallyLoadedBatchIdsPerGroup.get(nextElement.groupId)

View file

@ -104,6 +104,8 @@ import { DateProvider } from "../../../common/api/common/DateProvider"
import type { ContactSearchFacade } from "../index/ContactSearchFacade"
import type { IndexedDbSearchFacade } from "../index/IndexedDbSearchFacade.js"
import type { OfflineStorageSearchFacade } from "../index/OfflineStorageSearchFacade.js"
import { PatchMerger } from "../../../common/api/worker/offline/PatchMerger"
import { EventInstancePrefetcher } from "../../../common/api/worker/EventInstancePrefetcher"
assertWorkerOrNode()
@ -115,6 +117,7 @@ export type WorkerLocatorType = {
asymmetricCrypto: AsymmetricCryptoFacade
crypto: CryptoFacade
instancePipeline: InstancePipeline
patchMerger: PatchMerger
applicationTypesFacade: ApplicationTypesFacade
cacheStorage: CacheStorage
cache: EntityRestInterface
@ -341,10 +344,12 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
return new PdfWriter(new TextEncoder(), undefined)
}
locator.patchMerger = new PatchMerger(locator.cacheStorage, locator.instancePipeline, typeModelResolver, () => locator.crypto)
// We don't want to cache within the admin client
let cache: DefaultEntityRestCache | null = null
if (!isAdminClient()) {
cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver)
cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver, locator.patchMerger)
}
locator.cache = cache ?? entityRestClient
@ -370,7 +375,7 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
// We create empty CustomCacheHandlerMap because this cache is separate anyway and user updates don't matter.
const cacheStorage = new EphemeralCacheStorage(locator.instancePipeline.modelMapper, typeModelResolver, new CustomCacheHandlerMap())
return new BulkMailLoader(
new EntityClient(new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver), typeModelResolver),
new EntityClient(new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver, locator.patchMerger), typeModelResolver),
new EntityClient(entityRestClient, typeModelResolver),
mailFacade,
)
@ -703,7 +708,7 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
await indexer.processEntityEvents(events, batchId, groupId)
},
)
const prefetcher = new EventInstancePrefetcher(locator.cache)
locator.eventBusClient = new EventBusClient(
eventBusCoordinator,
cache ?? new AdminClientDummyEntityRestCache(),
@ -715,6 +720,8 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
mainInterface.progressTracker,
mainInterface.syncTracker,
typeModelResolver,
locator.crypto,
prefetcher,
)
locator.login.init(locator.eventBusClient)
locator.Const = Const
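A rough sketch of the kind of prefetching this wiring enables; the EventInstancePrefetcher internals are not part of this hunk, so the grouping below is an assumption, and entityClient/updates are placeholder names:
// group updates per (typeRef, listId) pair so each list is fetched with one request
const updatesByList = groupBy(
updates.filter((u) => u.instanceListId != null),
(u) => getTypeString(u.typeRef) + "/" + u.instanceListId,
)
for (const [, group] of updatesByList) {
const first = getFirstOrThrow(group)
// warms the cache via a single loadMultiple instead of one load per update
await entityClient.loadMultiple(
first.typeRef,
assertNotNull(first.instanceListId),
group.map((u) => u.instanceId),
)
}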

View file

@ -157,8 +157,10 @@ import "./api/worker/search/OfflineStorageContactIndexerBackendTest.js"
import "./api/worker/search/OfflineStorageContactSearchFacadeTest.js"
import "./api/worker/rest/CustomUserCacheHandlerTest.js"
import "./api/common/utils/QueryTokenUtilsTest.js"
import "./api/worker/offline/PatchMergerTest.js"
import "./contacts/ContactModelTest.js"
import "./api/worker/search/OfflinestorageIndexerTest.js"
import "./api/worker/EventInstancePrefetcherTest.js"
import * as td from "testdouble"
import { random } from "@tutao/tutanota-crypto"

View file

@ -2,13 +2,13 @@ import type { BrowserData } from "../../src/common/misc/ClientConstants.js"
import { DbEncryptionData } from "../../src/common/api/worker/search/SearchTypes.js"
import { IndexerCore } from "../../src/mail-app/workerUtils/index/IndexerCore.js"
import { DbFacade, DbTransaction } from "../../src/common/api/worker/search/DbFacade.js"
import { assertNotNull, clone, deepEqual, defer, Thunk, typedEntries, TypeRef } from "@tutao/tutanota-utils"
import { assertNotNull, clone, deepEqual, defer, isNotNull, Thunk, typedEntries, TypeRef } from "@tutao/tutanota-utils"
import type { DesktopKeyStoreFacade } from "../../src/common/desktop/DesktopKeyStoreFacade.js"
import { mock } from "@tutao/tutanota-test-utils"
import { aes256RandomKey, fixedIv, uint8ArrayToKey } from "@tutao/tutanota-crypto"
import { ScheduledPeriodicId, ScheduledTimeoutId, Scheduler } from "../../src/common/api/common/utils/Scheduler.js"
import { matchers, object, when } from "testdouble"
import { Entity, ModelValue, TypeModel } from "../../src/common/api/common/EntityTypes.js"
import { Entity, ModelValue, ParsedInstance, TypeModel } from "../../src/common/api/common/EntityTypes.js"
import { create } from "../../src/common/api/common/utils/EntityUtils.js"
import { ClientModelInfo, ServerModelInfo, ServerModels, TypeModelResolver } from "../../src/common/api/common/EntityFunctions.js"
import { type fetch as undiciFetch, type Response } from "undici"
@ -280,7 +280,7 @@ The last expected item is ${JSON.stringify(expectedArray.at(-1))} but got ${JSON
}
}
export function removeFinalIvs(instance: Entity): Entity {
export function removeFinalIvs(instance: Entity | ParsedInstance): Entity | ParsedInstance {
delete instance["_finalIvs"]
delete instance["_original"]
const keys = Object.keys(instance)
@ -293,18 +293,11 @@ export function removeFinalIvs(instance: Entity): Entity {
return instance
}
export function removeOriginals(instance: Entity | null): Entity | null {
if (instance === null) {
return null
}
if (instance["_original"]) {
export function removeOriginals<T extends Entity>(instance: T | null): T | null {
if (isNotNull(instance) && typeof instance == "object") {
delete instance["_original"]
}
const keys = Object.keys(instance)
for (const key of keys) {
const maybeAggregate = instance[key]
if (maybeAggregate instanceof Object) {
removeOriginals(maybeAggregate)
for (const i of Object.values(instance).filter(isNotNull)) {
removeOriginals(i)
}
}
return instance

View file

@ -103,39 +103,6 @@ o.spec("EntityUtils", function () {
removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals(originalEntity)
})
o("it removes _finalEncrypted fields directly on the entity", function () {
const originalEntity = { ...makeEntity(), _finalEncryptedThing: [1, 2, 3] }
const entityCopy = clone(originalEntity)
removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals({
_id: "test",
_type: typeRef,
_ownerGroup: null,
_ownerEncSessionKey: null,
})
})
o("it removes _finalEncrypted fields deeper in the entity", function () {
const originalEntity = {
...makeEntity(),
nested: {
test: "yes",
_finalEncryptedThing: [1, 2, 3],
},
}
const entityCopy = clone(originalEntity)
removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals({
_id: "test",
_type: typeRef,
_ownerGroup: null,
_ownerEncSessionKey: null,
nested: {
test: "yes",
},
})
})
})
o.spec("computePatches", function () {
@ -433,8 +400,18 @@ o.spec("EntityUtils", function () {
o("computePatches works on aggregations and additem operation", async function () {
const testEntity = await createFilledTestEntity()
testEntity.testAssociation.push(await createTestEntityWithDummyResolver(TestAggregateRef, { _id: "newAgId" }))
testEntity.testAssociation.push(await createTestEntityWithDummyResolver(TestAggregateRef, { _id: "newAgId2" }))
testEntity.testAssociation.push(
await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId",
testNumber: "1",
}),
)
testEntity.testAssociation.push(
await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId2",
testNumber: "2",
}),
)
let sk = aes256RandomKey()
const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(
@ -528,6 +505,34 @@ o.spec("EntityUtils", function () {
])
})
o("computePatches works on aggregations of cardinality zeroorone", async function () {
const testEntity = await createFilledTestEntity()
testEntity.testAssociation[0].testZeroOrOneAggregation = null
let sk = aes256RandomKey()
const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(
TestTypeRef,
assertNotNull(testEntity._original),
)
const currentParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(TestTypeRef, testEntity)
const currentUntypedInstance = await dummyInstancePipeline.mapAndEncrypt(TestTypeRef, testEntity, sk)
let objectDiff = await computePatches(
originalParsedInstance,
currentParsedInstance,
currentUntypedInstance,
testTypeModel,
dummyTypeReferenceResolver,
false,
)
o(objectDiff).deepEquals([
createPatch({
attributePath: "3/aggId/10",
value: '["aggOnAggId"]',
patchOperation: PatchOperationType.REMOVE_ITEM,
}),
])
})
o("computePatches works on aggregates on aggregations and additem operation", async function () {
const testEntity = await createFilledTestEntity()
@ -634,6 +639,12 @@ o.spec("EntityUtils", function () {
testBytes: null,
} as TestAggregateOnAggregate,
],
testZeroOrOneAggregation: {
_type: TestAggregateOnAggregateRef,
_finalIvs: {},
_id: "aggOnAggId",
testBytes: null,
} as TestAggregateOnAggregate,
} as TestAggregate,
],
testBoolean: false,

View file

@ -32,6 +32,8 @@ import { SyncTracker } from "../../../../src/common/api/main/SyncTracker.js"
import { InstancePipeline } from "../../../../src/common/api/worker/crypto/InstancePipeline"
import { TypeModelResolver } from "../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
import { CryptoFacade } from "../../../../src/common/api/worker/crypto/CryptoFacade"
import { EventInstancePrefetcher } from "../../../../src/common/api/worker/EventInstancePrefetcher"
o.spec("EventBusClientTest", function () {
let ebc: EventBusClient
@ -48,6 +50,8 @@ o.spec("EventBusClientTest", function () {
let socketFactory: (path: string) => WebSocket
let typeModelResolver: TypeModelResolver
let entityClient: EntityClient
let cryptoFacadeMock: CryptoFacade
let eventInstancePrefetcher: EventInstancePrefetcher
function initEventBus() {
ebc = new EventBusClient(
@ -61,6 +65,8 @@ o.spec("EventBusClientTest", function () {
progressTrackerMock,
syncTrackerMock,
typeModelResolver,
cryptoFacadeMock,
eventInstancePrefetcher,
)
}
@ -82,6 +88,7 @@ o.spec("EventBusClientTest", function () {
listenerMock = object()
progressTrackerMock = object()
syncTrackerMock = object()
eventInstancePrefetcher = object()
cacheMock = object({
async entityEventsReceived(events): Promise<ReadonlyArray<EntityUpdateData>> {
return events.slice()
@ -114,6 +121,7 @@ o.spec("EventBusClientTest", function () {
when(userMock.getLoggedInUser()).thenReturn(user)
when(userMock.isFullyLoggedIn()).thenReturn(true)
when(userMock.createAuthHeaders()).thenReturn({})
when(eventInstancePrefetcher.preloadEntities(matchers.anything(), matchers.anything())).thenResolve()
restClient = new EntityRestClientMock()
@ -124,6 +132,7 @@ o.spec("EventBusClientTest", function () {
typeModelResolver = clientInitializedTypeModelResolver()
entityClient = new EntityClient(restClient, typeModelResolver)
instancePipeline = instancePipelineFromTypeModelResolver(typeModelResolver)
cryptoFacadeMock = object()
initEventBus()
})
@ -174,6 +183,9 @@ o.spec("EventBusClientTest", function () {
operation: OperationType.CREATE,
instanceId: update.instanceId,
instanceListId: update.instanceListId,
instance: null,
patches: null,
isPrefetched: false,
}
const eventsReceivedDefer = defer()
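For orientation, the update objects assembled in these tests follow the EntityUpdateData shape extended by this commit; below is a minimal sketch with field types assumed from their usage here, not the actual declaration in EntityUpdateUtils:
// hypothetical sketch of the extended update shape; the real type imports
// TypeRef, OperationType and Patch from the client model
interface EntityUpdateDataSketch<TypeRefLike> {
	typeRef: TypeRefLike
	instanceListId: string // "" for element types that are not part of a list
	instanceId: string
	operation: "CREATE" | "UPDATE" | "DELETE"
	instance: object | null // full instance sent along by the server for CREATE events
	patches: object[] | null // patch list sent along by the server for UPDATE events
	isPrefetched: boolean // set once the EventInstancePrefetcher has preloaded the instance
}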

View file

@ -73,12 +73,18 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceId: userId,
instanceListId: "",
operation: OperationType.UPDATE,
instance: null,
patches: null,
isPrefetched: false,
},
{
typeRef: UserGroupKeyDistributionTypeRef,
instanceId: userGroupId,
instanceListId: "",
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
},
]
@ -97,6 +103,9 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceId: userId,
instanceListId: "",
operation: OperationType.UPDATE,
instance: null,
patches: null,
isPrefetched: false,
},
]
@ -117,6 +126,9 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceListId,
instanceId,
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
},
]

View file

@ -0,0 +1,662 @@
import o from "@tutao/otest"
import { CacheStorage, DefaultEntityRestCache, EntityRestCache } from "../../../../src/common/api/worker/rest/DefaultEntityRestCache"
import { UserFacade } from "../../../../src/common/api/worker/facades/UserFacade"
import { EntityUpdateTypeRef, GroupMembershipTypeRef, User, UserTypeRef } from "../../../../src/common/api/entities/sys/TypeRefs"
import { TypeModelResolver } from "../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData, entityUpdateToUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
import { clientInitializedTypeModelResolver, createTestEntity, modelMapperFromTypeModelResolver } from "../../TestUtils"
import { CalendarEventTypeRef, MailDetailsBlobTypeRef, MailTypeRef } from "../../../../src/common/api/entities/tutanota/TypeRefs"
import { OperationType } from "../../../../src/common/api/common/TutanotaConstants"
import { matchers, object, verify, when } from "testdouble"
import { downcast, getTypeString, promiseMap } from "@tutao/tutanota-utils"
import { EventInstancePrefetcher } from "../../../../src/common/api/worker/EventInstancePrefetcher"
import { CacheMode, EntityRestClient, EntityRestClientLoadOptions } from "../../../../src/common/api/worker/rest/EntityRestClient"
import { elementIdPart, listIdPart, timestampToGeneratedId } from "../../../../src/common/api/common/utils/EntityUtils"
import { Entity, ServerModelParsedInstance } from "../../../../src/common/api/common/EntityTypes"
import { mapToObject } from "@tutao/tutanota-test-utils"
import { ProgressMonitorDelegate } from "../../../../src/common/api/worker/ProgressMonitorDelegate"
o.spec("EventInstancePrefetcherTest", function () {
let cacheStorage: CacheStorage
let entityCacheClient: EntityRestCache
let entityRestClient: EntityRestClient
let userMock: UserFacade
let user: User
let typeModelResolver: TypeModelResolver
let eventInstancePrefetcher: EventInstancePrefetcher
const fetchBlobOpt: EntityRestClientLoadOptions = { cacheMode: CacheMode.ReadAndWrite }
const fetchInstanceOpt: EntityRestClientLoadOptions = { cacheMode: CacheMode.WriteOnly }
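// note (assumption): WriteOnly presumably skips reading from the cache, so prefetched
// instances are always fetched fresh and then written, while mailDetails blobs use
// ReadAndWrite and may be served from the cache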
let modelMapper
let progressMonitorMock: ProgressMonitorDelegate
let id1: Id = timestampToGeneratedId(2)
let id2: Id = timestampToGeneratedId(3)
let id3: Id = timestampToGeneratedId(4)
let id4: Id = timestampToGeneratedId(5)
o.beforeEach(async function () {
cacheStorage = object<CacheStorage>()
entityRestClient = object()
progressMonitorMock = object()
typeModelResolver = clientInitializedTypeModelResolver()
modelMapper = modelMapperFromTypeModelResolver(typeModelResolver)
user = createTestEntity(UserTypeRef, {
userGroup: createTestEntity(GroupMembershipTypeRef, {
group: "userGroupId",
}),
})
userMock = object("user")
when(userMock.getLoggedInUser()).thenReturn(user)
when(userMock.isFullyLoggedIn()).thenReturn(true)
when(userMock.createAuthHeaders()).thenReturn({})
when(entityRestClient.mapInstancesToEntity(matchers.anything(), matchers.anything())).thenDo((typeRef, parsedInstances) => {
return promiseMap(parsedInstances, (parsedInstance) => modelMapper.mapToInstance(typeRef, parsedInstance))
})
entityCacheClient = new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver, object())
eventInstancePrefetcher = new EventInstancePrefetcher(entityCacheClient)
})
async function toStorableInstance(entity: Entity): Promise<ServerModelParsedInstance> {
return downcast<ServerModelParsedInstance>(await modelMapper.mapToClientModelParsedInstance(entity._type, entity))
}
o("When there is at least one element per list - fetch all of em", async () => {
const updateTemplate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
_id: "eventBatch",
application: "tutanota",
typeId: MailTypeRef.typeId.toString(),
operation: OperationType.CREATE,
instance: null,
}),
)
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id2,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id1,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id2,
})
when(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "firstListId", Array.of(id1, id2), undefined, fetchInstanceOpt)).thenResolve([])
when(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "secondListId", Array.of(id1, id2), undefined, fetchInstanceOpt)).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
verify(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "firstListId", Array.of(id1, id2), undefined, fetchInstanceOpt), { times: 1 })
verify(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "secondListId", Array.of(id1, id2), undefined, fetchInstanceOpt), { times: 1 })
})
o("Do not prefetch element type", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: UserTypeRef,
instanceId: id1,
instanceListId: "",
operation: OperationType.CREATE,
patches: null,
instance: null,
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), { instanceId: id1 })
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), { instanceId: id2 })
const allEventsFromAllBatch = Array.of(firstUpdate, secondUpdate)
const instancesToFetch = await eventInstancePrefetcher.groupedListElementUpdatedInstances(allEventsFromAllBatch, progressMonitorMock)
o(mapToObject(instancesToFetch)).deepEquals({})
})
// make sure instances that are deleted are not fetched, otherwise the whole request would fail with NotFound;
// a sketch of this grouping follows this test
o("When an instance is deleted at the end, still fetch the previous events", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: CalendarEventTypeRef,
operation: OperationType.CREATE,
instance: null,
patches: null,
instanceListId: "",
instanceId: "",
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), { instanceId: id2 })
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id3,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(thirdUpdate), { operation: OperationType.DELETE })
const fifthUpdate: EntityUpdateData = Object.assign(structuredClone(thirdUpdate), {
instanceId: id4,
})
const allUpdates = Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate, fifthUpdate)
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(allUpdates, progressMonitorMock)).get(
getTypeString(updateTemplate.typeRef),
)!
o(mapToObject(instancesToFetch.get("firstListId")!)).deepEquals(
mapToObject(
new Map([
[id1, [0]],
[id2, [1]],
]),
),
)
const expectedSecondListWithoutDeleteIndex = mapToObject(
new Map([
[id3, [2]],
[id4, [4]],
]),
)
o(mapToObject(instancesToFetch.get("secondListId")!)).deepEquals(expectedSecondListWithoutDeleteIndex)
})
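The expected maps above fall out of a single grouping pass over the ordered updates; the following is a rough sketch under assumed names (the actual logic lives in EventInstancePrefetcher.groupedListElementUpdatedInstances), not the real implementation:
// typeString -> listId -> elementId -> indexes of the updates referencing it
type GroupedUpdates = Map<string, Map<string, Map<string, number[]>>>

function groupUpdates(updates: Array<{ typeString: string; listId: string; elementId: string; operation: string; hasPayload: boolean }>): GroupedUpdates {
	const grouped: GroupedUpdates = new Map()
	updates.forEach((update, index) => {
		// only list element types are prefetched; DELETE events and updates that
		// already carry an instance or patches payload are never recorded for fetching
		if (update.listId === "" || update.operation === "DELETE" || update.hasPayload) {
			return
		}
		const byList = getOrPut(grouped, update.typeString, () => new Map<string, Map<string, number[]>>())
		const byId = getOrPut(byList, update.listId, () => new Map<string, number[]>())
		getOrPut(byId, update.elementId, () => [] as number[]).push(index)
	})
	return grouped
}

function getOrPut<K, V>(map: Map<K, V>, key: K, make: () => V): V {
	let value = map.get(key)
	if (value === undefined) {
		value = make()
		map.set(key, value)
	}
	return value
}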
o("Returns indexes of multiple batches for a single element with multiple updates", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: CalendarEventTypeRef,
operation: OperationType.CREATE,
instance: null,
patches: null,
instanceListId: "",
instanceId: "",
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
operation: OperationType.UPDATE,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
operation: OperationType.UPDATE,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id2,
})
const allUpdates = Array.of(firstUpdate, secondUpdate, fourthUpdate, thirdUpdate)
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(allUpdates, progressMonitorMock)).get(
getTypeString(updateTemplate.typeRef),
)!
o(mapToObject(instancesToFetch.get("firstListId")!)).deepEquals(mapToObject(new Map([[id1, [0, 1, 3]]])))
o(mapToObject(instancesToFetch.get("secondListId")!)).deepEquals(mapToObject(new Map([[id2, [2]]])))
})
o("When a create event have a instance attached to it do not fetch it", async () => {
const testEntity = createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.CREATE,
instanceListId: "firstListId",
instanceId: id1,
instance: downcast({}),
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
})
const firstUpdate = await entityUpdateToUpdateData(typeModelResolver, testEntity, downcast({}))
const secondUpdate = Object.assign(structuredClone(firstUpdate), { instance: null, instanceId: id2 })
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock))
.get(getTypeString(MailTypeRef))!
.get(firstUpdate.instanceListId)!
const expectedOnlyUpdateWithoutInstance = mapToObject(new Map([[id2, [1]]]))
o(mapToObject(instancesToFetch)).deepEquals(expectedOnlyUpdateWithoutInstance)
})
o("When a update event have a patchList attached to it do not fetch it", async () => {
const firstUpdate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.UPDATE,
instanceListId: "firstListId",
instanceId: id1,
patch: downcast({ patches: [] }),
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
}),
)
const secondUpdate = Object.assign(structuredClone(firstUpdate), { patches: null, instanceId: id2 })
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock))
.get(getTypeString(MailTypeRef))!
.get(firstUpdate.instanceListId)!
o(mapToObject(instancesToFetch)).deepEquals(mapToObject(new Map([[id2, [1]]])))
})
o("Ignores update events for non list elements", async () => {
const firstUpdate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.UPDATE,
instanceListId: "",
instanceId: id1,
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
}),
)
const secondUpdate = Object.assign(structuredClone(firstUpdate), { instanceListId: "listId", instanceId: id2 })
const instancesToFetch = (
await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock)
).get(getTypeString(MailTypeRef))!
const expectedOnlyListElementInstance = mapToObject(new Map([["listId", new Map([[id2, [1]]])]]))
o(mapToObject(instancesToFetch)).deepEquals(expectedOnlyListElementInstance)
})
o("should load mailDetails for create mail event", async () => {
const firstMail = createTestEntity(
MailTypeRef,
{ _id: ["firstMailListId", id1], mailDetails: ["archiveId", "firstBlob"] },
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{ _id: ["firstMailListId", id2], mailDetails: ["archiveId", "secondBlob"] },
{ populateAggregates: true },
)
const thirdMail = createTestEntity(
MailTypeRef,
{ _id: ["secondMailListId", id3], mailDetails: ["archiveId", "thirdBlob"] },
{ populateAggregates: true },
)
const fourthMail = createTestEntity(
MailTypeRef,
{ _id: ["secondMailListId", id4], mailDetails: ["archiveId", "fourthBlob"] },
{ populateAggregates: true },
)
const firstUpdate: EntityUpdateData = {
instanceId: elementIdPart(firstMail._id),
instanceListId: listIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(secondMail._id),
instanceListId: listIdPart(secondMail._id),
operation: OperationType.UPDATE,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(thirdMail._id),
instanceListId: listIdPart(thirdMail._id),
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(fourthMail._id),
instanceListId: listIdPart(fourthMail._id),
})
when(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", matchers.anything(), matchers.anything(), matchers.anything()),
).thenResolve([])
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId, secondUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(firstMail), await toStorableInstance(secondMail)))
// even though thirdMail is in the same list as fourthMail, we simulate instances missing on the server side and return only one
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
fourthUpdate.instanceListId,
[thirdUpdate.instanceId, fourthUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(fourthMail)))
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
// TODO: check whether there are tests covering the per-archive loop (for (const [listId, mails] of mailDetailsByList.entries()) { ... }); see the sketch after this test
verify(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["firstBlob", "secondBlob"], matchers.anything(), fetchBlobOpt),
{
times: 1,
},
)
verify(entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["fourthBlob"], matchers.anything(), fetchBlobOpt), {
times: 1,
})
})
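The two verified blob requests follow from bucketing the mailDetails references per archive list before loading; a sketch under assumed names (compare the mailDetailsByList loop mentioned above):
interface MailLike {
	_id: [string, string] // [listId, elementId]
	mailDetails: [string, string] | null // [archiveId, blobElementId]
}

// bucket mailDetails blob ids by their archive list, so one loadMultiple
// request per archive is enough
function groupMailDetailsByArchive(mails: MailLike[]): Map<string, string[]> {
	const byArchive = new Map<string, string[]>()
	for (const mail of mails) {
		if (mail.mailDetails == null) continue
		const [archiveId, blobId] = mail.mailDetails
		const blobIds = byArchive.get(archiveId) ?? []
		blobIds.push(blobId)
		byArchive.set(archiveId, blobIds)
	}
	return byArchive
}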
o("should ignore all error while fetching", async () => {
const firstMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id1] })
const secondMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id2] })
const thirdMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id3] })
const fourthMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id4] })
const firstUpdate: EntityUpdateData = {
instanceId: elementIdPart(firstMail._id),
instanceListId: listIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(secondMail._id),
instanceId: elementIdPart(secondMail._id),
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(thirdMail._id),
instanceId: elementIdPart(thirdMail._id),
operation: OperationType.UPDATE,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(fourthMail._id),
instanceId: elementIdPart(fourthMail._id),
operation: OperationType.UPDATE,
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("first error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
secondUpdate.instanceListId,
[secondUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("second error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
thirdUpdate.instanceListId,
[thirdUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("third error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
fourthUpdate.instanceListId,
[fourthUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("fourth error"))
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
o(firstUpdate.isPrefetched).equals(false)
o(secondUpdate.isPrefetched).equals(false)
o(thirdUpdate.isPrefetched).equals(false)
o(fourthUpdate.isPrefetched).equals(false)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId, thirdUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
secondUpdate.instanceListId,
[secondUpdate.instanceId, fourthUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
})
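The behaviour asserted here amounts to settling each per-list request independently; a minimal sketch, assuming one loader thunk per (type, list) group:
// rejections are swallowed: a failed group only means its updates keep
// isPrefetched === false and will be loaded individually later on
async function preloadAllGroups(groupLoads: Array<() => Promise<unknown>>): Promise<void> {
	await Promise.allSettled(groupLoads.map((load) => load()))
}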
o("set preFetched flag to true for fetched instances", async () => {
const passMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id1] }, { populateAggregates: true })
const secondPassMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id3] }, { populateAggregates: true })
const failMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id2] }, { populateAggregates: true })
const secondFailMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id4] }, { populateAggregates: true })
const passingUpdate: EntityUpdateData = {
instanceId: elementIdPart(passMail._id),
instanceListId: listIdPart(passMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondPassingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(secondPassMail._id),
instanceId: elementIdPart(secondPassMail._id),
})
const failingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(failMail._id),
instanceId: elementIdPart(failMail._id),
})
const secondFailingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(secondFailMail._id),
instanceId: elementIdPart(secondFailMail._id),
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
passingUpdate.instanceListId,
[passingUpdate.instanceId, secondPassingUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(passMail), await toStorableInstance(secondPassMail)))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
failingUpdate.instanceListId,
[failingUpdate.instanceId, secondFailingUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(passingUpdate, failingUpdate, secondPassingUpdate, secondFailingUpdate), progressMonitorMock)
o(passingUpdate.isPrefetched).equals(true)
o(secondPassingUpdate.isPrefetched).equals(true)
o(failingUpdate.isPrefetched).equals(false)
o(secondFailingUpdate.isPrefetched).equals(false)
})
o("set preFetched flag to false for missing instances", async () => {
const firstMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id1] }, { populateAggregates: true })
const secondMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id2] }, { populateAggregates: true })
const thirdMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id3] }, { populateAggregates: true })
const firstMailUpdate: EntityUpdateData = {
typeRef: MailTypeRef,
instanceListId: "mailListId",
instanceId: elementIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}
const secondMailUpdate: EntityUpdateData = Object.assign(structuredClone(firstMailUpdate), { instanceId: elementIdPart(secondMail._id) })
const thirdMailUpdate: EntityUpdateData = Object.assign(structuredClone(firstMailUpdate), { instanceId: elementIdPart(thirdMail._id) })
// only return first & third mail
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
"mailListId",
[firstMailUpdate.instanceId, secondMailUpdate.instanceId, thirdMailUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(firstMail), await toStorableInstance(thirdMail)))
await eventInstancePrefetcher.preloadEntities(Array.of(firstMailUpdate, secondMailUpdate, thirdMailUpdate), progressMonitorMock)
o(firstMailUpdate.isPrefetched).equals(true)
o(thirdMailUpdate.isPrefetched).equals(true)
o(secondMailUpdate.isPrefetched).equals(false)
})
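The flag assertions reduce to marking every update of an instance as prefetched only when that instance was actually returned; a sketch with hypothetical parameter names:
function markPrefetched(updatesById: Map<string, { isPrefetched: boolean }[]>, returnedIds: Set<string>): void {
	for (const [elementId, updates] of updatesById) {
		if (!returnedIds.has(elementId)) continue // missing instances stay unprefetched
		for (const update of updates) {
			update.isPrefetched = true // CREATE and UPDATE events of the same instance alike
		}
	}
}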
o("Multiple events of same instance are marked as prefetched", async () => {
const createEvent: EntityUpdateData = {
typeRef: MailTypeRef,
instanceListId: "mailListId",
instanceId: id1,
operation: OperationType.CREATE,
patches: null,
instance: null,
isPrefetched: false,
}
const updateEvent: EntityUpdateData = Object.assign(structuredClone(createEvent), { operation: OperationType.UPDATE })
const createSecondEvent: EntityUpdateData = Object.assign(structuredClone(createEvent), {
instanceId: id2,
})
const updateSecondEvent: EntityUpdateData = Object.assign(structuredClone(createSecondEvent), {
operation: OperationType.UPDATE,
})
const mail = createTestEntity(
MailTypeRef,
{
_id: ["mailListId", id1],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{
_id: ["mailListId", id2],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
createEvent.instanceListId,
[createEvent.instanceId, createSecondEvent.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(mail), await toStorableInstance(secondMail)))
when(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", matchers.anything(), matchers.anything(), matchers.anything()),
).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(createEvent, updateEvent, createSecondEvent, updateSecondEvent), progressMonitorMock)
o(createEvent.isPrefetched).equals(true)
o(updateEvent.isPrefetched).equals(true)
})
o("prefetched flag is not set to true if mailDetails blob fails to download", async () => {
const mail = createTestEntity(
MailTypeRef,
{
_id: ["firstMailListId", id1],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{
_id: ["firstMailListId", id2],
mailDetails: ["archiveId", "secondBlob"],
},
{ populateAggregates: true },
)
const mailUpdate: EntityUpdateData = {
instanceId: elementIdPart(mail._id),
instanceListId: listIdPart(mail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondMailUpdate: EntityUpdateData = Object.assign(structuredClone(mailUpdate), {
instanceId: id2,
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
mailUpdate.instanceListId,
[mailUpdate.instanceId, secondMailUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(mail), await toStorableInstance(secondMail)))
when(entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["firstBlob"], matchers.anything(), fetchBlobOpt)).thenReturn(
Promise.reject("second error"),
)
await eventInstancePrefetcher.preloadEntities(Array.of(mailUpdate, secondMailUpdate), progressMonitorMock)
o(mailUpdate.isPrefetched).equals(false)
o(secondMailUpdate.isPrefetched).equals(false)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
mailUpdate.instanceListId,
[mailUpdate.instanceId, secondMailUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
verify(
entityRestClient.loadMultipleParsedInstances(
MailDetailsBlobTypeRef,
"archiveId",
["firstBlob", "secondBlob"],
matchers.anything(),
matchers.anything(),
),
{
times: 1,
},
)
})
})

View file

@ -223,7 +223,7 @@ o.spec("CryptoMapper", function () {
const sk = [4136869568, 4101282953, 2038999435, 962526794, 1053028316, 3236029410, 1618615449, 3232287205]
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2JWpmlpWEhgG5zqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
3: [{ 2: "123", 6: "someCustomId", 9: [] }],
3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
7: "AWBaC3ipyi9kxJn7USkbW1SLXPjgU8T5YqpIP/dmTbyRwtXFU9tQbYBm12gNpI9KJfwO14FN25hjC3SlngSBlzs=",
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
@ -248,7 +248,7 @@ o.spec("CryptoMapper", function () {
5: new Date("2025-01-01T13:00:00.000Z"),
7: true,
// 6 is _id and will be generated
3: [{ 2: "123", 6: "aggregateId", 9: [] }],
3: [{ 2: "123", 6: "aggregateId", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: new Uint8Array([93, 100, 153, 150, 95, 10, 107, 53, 164, 219, 212, 180, 106, 221, 132, 233]) },
} as unknown as ClientModelParsedInstance
@ -271,7 +271,7 @@ o.spec("CryptoMapper", function () {
o("decryptParsedInstance with missing sk sets _errors", async function () {
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2JWpmlpWEhgG5zqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
3: [{ 2: "123", 6: "someCustomId", 9: [] }],
3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance
@ -285,7 +285,7 @@ o.spec("CryptoMapper", function () {
1: "encrypted string",
5: new Date("2025-01-01T13:00:00.000Z"),
// 6 is _id and will be generated
3: [{ 2: "123", 9: [] }],
3: [{ 2: "123", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: new Uint8Array([93, 100, 153, 150, 95, 10, 107, 53, 164, 219, 212, 180, 106, 221, 132, 233]) },
} as unknown as ClientModelParsedInstance
@ -297,7 +297,7 @@ o.spec("CryptoMapper", function () {
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "",
3: [{ 2: "123", 6: "someCustomId", 9: [] }],
3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance
@ -314,7 +314,7 @@ o.spec("CryptoMapper", function () {
1: "",
5: new Date("2025-01-01T13:00:00.000Z"),
// 6 is _id and will be generated
3: [{ 2: "123", 9: [] }],
3: [{ 2: "123", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: null },
} as unknown as ClientModelParsedInstance
@ -327,7 +327,7 @@ o.spec("CryptoMapper", function () {
const sk = [4136869568, 4101282953, 2038999435, 962526794, 1053028316, 3236029410, 1618615449, 3232287205]
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2pmlpWEhgG5iwzqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
3: [{ 2: "123", 6: "someCustomId", 9: [] }],
3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance

View file

@ -132,6 +132,15 @@ export const testAggregateModel: TypeModel = {
final: false,
dependency: "tutanota",
},
"10": {
id: 10,
name: "testZeroOrOneAggregation",
type: AssociationType.Aggregation,
cardinality: Cardinality.ZeroOrOne,
refTypeId: 44,
final: false,
dependency: "tutanota",
},
},
version: 0,
versioned: false,
@ -182,6 +191,7 @@ export type TestAggregate = Entity & {
_id: Id
testNumber: NumberString
testSecondLevelAssociation: TestAggregateOnAggregate[]
testZeroOrOneAggregation: TestAggregateOnAggregate | null
}
export type TestEntity = Entity & {

View file

@ -81,7 +81,7 @@ o.spec("ModelMapper", function () {
const parsedInstance: ServerModelParsedInstance = {
1: "some encrypted string",
5: new Date("2025-01-01T13:00:00.000Z"),
3: [{ 2: "123", 6: "123456", _finalIvs: {}, 9: [] } as unknown as ServerModelParsedInstance],
3: [{ 2: "123", 6: "123456", _finalIvs: {}, 9: [], 10: [] } as unknown as ServerModelParsedInstance],
12: "generatedId",
13: ["listId", "elementId"],
4: ["associatedElementId"],
@ -102,6 +102,7 @@ o.spec("ModelMapper", function () {
testNumber: "123",
_id: "123456",
testSecondLevelAssociation: [],
testZeroOrOneAggregation: null,
})
o(mappedInstance.testElementAssociation).equals("associatedElementId")
o(mappedInstance.testGeneratedId).equals("generatedId")

View file

@ -15,7 +15,13 @@ import { AttributeModel } from "../../../../../src/common/api/common/AttributeMo
const serverModelUntypedInstanceNetworkDebugging: ServerModelUntypedInstance = {
"1:testValue": "test string",
"3:testAssociation": [{ "2:testNumber": "123", "9:testSecondLevelAssociation": [] }],
"3:testAssociation": [
{
"2:testNumber": "123",
"9:testSecondLevelAssociation": [],
"10:testZeroOrOneAggregation": [],
},
],
"4:testListAssociation": ["assocId"],
"5:testDate": "1735736415000",
"7:testBoolean": "encryptedBool",
@ -23,7 +29,7 @@ const serverModelUntypedInstanceNetworkDebugging: ServerModelUntypedInstance = {
const serverModelUntypedInstance: ServerModelUntypedInstance = {
"1": "test string",
"3": [{ "2": "123", 9: [] }],
"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": "1735736415000",
"7": "encryptedBool",
@ -31,7 +37,7 @@ const serverModelUntypedInstance: ServerModelUntypedInstance = {
const clientModelEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
"1": "base64EncodedString",
"3": [{ "2": "123", 9: [] }],
"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": new Date("2025-01-01T13:00:15Z"),
"7": "encryptedBool",
@ -39,7 +45,7 @@ const clientModelEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
const faultyEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
"1": new Uint8Array(2),
"3": [{ "2": "123", 9: [] }],
"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": new Date("2025-01-01T13:00:15Z"),
} as unknown as ClientModelEncryptedParsedInstance

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -102,9 +102,12 @@ o.spec("ContactIndexer", () => {
function createUpdate(operation: OperationType, instanceListId: Id, instanceId: Id, typeRef: TypeRef<any> = ContactTypeRef): EntityUpdateData {
return {
operation,
instanceId,
instanceListId,
typeRef,
operation: operation,
instanceId: instanceId,
instanceListId: instanceListId,
typeRef: typeRef,
instance: null,
patches: null,
isPrefetched: false,
}
}

View file

@ -1,25 +1,29 @@
import o from "@tutao/otest"
import { batchMod, EntityModificationType, EventQueue, QueuedBatch } from "../../../../../src/common/api/worker/EventQueue.js"
import { EntityUpdateTypeRef, GroupTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
import { EventQueue, QueuedBatch } from "../../../../../src/common/api/worker/EventQueue.js"
import { GroupTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
import { OperationType } from "../../../../../src/common/api/common/TutanotaConstants.js"
import { defer, delay } from "@tutao/tutanota-utils"
import { ConnectionError } from "../../../../../src/common/api/common/error/RestError.js"
import { ContactTypeRef, MailboxGroupRootTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
import { MailboxGroupRootTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
import { spy } from "@tutao/tutanota-test-utils"
import { EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils"
import { createTestEntity } from "../../../TestUtils.js"
o.spec("EventQueueTest", function () {
let queue: EventQueue
let processElement: any
let lastProcess: { resolve: () => void; reject: (Error) => void; promise: Promise<void> }
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
const newUpdate = (type: OperationType, instanceId: string): EntityUpdateData => {
return {
operation: type,
instanceId,
instanceListId: "",
typeRef: MailTypeRef,
...noPatchesAndInstance,
} as Partial<EntityUpdateData> as EntityUpdateData
}
@ -32,7 +36,7 @@ o.spec("EventQueueTest", function () {
}
return Promise.resolve()
})
queue = new EventQueue("test!", true, processElement)
queue = new EventQueue("test!", processElement)
})
o("pause and resume", async function () {
@ -89,7 +93,7 @@ o.spec("EventQueueTest", function () {
}
return Promise.resolve()
})
let queue = new EventQueue("test 2!", true, (nextElement: QueuedBatch) => {
let queue = new EventQueue("test 2!", (nextElement: QueuedBatch) => {
if (nextElement.batchId === "2") {
return Promise.reject(new ConnectionError("no connection"))
} else {
@ -103,359 +107,4 @@ o.spec("EventQueueTest", function () {
o(queue.queueSize()).equals(2)
o(queue.__processingBatch).equals(null)
})
o.spec("collapsing events", function () {
o.beforeEach(function () {
queue.pause()
})
o("create + delete == delete", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const deleteEvent = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [deleteEvent])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedDelete], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create + update == create", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const updateEvent = createUpdate(OperationType.UPDATE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [updateEvent])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
// new update got optimized away on the spot
])
})
o("create + create == create + create", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [createEvent2])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
const expectedCreate2 = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedCreate2], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create + update + delete == delete", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const updateEvent = createUpdate(OperationType.UPDATE, "new-mail-list", "1")
const deleteEvent = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [updateEvent])
queue.add("batch-id-3", "group-id", [deleteEvent])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [], batchId: "batch-id-1", groupId: "group-id" }],
// update event was optimized away
[{ events: [expectedDelete], batchId: "batch-id-3", groupId: "group-id" }],
])
})
o("delete + create == delete + create", async function () {
// DELETE can happen after CREATE in case of custom id. We keep it as-is
const deleteEvent = createUpdate(OperationType.DELETE, "mail-list", "1")
const createEvent = createUpdate(OperationType.CREATE, "mail-list", "1")
queue.add("batch-id-0", "group-id", [deleteEvent])
queue.add("batch-id-1", "group-id", [createEvent])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [deleteEvent], batchId: "batch-id-0", groupId: "group-id" }],
[{ events: [createEvent], batchId: "batch-id-1", groupId: "group-id" }],
])
})
o("delete + create + delete + create == delete + create", async function () {
// This tests that a create still works after a preceding delete
const deleteEvent1 = createUpdate(OperationType.DELETE, "list", "1")
const nonEmptyEventInBetween = createUpdate(OperationType.CREATE, "list2", "2")
const createEvent1 = createUpdate(OperationType.CREATE, "list", "1")
const deleteEvent2 = createUpdate(OperationType.DELETE, "list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "list", "1")
queue.add("batch-id-1", "group-id", [deleteEvent1])
queue.add("batch-id-1.1", "group-id", [nonEmptyEventInBetween])
queue.add("batch-id-2", "group-id", [createEvent1])
queue.add("batch-id-3", "group-id", [deleteEvent2])
queue.add("batch-id-4", "group-id", [createEvent2])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedDelete2 = createUpdate(OperationType.DELETE, createEvent1.instanceListId, createEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedDelete], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [nonEmptyEventInBetween], batchId: "batch-id-1.1", groupId: "group-id" }],
[{ events: [], batchId: "batch-id-2", groupId: "group-id" }],
[{ events: [expectedDelete2], batchId: "batch-id-3", groupId: "group-id" }],
[{ events: [expectedCreate], batchId: "batch-id-4", groupId: "group-id" }],
])
})
o("delete (list 1) + create (list 2) == delete (list 1) + create (list 2)", async function () {
// entity updates for the same element id but different list IDs do not influence each other
const deleteEvent1 = createUpdate(OperationType.DELETE, "list1", "1")
const createEvent1 = createUpdate(OperationType.CREATE, "list2", "1")
queue.add("batch-id-1", "group-id", [deleteEvent1])
queue.add("batch-id-2", "group-id", [createEvent1])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, deleteEvent1.instanceListId, deleteEvent1.instanceId)
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedDelete], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedCreate], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create (list 1) + update (list 1) + delete (list 2) == create (list 1) + delete (list 2)", async function () {
// entity updates for the same element id but different list IDs do not influence each other
const createEvent1 = createUpdate(OperationType.CREATE, "list1", "1")
const updateEvent1 = createUpdate(OperationType.UPDATE, "list1", "1")
const deleteEvent1 = createUpdate(OperationType.DELETE, "list2", "1")
queue.add("batch-id-1", "group-id", [createEvent1])
queue.add("batch-id-2", "group-id", [updateEvent1])
queue.add("batch-id-3", "group-id", [deleteEvent1])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedDelete = createUpdate(OperationType.DELETE, deleteEvent1.instanceListId, deleteEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedDelete], batchId: "batch-id-3", groupId: "group-id" }],
])
})
o("same batch in two different groups", async function () {
const createEvent1 = createUpdate(OperationType.CREATE, "old-mail-list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "old-mail-list", "1")
queue.add("batch-id-1", "group-id-1", [createEvent1])
queue.add("batch-id-1", "group-id-2", [createEvent2])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [createEvent1], batchId: "batch-id-1", groupId: "group-id-1" }],
[{ events: [createEvent1], batchId: "batch-id-1", groupId: "group-id-2" }],
])
})
o(
"[delete (list 1) + create (list 2)] + delete (list 2) + create (list 2) = [delete (list 1) + create (list 2)] + delete (list 2) + create (list 2)",
async function () {
const deleteEvent1 = createUpdate(OperationType.DELETE, "l1", "1")
const createEvent1 = createUpdate(OperationType.CREATE, "l2", "1")
const deleteEvent2 = createUpdate(OperationType.DELETE, "l2", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "l2", "1")
queue.add("batch-id-1", "group-id-1", [deleteEvent1, createEvent1])
queue.add("batch-id-2", "group-id-1", [deleteEvent2])
queue.add("batch-id-3", "group-id-1", [createEvent2])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [deleteEvent1], batchId: "batch-id-1", groupId: "group-id-1" }],
[{ events: [deleteEvent2], batchId: "batch-id-2", groupId: "group-id-1" }],
[{ events: [createEvent2], batchId: "batch-id-3", groupId: "group-id-1" }],
])
},
)
o("optimization does not fail when there are new events with the same id but a different type", function () {
const batchId = "batch-id-1"
const groupId = "group-id-1"
const instanceId = "instance-id-1"
const updateEvent1 = createUpdate(OperationType.UPDATE, "", instanceId)
const updateEvent2 = createUpdate(OperationType.UPDATE, "", instanceId)
updateEvent1.typeRef = GroupTypeRef
updateEvent2.typeRef = MailboxGroupRootTypeRef
queue.add(batchId, groupId, [updateEvent1])
queue.add(batchId, groupId, [updateEvent2])
})
function createUpdate(type: OperationType, listId: Id, instanceId: Id): EntityUpdateData {
return {
typeRef: MailTypeRef,
operation: type,
instanceId,
instanceListId: listId,
}
}
})
o.spec("batchMod", function () {
const batchId = "batchId"
const instanceListId = "instanceListId"
const instanceId = "instanceId"
o("one entity with the same id and type", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the same type but different element id", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId: "instanceId2",
instanceListId,
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the same type but different list id", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId: "instanceListId2",
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the id but different type", async () => {
o(
batchMod(
batchId,
[
{
typeRef: ContactTypeRef,
instanceId,
instanceListId,
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("modification is based on operation of batch, not the argument", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.DELETE,
},
),
).equals(EntityModificationType.CREATE)
})
})
})

View file

@ -17,10 +17,9 @@ import { base64ToUint8Array, byteLength, concat, utf8Uint8ArrayToString } from "
import type { SearchIndexEntry, SearchIndexMetaDataRow } from "../../../../../src/common/api/worker/search/SearchTypes.js"
import { GroupMembershipTypeRef, UserTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
import { ContactTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
import { GroupType, OperationType } from "../../../../../src/common/api/common/TutanotaConstants.js"
import { GroupType } from "../../../../../src/common/api/common/TutanotaConstants.js"
import { aes256RandomKey, fixedIv, unauthenticatedAesDecrypt } from "@tutao/tutanota-crypto"
import { createTestEntity } from "../../../TestUtils.js"
import { containsEventOfType, EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils.js"
import { ClientModelInfo } from "../../../../../src/common/api/common/EntityFunctions"
o.spec("Index Utils", () => {
@ -195,20 +194,6 @@ o.spec("Index Utils", () => {
o(filterMailMemberships(user)).deepEquals([mailGroup1, mailGroup2])
})
o("containsEventOfType", function () {
function createUpdate(type: OperationType, id: Id): EntityUpdateData {
return {
operation: type,
instanceId: id,
instanceListId: "",
} as Partial<EntityUpdateData> as EntityUpdateData
}
o(containsEventOfType([], OperationType.CREATE, "1")).equals(false)
o(containsEventOfType([createUpdate(OperationType.CREATE, "1")], OperationType.CREATE, "1")).equals(true)
o(containsEventOfType([createUpdate(OperationType.DELETE, "1")], OperationType.CREATE, "1")).equals(false)
o(containsEventOfType([createUpdate(OperationType.DELETE, "2")], OperationType.DELETE, "1")).equals(false)
})
o("byteLength", function () {
o(byteLength("")).equals(0)
o(byteLength("A")).equals(1)

View file

@ -56,6 +56,10 @@ o.spec("IndexedDbIndexer", () => {
throw new ProgrammingError("not supported")
},
}
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
let keyLoaderFacade: KeyLoaderFacade
let mailIndexer: MailIndexer
@ -648,6 +652,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime-event-id",
instanceListId: "",
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
},
]
await indexer.processEntityEvents(realtimeEvents, previousNewestBatchId, groupId)
@ -719,6 +725,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime-event-id",
instanceListId: "",
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
},
]
@ -760,6 +768,8 @@ o.spec("IndexedDbIndexer", () => {
instanceListId: "list",
instanceId: "event1",
type: "",
instance: null,
patch: null,
}),
createEntityUpdate({
typeId: MailTypeRef.typeId.toString(),
@ -768,6 +778,8 @@ o.spec("IndexedDbIndexer", () => {
instanceListId: "list",
instanceId: "event2",
type: "",
instance: null,
patch: null,
}),
],
}),
@ -791,6 +803,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime",
instanceListId: "list",
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
},
]
await indexer.processEntityEvents(realtimeUpdates, realtimeBatchId, groupId)
@ -812,12 +826,16 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceListId: "list",
instanceId: "event1",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceListId: "list",
instanceId: "event2",
...noPatchesAndInstance,
isPrefetched: false,
},
],
},
@ -1028,6 +1046,8 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-1",
instanceListId: "",
...noPatchesAndInstance,
isPrefetched: false,
},
]
@ -1044,6 +1064,8 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-2",
instanceListId: "",
...noPatchesAndInstance,
isPrefetched: false,
},
]
const batchId2 = "batch-id-2"
@ -1079,18 +1101,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-1",
instanceListId: "create",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.CREATE,
instanceId: "id-2",
instanceListId: "create",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceId: "id-3",
instanceListId: "create",
...noPatchesAndInstance,
isPrefetched: false,
},
{
@ -1098,18 +1126,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.UPDATE,
instanceId: "id-4",
instanceListId: "update",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.UPDATE,
instanceId: "id-5",
instanceListId: "update",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.UPDATE,
instanceId: "id-6",
instanceListId: "update",
...noPatchesAndInstance,
isPrefetched: false,
},
{
@ -1117,18 +1151,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.DELETE,
instanceId: "id-7",
instanceListId: "delete",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.DELETE,
instanceId: "id-8",
instanceListId: "delete",
...noPatchesAndInstance,
isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.DELETE,
instanceId: "id-9",
instanceListId: "delete",
...noPatchesAndInstance,
isPrefetched: false,
},
],
groupId: "blah",
@ -1323,6 +1363,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "instanceId",
instanceListId: "instanceListId",
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
},
]
when(mailIndexer.processEntityEvents(updates, matchers.anything(), matchers.anything())).thenDo(() => processDeferred.resolve())
@ -1362,6 +1404,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "instanceId",
instanceListId: "instanceListId",
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
},
]
when(mailIndexer.processEntityEvents(updates, matchers.anything(), matchers.anything())).thenDo(() => processDeferred.resolve())

View file

@ -36,7 +36,6 @@ import { ElementDataOS, GroupDataOS, ObjectStoreName, SearchIndexMetaDataOS, Sea
import { AttributeModel } from "../../../../../src/common/api/common/AttributeModel"
import { ClientModelInfo } from "../../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils"
import { OperationType } from "../../../../../src/common/api/common/TutanotaConstants"
import { CancelledError } from "../../../../../src/common/api/common/error/CancelledError.js"
const mailTypeInfo = typeRefToTypeInfo(MailTypeRef)
@ -1029,12 +1028,6 @@ o.spec("IndexerCore", () => {
const instanceId = "L-dNNLe----1"
const instanceIdTimestamp = generatedIdToTimestamp(instanceId)
const event: EntityUpdateData = {
typeRef: MailTypeRef,
instanceId,
instanceListId: "",
operation: OperationType.CREATE,
}
const metaRowId = 3
const anotherMetaRowId = 4
const transaction: any = {
@ -1093,12 +1086,6 @@ o.spec("IndexerCore", () => {
let indexUpdate = _createNewIndexUpdate(mailTypeInfo)
let instanceId = "123"
let event: EntityUpdateData = {
typeRef: MailTypeRef,
instanceId,
instanceListId: "",
operation: OperationType.CREATE,
}
let transaction: any = {
get: (os, key) => {
o.check(os).equals(ElementDataOS)

@ -859,15 +859,6 @@ o.spec("MailIndexer", () => {
}
})
function createUpdate(operation: OperationType, listId: Id, instanceId: Id): EntityUpdateData {
return {
operation: operation,
instanceListId: listId,
instanceId: instanceId,
typeRef: MailTypeRef,
}
}
function createMailInstances({
subject,
mailSetEntryId,

@ -42,6 +42,11 @@ import { ClientModelInfo } from "../../../src/common/api/common/EntityFunctions"
import { EntityRestClient } from "../../../src/common/api/worker/rest/EntityRestClient"
o.spec("CalendarModel", function () {
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
o.spec("calendar events have same fields", function () {
let restClientMock: EntityRestClient
let calendarFacadeMock: CalendarFacade
@ -769,6 +774,8 @@ o.spec("CalendarModel", function () {
instanceListId: listIdPart(eventUpdate._id),
instanceId: elementIdPart(eventUpdate._id),
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
})
o(model.getFileIdToSkippedCalendarEventUpdates().get(getElementId(calendarFile))!).deepEquals(eventUpdate)
@ -784,6 +791,8 @@ o.spec("CalendarModel", function () {
instanceListId: listIdPart(calendarFile._id),
instanceId: elementIdPart(calendarFile._id),
operation: OperationType.UPDATE,
...noPatchesAndInstance,
isPrefetched: false,
})
o(model.getFileIdToSkippedCalendarEventUpdates().size).deepEquals(0)
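
Note: the `noPatchesAndInstance` constant is redefined per suite; where that gets noisy, a small factory keeps call sites as terse as they were before this change. A hypothetical helper along these lines (not part of the diff), reusing the `EntityUpdateData` shape sketched earlier:

```ts
// Hypothetical test helper: defaults the new fields so call sites only
// spell out the identifiers and the operation they care about.
type UpdateCore = Pick<EntityUpdateData, "typeRef" | "instanceListId" | "instanceId" | "operation">

function makeUpdate(core: UpdateCore): EntityUpdateData {
	return {
		...core,
		instance: null,
		patches: null,
		isPrefetched: false,
	}
}

// Provided by the app in the real tests; declared here to stay self-contained.
declare const CalendarEventTypeRef: TypeRef<unknown>

// Usage mirroring the CalendarModel fixture above:
const update = makeUpdate({
	typeRef: CalendarEventTypeRef,
	instanceListId: "listId",
	instanceId: "elementId",
	operation: OperationType.CREATE,
})
```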

@ -418,6 +418,9 @@ o.spec("CalendarViewModel", function () {
instanceListId: getListId(eventToDrag),
instanceId: getElementId(eventToDrag),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}
const updatedEventFromServer = makeEvent(getElementId(eventToDrag), newData, new Date(2021, 0, 5, 14, 30), assertNotNull(eventToDrag.uid))
entityClientMock.addListInstances(updatedEventFromServer)

@ -82,7 +82,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
await handler.onMailNotification(setupSseInfo(), notificationInfo)
await handler.onMailNotification(setupSseInfo(), [notificationInfo])
verify(
notifier.submitGroupedNotification("translated:pushNewMail_msg", notificationInfo.mailAddress, "mailListId,mailElementId", matchers.anything()),
@ -110,7 +110,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
await handler.onMailNotification(setupSseInfo(), notificationInfo)
await handler.onMailNotification(setupSseInfo(), [notificationInfo])
verify(notifier.submitGroupedNotification(matchers.anything(), matchers.anything(), matchers.anything(), matchers.anything()), { times: 0 })
})
@ -131,7 +131,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
await handler.onMailNotification(setupSseInfo(), notificationInfo)
await handler.onMailNotification(setupSseInfo(), [notificationInfo])
const listenerCaptor = matchers.captor()
verify(
@ -197,17 +197,17 @@ o.spec("TutaNotificationHandler", () => {
const requestDefer = mockFetchRequest(
fetch,
"http://something.com/rest/tutanota/mail/mailListId/mailElementId",
"http://something.com/rest/tutanota/mail/mailListId?ids=mailElementId",
{
v: tutanotaModelInfo.version.toString(),
cv: appVersion,
accessToken: "accessToken",
},
200,
mailLiteral,
[mailLiteral],
)
await handler.onMailNotification(sseInfo, notificationInfo)
await handler.onMailNotification(sseInfo, [notificationInfo])
await requestDefer
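
Note: two related changes show up in this suite. `onMailNotification` now takes an array of notification infos, and the mocked download URL switches from one GET per mail (`.../mail/mailListId/mailElementId`) to a list request (`.../mail/mailListId?ids=mailElementId`) returning an array — the loadMultiple-style prefetching from the commit message. A rough sketch of the grouping step such a batched fetch implies; the types and helper names here are illustrative, not the handler's real API:

```ts
// Illustrative only: batch notifications by mail list id so each group
// can be fetched with a single ?ids=... request instead of N GETs.
interface MailIdTuple {
	listId: string
	listElementId: string
}

function groupByListId(mailIds: readonly MailIdTuple[]): Map<string, string[]> {
	const groups = new Map<string, string[]>()
	for (const { listId, listElementId } of mailIds) {
		const ids = groups.get(listId) ?? []
		ids.push(listElementId)
		groups.set(listId, ids)
	}
	return groups
}

// One entry per list, e.g. "mailListId" -> ["mailElementId", ...], which
// maps onto GET /rest/tutanota/mail/mailListId?ids=mailElementId,...
```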

@ -215,7 +215,8 @@ o.spec("TutaSseFacade", () => {
verify(
notificationHandler.onMailNotification(
sseInfo,
matchers.argThat((actualNotificationInfo) => {
matchers.argThat((actualNotificationInfos) => {
let actualNotificationInfo = actualNotificationInfos[0]
actualNotificationInfo.mailId._id = null
removeOriginals(actualNotificationInfo)
return deepEqual(actualNotificationInfo, notificationInfo)
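
Note: with the array-valued argument, the matcher indexes into `actualNotificationInfos` before normalizing and comparing. An equivalent assertion with testdouble's captor, should the inline `argThat` grow unwieldy (a sketch; `sseInfo`, `notificationInfo`, and `removeOriginals` are the suite's own fixtures and helpers):

```ts
// Capture the array argument, then assert on its first element.
const infosCaptor = matchers.captor()
verify(notificationHandler.onMailNotification(sseInfo, infosCaptor.capture()))
const [actual] = infosCaptor.value
actual.mailId._id = null
removeOriginals(actual)
o(deepEqual(actual, notificationInfo)).equals(true)
```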

@ -11,8 +11,7 @@ import { instance, matchers, object, when } from "testdouble"
import { UserController } from "../../../src/common/api/main/UserController.js"
import { createTestEntity } from "../TestUtils.js"
import { EntityUpdateData } from "../../../src/common/api/common/utils/EntityUpdateUtils.js"
import { MailboxDetail, MailboxModel } from "../../../src/common/mailFunctionality/MailboxModel.js"
import { InboxRuleHandler } from "../../../src/mail-app/mail/model/InboxRuleHandler.js"
import { MailboxModel } from "../../../src/common/mailFunctionality/MailboxModel.js"
import { getElementId, getListId } from "../../../src/common/api/common/utils/EntityUtils.js"
import { MailModel } from "../../../src/mail-app/mail/model/MailModel.js"
import { EventController } from "../../../src/common/api/main/EventController.js"
@ -27,9 +26,7 @@ o.spec("MailModelTest", function () {
inboxFolder.folderType = MailSetKind.INBOX
const anotherFolder = createTestEntity(MailFolderTypeRef, { _id: ["folderListId", "archiveId"] })
anotherFolder.folderType = MailSetKind.ARCHIVE
let mailboxDetails: Partial<MailboxDetail>[]
let logins: LoginController
let inboxRuleHandler: InboxRuleHandler
let mailFacade: MailFacade
const restClient: EntityRestClientMock = new EntityRestClientMock()
@ -44,7 +41,6 @@ o.spec("MailModelTest", function () {
when(userController.isUpdateForLoggedInUserInstance(matchers.anything(), matchers.anything())).thenReturn(false)
when(logins.getUserController()).thenReturn(userController)
inboxRuleHandler = object()
model = new MailModel(
downcast({}),
mailboxModel,
@ -55,8 +51,6 @@ o.spec("MailModelTest", function () {
null,
() => null,
)
// not pretty, but works
// model.mailboxDetails(mailboxDetails as MailboxDetail[])
})
o("doesn't send notification for another folder", async function () {
const mailSetEntry = createTestEntity(MailSetEntryTypeRef, { _id: [anotherFolder.entries, "mailSetEntryId"] })
@ -102,6 +96,9 @@ o.spec("MailModelTest", function () {
operation,
instanceListId,
instanceId,
instance: null,
patches: null,
isPrefetched: false,
}
}
})

@ -49,6 +49,7 @@ import { MailboxDetail, MailboxModel } from "../../../src/common/mailFunctionali
import { SendMailModel, TOO_MANY_VISIBLE_RECIPIENTS } from "../../../src/common/mailFunctionality/SendMailModel.js"
import { RecipientField } from "../../../src/common/mailFunctionality/SharedMailUtils.js"
import { getContactDisplayName } from "../../../src/common/contactsFunctionality/ContactUtils.js"
import { EntityUpdateData } from "../../../src/common/api/common/utils/EntityUpdateUtils"
const { anything, argThat } = matchers
@ -88,6 +89,10 @@ const BODY_TEXT_1 = "lorem ipsum dolor yaddah yaddah"
const SUBJECT_LINE_1 = "Did you get that thing I sent ya"
const STRONG_PASSWORD = "@()IE!)(@FME)0-123jfDSA32SDACmmnvnvddEW"
const WEAK_PASSWORD = "123"
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
o.spec("SendMailModel", function () {
o.before(function () {
@ -575,36 +580,48 @@ o.spec("SendMailModel", function () {
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: UserTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: CustomerTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: NotificationMailTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: ChallengeTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
...noPatchesAndInstance,
isPrefetched: false,
})
verify(entity.load(anything(), anything(), anything()), { times: 0 })
})
@ -635,6 +652,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.UPDATE,
instanceListId,
instanceId,
...noPatchesAndInstance,
isPrefetched: false,
})
o(model.allRecipients().length).equals(2)
const updatedRecipient = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))
@ -668,6 +687,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.UPDATE,
instanceListId,
instanceId,
...noPatchesAndInstance,
isPrefetched: false,
})
o(model.allRecipients().length).equals(1)
const updatedContact = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))
@ -681,6 +702,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.DELETE,
instanceListId,
instanceId,
...noPatchesAndInstance,
isPrefetched: false,
})
o(model.allRecipients().length).equals(1)
const updatedContact = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))

@ -50,6 +50,10 @@ o.spec("ConversationListModel", () => {
mailGroup: createTestEntity(GroupTypeRef),
mailboxGroupRoot: createTestEntity(MailboxGroupRootTypeRef),
}
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
const mailSetEntriesListId = "entries"
const _ownerGroup = "me"
@ -368,6 +372,9 @@ o.spec("ConversationListModel", () => {
instanceListId: listIdPart(mailSetEntryId),
instanceId: elementIdPart(mailSetEntryId),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}
when(entityClient.load(MailSetEntryTypeRef, mailSetEntryId)).thenResolve(
@ -410,6 +417,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
const oldMails = model.mails
@ -436,6 +445,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
entityUpdateData.operation = OperationType.DELETE
@ -461,6 +472,8 @@ o.spec("ConversationListModel", () => {
instanceListId: listIdPart(someMail.mailSetEntryId),
instanceId: elementIdPart(someMail.mailSetEntryId),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
const oldItems = model.mails
@ -498,6 +511,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(newEntry),
instanceId: getElementId(newEntry),
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailSetEntryTypeRef, newEntry._id)).thenResolve(newEntry)
@ -581,6 +596,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(0),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -606,6 +623,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(2),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -631,6 +650,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(1),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -667,6 +688,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
@ -694,6 +717,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
entityUpdateData.operation = OperationType.DELETE

@ -49,6 +49,10 @@ o.spec("MailListModel", () => {
mailGroup: createTestEntity(GroupTypeRef),
mailboxGroupRoot: createTestEntity(MailboxGroupRootTypeRef),
}
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
const mailSetEntriesListId = "entries"
const _ownerGroup = "me"
@ -351,6 +355,8 @@ o.spec("MailListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
entityUpdateData.operation = OperationType.UPDATE
@ -371,6 +377,8 @@ o.spec("MailListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
entityUpdateData.operation = OperationType.DELETE
@ -389,6 +397,8 @@ o.spec("MailListModel", () => {
instanceListId: listIdPart(someMail.mailSetEntryId),
instanceId: elementIdPart(someMail.mailSetEntryId),
operation: OperationType.DELETE,
...noPatchesAndInstance,
isPrefetched: false,
}
const oldItems = model.items
@ -422,6 +432,8 @@ o.spec("MailListModel", () => {
instanceListId: getListId(newEntry),
instanceId: getElementId(newEntry),
operation: OperationType.CREATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailSetEntryTypeRef, newEntry._id)).thenResolve(newEntry)
@ -476,6 +488,8 @@ o.spec("MailListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
@ -494,6 +508,8 @@ o.spec("MailListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
...noPatchesAndInstance,
isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
entityUpdateData.operation = OperationType.DELETE

@ -22,6 +22,7 @@ import { createTestEntity } from "../../TestUtils.js"
import { MailboxDetail, MailboxModel } from "../../../../src/common/mailFunctionality/MailboxModel.js"
import { MailModel } from "../../../../src/mail-app/mail/model/MailModel.js"
import { ClientModelInfo } from "../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
o.spec("ConversationViewModel", function () {
let conversation: ConversationEntry[]
@ -41,6 +42,10 @@ o.spec("ConversationViewModel", function () {
let canUseConversationView: boolean
const listId = "listId"
const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
instance: null,
patches: null,
}
const viewModelFactory = async (): Promise<
(options: CreateMailViewerOptions, mailboxDetails: MailboxDetail, mailboxProperties: MailboxProperties) => MailViewerViewModel
@ -253,6 +258,8 @@ o.spec("ConversationViewModel", function () {
operation: OperationType.CREATE,
instanceListId: listId,
instanceId: yetAnotherMail.conversationEntry[1],
...noPatchesAndInstance,
isPrefetched: false,
},
],
"mailGroupId",
@ -290,6 +297,8 @@ o.spec("ConversationViewModel", function () {
operation: OperationType.UPDATE,
instanceListId: listId,
instanceId: anotherMail.conversationEntry[1],
...noPatchesAndInstance,
isPrefetched: false,
},
],
"mailGroupId",
@ -316,6 +325,8 @@ o.spec("ConversationViewModel", function () {
operation: OperationType.CREATE,
instanceListId: listId,
instanceId: yetAnotherMail.conversationEntry[1],
...noPatchesAndInstance,
isPrefetched: false,
},
],
"mailGroupId",
@ -340,6 +351,8 @@ o.spec("ConversationViewModel", function () {
operation: OperationType.CREATE,
instanceListId: listId,
instanceId: yetAnotherMail.conversationEntry[1],
...noPatchesAndInstance,
isPrefetched: false,
},
],
"mailGroupId",
@ -379,6 +392,8 @@ o.spec("ConversationViewModel", function () {
operation: OperationType.UPDATE,
instanceListId: listId,
instanceId: trashDraftMail.conversationEntry[1],
...noPatchesAndInstance,
isPrefetched: false,
},
],
"mailGroupId",

@ -1268,6 +1268,10 @@ pub struct EntityUpdate {
pub operation: i64,
#[serde(rename = "2556")]
pub typeId: Option<i64>,
#[serde(rename = "2617")]
pub instance: Option<String>,
#[serde(rename = "2618")]
pub patch: Option<PatchList>,
}
impl Entity for EntityUpdate {
@ -5978,24 +5982,6 @@ impl Entity for Patch {
}
}
#[derive(uniffi::Record, Clone, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
pub struct PatchList {
#[serde(rename = "2573")]
pub _format: i64,
#[serde(rename = "2574")]
pub patches: Vec<Patch>,
}
impl Entity for PatchList {
fn type_ref() -> TypeRef {
TypeRef {
app: AppName::Sys,
type_id: TypeId::from(2572),
}
}
}
#[derive(uniffi::Record, Clone, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
pub struct IdentityKeyPair {
@ -6147,3 +6133,21 @@ impl Entity for RolloutGetOut {
}
}
}
#[derive(uniffi::Record, Clone, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
pub struct PatchList {
#[serde(rename = "2615")]
pub _id: Option<CustomId>,
#[serde(rename = "2616")]
pub patches: Vec<Patch>,
}
impl Entity for PatchList {
fn type_ref() -> TypeRef {
TypeRef {
app: AppName::Sys,
type_id: TypeId::from(2614),
}
}
}
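
Note: in the generated SDK, `EntityUpdate` now carries the serialized `instance` (attribute 2617) and the `patch` list (attribute 2618), and `PatchList` moves to type id 2614 as an aggregate with a `CustomId`. This is what lets the client skip the follow-up GET described in the commit message: put the instance on CREATE, merge patches on UPDATE. A rough TypeScript sketch of that dispatch, reusing the `EntityUpdateData` shape from the earlier sketch — the `CacheStorage`/`PatchMerger` interfaces are assumptions for illustration, not the client's real cache API:

```ts
// Assumed interfaces for illustration; not the client's real cache API.
interface CacheStorage {
	put(typeRef: TypeRef<unknown>, instance: object): Promise<void>
}
interface PatchMerger {
	applyPatches(typeRef: TypeRef<unknown>, id: readonly [string, string], patches: object): Promise<void>
}

async function applyEnrichedUpdate(update: EntityUpdateData, storage: CacheStorage, merger: PatchMerger): Promise<void> {
	if (update.operation === OperationType.CREATE && update.instance != null) {
		// CREATE: the WebSocket message already contains the instance,
		// so it can be written to the cache without a GET.
		await storage.put(update.typeRef, update.instance)
	} else if (update.operation === OperationType.UPDATE && update.patches != null) {
		// UPDATE: merge the patch list onto the cached entry instead of re-fetching.
		await merger.applyPatches(update.typeRef, [update.instanceListId, update.instanceId], update.patches)
	}
	// Anything else falls back to the pre-existing load-on-demand path.
}
```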

@ -106,7 +106,7 @@ use crate::entities::generated::sys::VersionData;
use crate::entities::generated::sys::VersionReturn;
pub struct AdminGroupKeyRotationService;
crate::service_impl!(declare, AdminGroupKeyRotationService, "sys/admingroupkeyrotationservice", 131);
crate::service_impl!(declare, AdminGroupKeyRotationService, "sys/admingroupkeyrotationservice", 132);
crate::service_impl!(POST, AdminGroupKeyRotationService, AdminGroupKeyRotationPostIn, ());
crate::service_impl!(GET, AdminGroupKeyRotationService, (), AdminGroupKeyRotationGetOut);
crate::service_impl!(PUT, AdminGroupKeyRotationService, AdminGroupKeyRotationPutIn, ());
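
Note: from here on, every sys service declaration bumps its model version from 131 to 132 in lockstep; presumably this tracks the same model change that added the new `EntityUpdate` attributes and reworked `PatchList` above.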
@ -114,25 +114,25 @@ crate::service_impl!(PUT, AdminGroupKeyRotationService, AdminGroupKeyRotationPut
pub struct AffiliatePartnerKpiService;
crate::service_impl!(declare, AffiliatePartnerKpiService, "sys/affiliatepartnerkpiservice", 131);
crate::service_impl!(declare, AffiliatePartnerKpiService, "sys/affiliatepartnerkpiservice", 132);
crate::service_impl!(GET, AffiliatePartnerKpiService, (), AffiliatePartnerKpiServiceGetOut);
pub struct AlarmService;
crate::service_impl!(declare, AlarmService, "sys/alarmservice", 131);
crate::service_impl!(declare, AlarmService, "sys/alarmservice", 132);
crate::service_impl!(POST, AlarmService, AlarmServicePost, ());
pub struct AppStoreSubscriptionService;
crate::service_impl!(declare, AppStoreSubscriptionService, "sys/appstoresubscriptionservice", 131);
crate::service_impl!(declare, AppStoreSubscriptionService, "sys/appstoresubscriptionservice", 132);
crate::service_impl!(GET, AppStoreSubscriptionService, AppStoreSubscriptionGetIn, AppStoreSubscriptionGetOut);
pub struct AutoLoginService;
crate::service_impl!(declare, AutoLoginService, "sys/autologinservice", 131);
crate::service_impl!(declare, AutoLoginService, "sys/autologinservice", 132);
crate::service_impl!(POST, AutoLoginService, AutoLoginDataReturn, AutoLoginPostReturn);
crate::service_impl!(GET, AutoLoginService, AutoLoginDataGet, AutoLoginDataReturn);
crate::service_impl!(DELETE, AutoLoginService, AutoLoginDataDelete, ());
@ -140,7 +140,7 @@ crate::service_impl!(DELETE, AutoLoginService, AutoLoginDataDelete, ());
pub struct BrandingDomainService;
crate::service_impl!(declare, BrandingDomainService, "sys/brandingdomainservice", 131);
crate::service_impl!(declare, BrandingDomainService, "sys/brandingdomainservice", 132);
crate::service_impl!(POST, BrandingDomainService, BrandingDomainData, ());
crate::service_impl!(GET, BrandingDomainService, (), BrandingDomainGetReturn);
crate::service_impl!(PUT, BrandingDomainService, BrandingDomainData, ());
@ -149,37 +149,37 @@ crate::service_impl!(DELETE, BrandingDomainService, BrandingDomainDeleteData, ()
pub struct ChangeKdfService;
crate::service_impl!(declare, ChangeKdfService, "sys/changekdfservice", 131);
crate::service_impl!(declare, ChangeKdfService, "sys/changekdfservice", 132);
crate::service_impl!(POST, ChangeKdfService, ChangeKdfPostIn, ());
pub struct ChangePasswordService;
crate::service_impl!(declare, ChangePasswordService, "sys/changepasswordservice", 131);
crate::service_impl!(declare, ChangePasswordService, "sys/changepasswordservice", 132);
crate::service_impl!(POST, ChangePasswordService, ChangePasswordPostIn, ());
pub struct CloseSessionService;
crate::service_impl!(declare, CloseSessionService, "sys/closesessionservice", 131);
crate::service_impl!(declare, CloseSessionService, "sys/closesessionservice", 132);
crate::service_impl!(POST, CloseSessionService, CloseSessionServicePost, ());
pub struct CreateCustomerServerProperties;
crate::service_impl!(declare, CreateCustomerServerProperties, "sys/createcustomerserverproperties", 131);
crate::service_impl!(declare, CreateCustomerServerProperties, "sys/createcustomerserverproperties", 132);
crate::service_impl!(POST, CreateCustomerServerProperties, CreateCustomerServerPropertiesData, CreateCustomerServerPropertiesReturn);
pub struct CustomDomainCheckService;
crate::service_impl!(declare, CustomDomainCheckService, "sys/customdomaincheckservice", 131);
crate::service_impl!(declare, CustomDomainCheckService, "sys/customdomaincheckservice", 132);
crate::service_impl!(GET, CustomDomainCheckService, CustomDomainCheckGetIn, CustomDomainCheckGetOut);
pub struct CustomDomainService;
crate::service_impl!(declare, CustomDomainService, "sys/customdomainservice", 131);
crate::service_impl!(declare, CustomDomainService, "sys/customdomainservice", 132);
crate::service_impl!(POST, CustomDomainService, CustomDomainData, CustomDomainReturn);
crate::service_impl!(PUT, CustomDomainService, CustomDomainData, ());
crate::service_impl!(DELETE, CustomDomainService, CustomDomainData, ());
@ -187,50 +187,50 @@ crate::service_impl!(DELETE, CustomDomainService, CustomDomainData, ());
pub struct CustomerAccountTerminationService;
crate::service_impl!(declare, CustomerAccountTerminationService, "sys/customeraccountterminationservice", 131);
crate::service_impl!(declare, CustomerAccountTerminationService, "sys/customeraccountterminationservice", 132);
crate::service_impl!(POST, CustomerAccountTerminationService, CustomerAccountTerminationPostIn, CustomerAccountTerminationPostOut);
pub struct CustomerPublicKeyService;
crate::service_impl!(declare, CustomerPublicKeyService, "sys/customerpublickeyservice", 131);
crate::service_impl!(declare, CustomerPublicKeyService, "sys/customerpublickeyservice", 132);
crate::service_impl!(GET, CustomerPublicKeyService, (), PublicKeyGetOut);
pub struct CustomerService;
crate::service_impl!(declare, CustomerService, "sys/customerservice", 131);
crate::service_impl!(declare, CustomerService, "sys/customerservice", 132);
crate::service_impl!(DELETE, CustomerService, DeleteCustomerData, ());
pub struct DebitService;
crate::service_impl!(declare, DebitService, "sys/debitservice", 131);
crate::service_impl!(declare, DebitService, "sys/debitservice", 132);
crate::service_impl!(PUT, DebitService, DebitServicePutData, ());
pub struct DomainMailAddressAvailabilityService;
crate::service_impl!(declare, DomainMailAddressAvailabilityService, "sys/domainmailaddressavailabilityservice", 131);
crate::service_impl!(declare, DomainMailAddressAvailabilityService, "sys/domainmailaddressavailabilityservice", 132);
crate::service_impl!(GET, DomainMailAddressAvailabilityService, DomainMailAddressAvailabilityData, DomainMailAddressAvailabilityReturn);
pub struct ExternalPropertiesService;
crate::service_impl!(declare, ExternalPropertiesService, "sys/externalpropertiesservice", 131);
crate::service_impl!(declare, ExternalPropertiesService, "sys/externalpropertiesservice", 132);
crate::service_impl!(GET, ExternalPropertiesService, (), ExternalPropertiesReturn);
pub struct GiftCardRedeemService;
crate::service_impl!(declare, GiftCardRedeemService, "sys/giftcardredeemservice", 131);
crate::service_impl!(declare, GiftCardRedeemService, "sys/giftcardredeemservice", 132);
crate::service_impl!(POST, GiftCardRedeemService, GiftCardRedeemData, ());
crate::service_impl!(GET, GiftCardRedeemService, GiftCardRedeemData, GiftCardRedeemGetReturn);
pub struct GiftCardService;
crate::service_impl!(declare, GiftCardService, "sys/giftcardservice", 131);
crate::service_impl!(declare, GiftCardService, "sys/giftcardservice", 132);
crate::service_impl!(POST, GiftCardService, GiftCardCreateData, GiftCardCreateReturn);
crate::service_impl!(GET, GiftCardService, (), GiftCardGetReturn);
crate::service_impl!(DELETE, GiftCardService, GiftCardDeleteData, ());
@ -238,38 +238,38 @@ crate::service_impl!(DELETE, GiftCardService, GiftCardDeleteData, ());
pub struct GroupKeyRotationInfoService;
crate::service_impl!(declare, GroupKeyRotationInfoService, "sys/groupkeyrotationinfoservice", 131);
crate::service_impl!(declare, GroupKeyRotationInfoService, "sys/groupkeyrotationinfoservice", 132);
crate::service_impl!(GET, GroupKeyRotationInfoService, (), GroupKeyRotationInfoGetOut);
pub struct GroupKeyRotationService;
crate::service_impl!(declare, GroupKeyRotationService, "sys/groupkeyrotationservice", 131);
crate::service_impl!(declare, GroupKeyRotationService, "sys/groupkeyrotationservice", 132);
crate::service_impl!(POST, GroupKeyRotationService, GroupKeyRotationPostIn, ());
pub struct IdentityKeyService;
crate::service_impl!(declare, IdentityKeyService, "sys/identitykeyservice", 131);
crate::service_impl!(declare, IdentityKeyService, "sys/identitykeyservice", 132);
crate::service_impl!(POST, IdentityKeyService, IdentityKeyPostIn, ());
crate::service_impl!(GET, IdentityKeyService, IdentityKeyGetIn, IdentityKeyGetOut);
pub struct InvoiceDataService;
crate::service_impl!(declare, InvoiceDataService, "sys/invoicedataservice", 131);
crate::service_impl!(declare, InvoiceDataService, "sys/invoicedataservice", 132);
crate::service_impl!(GET, InvoiceDataService, InvoiceDataGetIn, InvoiceDataGetOut);
pub struct LocationService;
crate::service_impl!(declare, LocationService, "sys/locationservice", 131);
crate::service_impl!(declare, LocationService, "sys/locationservice", 132);
crate::service_impl!(GET, LocationService, (), LocationServiceGetReturn);
pub struct MailAddressAliasService;
crate::service_impl!(declare, MailAddressAliasService, "sys/mailaddressaliasservice", 131);
crate::service_impl!(declare, MailAddressAliasService, "sys/mailaddressaliasservice", 132);
crate::service_impl!(POST, MailAddressAliasService, MailAddressAliasServiceData, ());
crate::service_impl!(GET, MailAddressAliasService, MailAddressAliasGetIn, MailAddressAliasServiceReturn);
crate::service_impl!(DELETE, MailAddressAliasService, MailAddressAliasServiceDataDelete, ());
@ -277,7 +277,7 @@ crate::service_impl!(DELETE, MailAddressAliasService, MailAddressAliasServiceDat
pub struct MembershipService;
crate::service_impl!(declare, MembershipService, "sys/membershipservice", 131);
crate::service_impl!(declare, MembershipService, "sys/membershipservice", 132);
crate::service_impl!(POST, MembershipService, MembershipAddData, ());
crate::service_impl!(PUT, MembershipService, MembershipPutIn, ());
crate::service_impl!(DELETE, MembershipService, MembershipRemoveData, ());
@ -285,13 +285,13 @@ crate::service_impl!(DELETE, MembershipService, MembershipRemoveData, ());
pub struct MultipleMailAddressAvailabilityService;
crate::service_impl!(declare, MultipleMailAddressAvailabilityService, "sys/multiplemailaddressavailabilityservice", 131);
crate::service_impl!(declare, MultipleMailAddressAvailabilityService, "sys/multiplemailaddressavailabilityservice", 132);
crate::service_impl!(GET, MultipleMailAddressAvailabilityService, MultipleMailAddressAvailabilityData, MultipleMailAddressAvailabilityReturn);
pub struct PaymentDataService;
crate::service_impl!(declare, PaymentDataService, "sys/paymentdataservice", 131);
crate::service_impl!(declare, PaymentDataService, "sys/paymentdataservice", 132);
crate::service_impl!(POST, PaymentDataService, PaymentDataServicePostData, ());
crate::service_impl!(GET, PaymentDataService, PaymentDataServiceGetData, PaymentDataServiceGetReturn);
crate::service_impl!(PUT, PaymentDataService, PaymentDataServicePutData, PaymentDataServicePutReturn);
@ -299,77 +299,77 @@ crate::service_impl!(PUT, PaymentDataService, PaymentDataServicePutData, Payment
pub struct PlanService;
crate::service_impl!(declare, PlanService, "sys/planservice", 131);
crate::service_impl!(declare, PlanService, "sys/planservice", 132);
crate::service_impl!(GET, PlanService, (), PlanServiceGetOut);
pub struct PriceService;
crate::service_impl!(declare, PriceService, "sys/priceservice", 131);
crate::service_impl!(declare, PriceService, "sys/priceservice", 132);
crate::service_impl!(GET, PriceService, PriceServiceData, PriceServiceReturn);
pub struct PublicKeyService;
crate::service_impl!(declare, PublicKeyService, "sys/publickeyservice", 131);
crate::service_impl!(declare, PublicKeyService, "sys/publickeyservice", 132);
crate::service_impl!(GET, PublicKeyService, PublicKeyGetIn, PublicKeyGetOut);
crate::service_impl!(PUT, PublicKeyService, PublicKeyPutIn, ());
pub struct ReferralCodeService;
crate::service_impl!(declare, ReferralCodeService, "sys/referralcodeservice", 131);
crate::service_impl!(declare, ReferralCodeService, "sys/referralcodeservice", 132);
crate::service_impl!(POST, ReferralCodeService, ReferralCodePostIn, ReferralCodePostOut);
crate::service_impl!(GET, ReferralCodeService, ReferralCodeGetIn, ());
pub struct RegistrationCaptchaService;
crate::service_impl!(declare, RegistrationCaptchaService, "sys/registrationcaptchaservice", 131);
crate::service_impl!(declare, RegistrationCaptchaService, "sys/registrationcaptchaservice", 132);
crate::service_impl!(POST, RegistrationCaptchaService, RegistrationCaptchaServiceData, ());
crate::service_impl!(GET, RegistrationCaptchaService, RegistrationCaptchaServiceGetData, RegistrationCaptchaServiceReturn);
pub struct RegistrationService;
crate::service_impl!(declare, RegistrationService, "sys/registrationservice", 131);
crate::service_impl!(declare, RegistrationService, "sys/registrationservice", 132);
crate::service_impl!(POST, RegistrationService, RegistrationServiceData, RegistrationReturn);
crate::service_impl!(GET, RegistrationService, (), RegistrationServiceData);
pub struct ResetFactorsService;
crate::service_impl!(declare, ResetFactorsService, "sys/resetfactorsservice", 131);
crate::service_impl!(declare, ResetFactorsService, "sys/resetfactorsservice", 132);
crate::service_impl!(DELETE, ResetFactorsService, ResetFactorsDeleteData, ());
pub struct ResetPasswordService;
crate::service_impl!(declare, ResetPasswordService, "sys/resetpasswordservice", 131);
crate::service_impl!(declare, ResetPasswordService, "sys/resetpasswordservice", 132);
crate::service_impl!(POST, ResetPasswordService, ResetPasswordPostIn, ());
pub struct RolloutService;
crate::service_impl!(declare, RolloutService, "sys/rolloutservice", 131);
crate::service_impl!(declare, RolloutService, "sys/rolloutservice", 132);
crate::service_impl!(GET, RolloutService, (), RolloutGetOut);
pub struct SaltService;
crate::service_impl!(declare, SaltService, "sys/saltservice", 131);
crate::service_impl!(declare, SaltService, "sys/saltservice", 132);
crate::service_impl!(GET, SaltService, SaltData, SaltReturn);
pub struct SecondFactorAuthAllowedService;
crate::service_impl!(declare, SecondFactorAuthAllowedService, "sys/secondfactorauthallowedservice", 131);
crate::service_impl!(declare, SecondFactorAuthAllowedService, "sys/secondfactorauthallowedservice", 132);
crate::service_impl!(GET, SecondFactorAuthAllowedService, (), SecondFactorAuthAllowedReturn);
pub struct SecondFactorAuthService;
crate::service_impl!(declare, SecondFactorAuthService, "sys/secondfactorauthservice", 131);
crate::service_impl!(declare, SecondFactorAuthService, "sys/secondfactorauthservice", 132);
crate::service_impl!(POST, SecondFactorAuthService, SecondFactorAuthData, ());
crate::service_impl!(GET, SecondFactorAuthService, SecondFactorAuthGetData, SecondFactorAuthGetReturn);
crate::service_impl!(DELETE, SecondFactorAuthService, SecondFactorAuthDeleteData, ());
@ -377,77 +377,77 @@ crate::service_impl!(DELETE, SecondFactorAuthService, SecondFactorAuthDeleteData
pub struct SessionService;
crate::service_impl!(declare, SessionService, "sys/sessionservice", 131);
crate::service_impl!(declare, SessionService, "sys/sessionservice", 132);
crate::service_impl!(POST, SessionService, CreateSessionData, CreateSessionReturn);
pub struct SignOrderProcessingAgreementService;
crate::service_impl!(declare, SignOrderProcessingAgreementService, "sys/signorderprocessingagreementservice", 131);
crate::service_impl!(declare, SignOrderProcessingAgreementService, "sys/signorderprocessingagreementservice", 132);
crate::service_impl!(POST, SignOrderProcessingAgreementService, SignOrderProcessingAgreementData, ());
pub struct SurveyService;
crate::service_impl!(declare, SurveyService, "sys/surveyservice", 131);
crate::service_impl!(declare, SurveyService, "sys/surveyservice", 132);
crate::service_impl!(POST, SurveyService, SurveyDataPostIn, ());
pub struct SwitchAccountTypeService;
crate::service_impl!(declare, SwitchAccountTypeService, "sys/switchaccounttypeservice", 131);
crate::service_impl!(declare, SwitchAccountTypeService, "sys/switchaccounttypeservice", 132);
crate::service_impl!(POST, SwitchAccountTypeService, SwitchAccountTypePostIn, ());
pub struct SystemKeysService;
crate::service_impl!(declare, SystemKeysService, "sys/systemkeysservice", 131);
crate::service_impl!(declare, SystemKeysService, "sys/systemkeysservice", 132);
crate::service_impl!(GET, SystemKeysService, (), SystemKeysReturn);
pub struct TakeOverDeletedAddressService;
crate::service_impl!(declare, TakeOverDeletedAddressService, "sys/takeoverdeletedaddressservice", 131);
crate::service_impl!(declare, TakeOverDeletedAddressService, "sys/takeoverdeletedaddressservice", 132);
crate::service_impl!(POST, TakeOverDeletedAddressService, TakeOverDeletedAddressData, ());
pub struct UpdatePermissionKeyService;
crate::service_impl!(declare, UpdatePermissionKeyService, "sys/updatepermissionkeyservice", 131);
crate::service_impl!(declare, UpdatePermissionKeyService, "sys/updatepermissionkeyservice", 132);
crate::service_impl!(POST, UpdatePermissionKeyService, UpdatePermissionKeyData, ());
pub struct UpdateSessionKeysService;
crate::service_impl!(declare, UpdateSessionKeysService, "sys/updatesessionkeysservice", 131);
crate::service_impl!(declare, UpdateSessionKeysService, "sys/updatesessionkeysservice", 132);
crate::service_impl!(POST, UpdateSessionKeysService, UpdateSessionKeysPostIn, ());
pub struct UpgradePriceService;
crate::service_impl!(declare, UpgradePriceService, "sys/upgradepriceservice", 131);
crate::service_impl!(declare, UpgradePriceService, "sys/upgradepriceservice", 132);
crate::service_impl!(GET, UpgradePriceService, UpgradePriceServiceData, UpgradePriceServiceReturn);
pub struct UserGroupKeyRotationService;
crate::service_impl!(declare, UserGroupKeyRotationService, "sys/usergroupkeyrotationservice", 131);
crate::service_impl!(declare, UserGroupKeyRotationService, "sys/usergroupkeyrotationservice", 132);
crate::service_impl!(POST, UserGroupKeyRotationService, UserGroupKeyRotationPostIn, ());
pub struct UserService;
crate::service_impl!(declare, UserService, "sys/userservice", 131);
crate::service_impl!(declare, UserService, "sys/userservice", 132);
crate::service_impl!(DELETE, UserService, UserDataDelete, ());
pub struct VerifierTokenService;
crate::service_impl!(declare, VerifierTokenService, "sys/verifiertokenservice", 131);
crate::service_impl!(declare, VerifierTokenService, "sys/verifiertokenservice", 132);
crate::service_impl!(POST, VerifierTokenService, VerifierTokenServiceIn, VerifierTokenServiceOut);
pub struct VersionService;
crate::service_impl!(declare, VersionService, "sys/versionservice", 131);
crate::service_impl!(declare, VersionService, "sys/versionservice", 132);
crate::service_impl!(GET, VersionService, VersionData, VersionReturn);

File diff suppressed because it is too large