add prefetching for missed entityUpdates, instance/patch on EntityUpdate

When processing missed entityUpdates in the EventQueue of the
EventBusClient, we group the entityUpdates by typeRef and listId and
issue loadMultiple requests instead of loading the instances one by one
(prefetching). Additionally, when the client is online, the server
enriches the WebSocket message with either the instance (for a CREATE
event) or the list of patches (for an UPDATE event), so that we do not
need an additional GET request and can either put the instance into the
cache directly or update the cached entry using the PatchMerger.
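
For illustration, a minimal sketch of the grouping idea described above (this is not the code from this commit: EntityUpdateData is reduced to the fields the grouping needs, and EntityClientLike is a stand-in for the cache interface whose loadMultiple is called):

```typescript
// Reduced stand-ins for the real types; only the fields used by the grouping.
type TypeRefLike = { app: string; typeId: number }

type EntityUpdateData = {
	typeRef: TypeRefLike
	instanceListId: string // "" for element types that do not live in a list
	instanceId: string
}

interface EntityClientLike {
	loadMultiple(typeRef: TypeRefLike, listId: string, elementIds: string[]): Promise<unknown[]>
}

// Group updates by (typeRef, listId) so each list needs one loadMultiple
// request instead of one GET per instance; the results land in the cache.
async function prefetchMissedUpdates(updates: EntityUpdateData[], client: EntityClientLike): Promise<void> {
	const grouped = new Map<string, { typeRef: TypeRefLike; listId: string; ids: string[] }>()
	for (const update of updates) {
		if (update.instanceListId === "") continue // only list element entities can be batched
		const key = `${update.typeRef.app}/${update.typeRef.typeId}/${update.instanceListId}`
		const group = grouped.get(key) ?? { typeRef: update.typeRef, listId: update.instanceListId, ids: [] }
		group.ids.push(update.instanceId)
		grouped.set(key, group)
	}
	for (const { typeRef, listId, ids } of grouped.values()) {
		if (ids.length > 1) {
			// lists with a single updated instance are left to normal one-by-one processing
			await client.loadMultiple(typeRef, listId, ids)
		}
	}
}
```

Updates whose event already carries the instance (CREATE) or a patch list (UPDATE) never reach this path, because the cache can be filled or patched directly from the WebSocket message.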

Co-authored-by: abp <abp@tutao.de>
Co-authored-by: das <das@tutao.de>
Co-authored-by: jomapp <17314077+jomapp@users.noreply.github.com>
Co-authored-by: Kinan <104761667+kibibytium@users.noreply.github.com>
Co-authored-by: map <mpfau@users.noreply.github.com>
Co-authored-by: sug <sug@tutao.de>
abp committed 2025-06-13 17:27:15 +02:00
parent 681b22cb4f, commit 4db7e9b1e9
63 changed files with 4193 additions and 2291 deletions

@@ -1 +1 @@
-Subproject commit 200f65e9a5111c8f7b19fd884f4c942b61a1579b
+Subproject commit daba6d10456b5a5f3d3616e6199b3849f19e7507


@@ -520,7 +520,6 @@ export function assembleEditResultAndAssignFromExisting(existingEvent: CalendarE
 	newEvent._ownerGroup = existingEvent._ownerGroup
 	newEvent._permissions = existingEvent._permissions
 	newEvent._original = existingEvent._original
-
 	return {
 		hasUpdateWorthyChanges: eventHasChanged(newEvent, existingEvent),
 		newEvent,


@@ -116,6 +116,7 @@ import { lang } from "../../../common/misc/LanguageViewModel.js"
 import { NativePushServiceApp } from "../../../common/native/main/NativePushServiceApp.js"
 import { getClientOnlyCalendars } from "../gui/CalendarGuiUtils.js"
 import { SyncTracker } from "../../../common/api/main/SyncTracker.js"
+import { CacheMode } from "../../../common/api/worker/rest/EntityRestClient"

 const TAG = "[CalendarModel]"
 const EXTERNAL_CALENDAR_RETRY_LIMIT = 3
@@ -824,7 +825,8 @@ export class CalendarModel {
 		try {
 			// We are not supposed to load files without the key provider, but we hope that the key
 			// was already resolved and the entity updated.
-			const file = await this.entityClient.load(FileTypeRef, fileId)
+			const file = await this.entityClient.load(FileTypeRef, fileId, { cacheMode: CacheMode.WriteOnly })
+			// const file = await this.entityClient.load(FileTypeRef, fileId)
 			const dataFile = await this.fileController.getAsDataFile(file)
 			const { parseCalendarFile } = await import("../../../common/calendar/import/CalendarImporter.js")
 			return await parseCalendarFile(dataFile)


@@ -2,11 +2,10 @@ import { CalendarSearchResultListEntry } from "./CalendarSearchListView.js"
 import { SearchRestriction, SearchResult } from "../../../../common/api/worker/search/SearchTypes.js"
 import { EntityEventsListener, EventController } from "../../../../common/api/main/EventController.js"
 import { CalendarEvent, CalendarEventTypeRef, ContactTypeRef, MailTypeRef } from "../../../../common/api/entities/tutanota/TypeRefs.js"
-import { CLIENT_ONLY_CALENDARS, OperationType } from "../../../../common/api/common/TutanotaConstants.js"
+import { CLIENT_ONLY_CALENDARS } from "../../../../common/api/common/TutanotaConstants.js"
 import { assertIsEntity2, elementIdPart, GENERATED_MAX_ID, getElementId, isSameId, ListElement } from "../../../../common/api/common/utils/EntityUtils.js"
 import { ListLoadingState, ListState } from "../../../../common/gui/base/List.js"
 import {
-	assertNotNull,
 	deepEqual,
 	downcast,
 	getEndOfDay,
@@ -30,7 +29,7 @@ import stream from "mithril/stream"
 import { generateCalendarInstancesInRange, retrieveClientOnlyEventsForUser } from "../../../../common/calendar/date/CalendarUtils.js"
 import { LoginController } from "../../../../common/api/main/LoginController.js"
 import { EntityClient } from "../../../../common/api/common/EntityClient.js"
-import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../../../common/api/common/utils/EntityUpdateUtils.js"
+import { EntityUpdateData, isUpdateForTypeRef } from "../../../../common/api/common/utils/EntityUpdateUtils.js"
 import { CalendarInfo } from "../../model/CalendarModel.js"
 import m from "mithril"
 import { CalendarFacade } from "../../../../common/api/worker/facades/lazy/CalendarFacade.js"
@@ -200,51 +199,10 @@ export class CalendarSearchViewModel {
 	private readonly entityEventsListener: EntityEventsListener = async (updates) => {
 		for (const update of updates) {
-			const mergedUpdate = this.mergeOperationsIfNeeded(update, updates)
-			if (mergedUpdate == null) continue
-			await this.entityEventReceived(mergedUpdate)
+			await this.entityEventReceived(update)
 		}
 	}

-	private mergeOperationsIfNeeded(update: EntityUpdateData, updates: readonly EntityUpdateData[]): EntityUpdateData | null {
-		// We are trying to keep the mails that are moved and would match the search criteria displayed.
-		// This is a bit hacky as we reimplement part of the filtering by list.
-		// Ideally search result would update by itself and we would only need to reconcile the changes.
-		if (!isUpdateForTypeRef(MailTypeRef, update) || this.searchResult == null) {
-			return update
-		}
-		if (update.operation === OperationType.CREATE && containsEventOfType(updates, OperationType.DELETE, update.instanceId)) {
-			// This is a move operation, is destination list included in the restrictions?
-			if (this.listIdMatchesRestriction(update.instanceListId, this.searchResult.restriction)) {
-				// If it's included, we want to keep showing the item but we will simulate the UPDATE
-				return { ...update, operation: OperationType.UPDATE }
-			} else {
-				// If it's not going to be included we might as well skip the create operation
-				return null
-			}
-		} else if (update.operation === OperationType.DELETE && containsEventOfType(updates, OperationType.CREATE, update.instanceId)) {
-			// This is a move operation and we are in the delete part of it.
-			// Grab the other part to check the move destination.
-			const createOperation = assertNotNull(getEventOfType(updates, OperationType.CREATE, update.instanceId))
-			// Is destination included in the search?
-			if (this.listIdMatchesRestriction(createOperation.instanceListId, this.searchResult.restriction)) {
-				// If so, skip the delete.
-				return null
-			} else {
-				// Otherwise delete
-				return update
-			}
-		} else {
-			return update
-		}
-	}
-
-	private listIdMatchesRestriction(listId: string, restriction: SearchRestriction): boolean {
-		return restriction.folderIds.length === 0 || restriction.folderIds.includes(listId)
-	}
-
 	onNewUrl(args: Record<string, any>, requestedPath: string) {
 		let restriction
 		try {


@@ -81,6 +81,8 @@ import { CalendarEventTypeRef } from "../../../common/api/entities/tutanota/Type
 import { CustomUserCacheHandler } from "../../../common/api/worker/rest/cacheHandler/CustomUserCacheHandler"
 import { EphemeralCacheStorage } from "../../../common/api/worker/rest/EphemeralCacheStorage"
 import { CustomCalendarEventCacheHandler } from "../../../common/api/worker/rest/cacheHandler/CustomCalendarEventCacheHandler"
+import { PatchMerger } from "../../../common/api/worker/offline/PatchMerger"
+import { EventInstancePrefetcher } from "../../../common/api/worker/EventInstancePrefetcher"

 assertWorkerOrNode()
@@ -90,6 +92,7 @@ export type CalendarWorkerLocatorType = {
 	serviceExecutor: IServiceExecutor
 	crypto: CryptoFacade
 	instancePipeline: InstancePipeline
+	patchMerger: PatchMerger
 	applicationTypesFacade: ApplicationTypesFacade
 	cacheStorage: CacheStorage
 	cache: EntityRestInterface
@@ -242,7 +245,9 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
 	locator.cacheStorage = maybeUninitializedStorage

-	locator.cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver)
+	locator.patchMerger = new PatchMerger(locator.cacheStorage, locator.instancePipeline, typeModelResolver, () => locator.crypto)
+	locator.cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver, locator.patchMerger)

 	locator.cachingEntityClient = new EntityClient(locator.cache, typeModelResolver)
 	const nonCachingEntityClient = new EntityClient(entityRestClient, typeModelResolver)
@@ -494,6 +499,8 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
 		noOp,
 	)

+	const eventInstancePrefetcher = new EventInstancePrefetcher(locator.cache)
+
 	locator.eventBusClient = new EventBusClient(
 		eventBusCoordinator,
 		locator.cache as EntityRestCache,
@@ -505,6 +512,8 @@ export async function initLocator(worker: CalendarWorkerImpl, browserData: Brows
 		mainInterface.progressTracker,
 		mainInterface.syncTracker,
 		typeModelResolver,
+		locator.crypto,
+		eventInstancePrefetcher,
 	)
 	locator.login.init(locator.eventBusClient)
 	locator.Const = Const


@@ -0,0 +1,9 @@
+//@bundleInto:common-min
+import { TutanotaError } from "@tutao/tutanota-error"
+
+export class PatchOperationError extends TutanotaError {
+	constructor(m: string) {
+		super("PatchOperationError", m)
+	}
+}


@@ -1,30 +1,49 @@
 import { OperationType } from "../TutanotaConstants.js"
-import { EntityUpdate } from "../../entities/sys/TypeRefs.js"
-import { SomeEntity } from "../EntityTypes.js"
-import { AppName, isSameTypeRef, isSameTypeRefByAttr, TypeRef } from "@tutao/tutanota-utils"
+import { EntityUpdate, Patch } from "../../entities/sys/TypeRefs.js"
+import { ServerModelParsedInstance, SomeEntity } from "../EntityTypes.js"
+import { AppName, isSameTypeRef, TypeRef } from "@tutao/tutanota-utils"
 import { isSameId } from "./EntityUtils.js"
 import { ClientTypeModelResolver } from "../EntityFunctions"
+import { Nullable } from "@tutao/tutanota-utils/dist/Utils"

 /**
  * A type similar to {@link EntityUpdate} but mapped to make it easier to work with.
  */
 export type EntityUpdateData = {
 	typeRef: TypeRef<any>
 	instanceListId: string
 	instanceId: string
 	operation: OperationType
+	instance: Nullable<ServerModelParsedInstance>
+	// empty list: the server did not send a patchList, or an empty re-write to the server database.
+	// length > 0: the normal case for a patch
+	patches: Nullable<Array<Patch>>
+	// whether the EventInstancePrefetcher has already downloaded this instance
+	isPrefetched: boolean
 }

-export async function entityUpdateToUpdateData(clientTypeModelResolver: ClientTypeModelResolver, update: EntityUpdate): Promise<EntityUpdateData> {
+export async function entityUpdateToUpdateData(
+	clientTypeModelResolver: ClientTypeModelResolver,
+	update: EntityUpdate,
+	instance: Nullable<ServerModelParsedInstance> = null,
+	isPrefetched: boolean = false,
+): Promise<EntityUpdateData> {
 	const typeId = update.typeId ? parseInt(update.typeId) : null
 	const typeIdOfEntityUpdateType = typeId
 		? new TypeRef<SomeEntity>(update.application as AppName, typeId)
 		: clientTypeModelResolver.resolveTypeRefFromAppAndTypeNameLegacy(update.application as AppName, update.type)
 	return {
 		typeRef: typeIdOfEntityUpdateType,
 		instanceListId: update.instanceListId,
 		instanceId: update.instanceId,
 		operation: update.operation as OperationType,
+		patches: update.patch?.patches ?? null,
+		instance,
+		isPrefetched,
 	}
 }
@@ -39,19 +58,3 @@ export function isUpdateFor<T extends SomeEntity>(entity: T, update: EntityUpdat
 		(update.instanceListId === "" ? isSameId(update.instanceId, entity._id) : isSameId([update.instanceListId, update.instanceId], entity._id))
 	)
 }
-
-export function containsEventOfType(events: ReadonlyArray<EntityUpdateData>, operationType: OperationType, elementId: Id): boolean {
-	return events.some((event) => event.operation === operationType && event.instanceId === elementId)
-}
-
-export function getEventOfType<T extends EntityUpdateData | EntityUpdate>(events: ReadonlyArray<T>, type: OperationType, elementId: Id): T | null {
-	return events.find((event) => event.operation === type && event.instanceId === elementId) ?? null
-}
-
-export function getEntityUpdateId(update: EntityUpdateData): Id | IdTuple {
-	if (update.instanceListId !== "") {
-		return [update.instanceListId, update.instanceId]
-	} else {
-		return update.instanceId
-	}
-}


@@ -11,7 +11,6 @@ import {
 	deepEqual,
 	Hex,
 	hexToBase64,
-	isEmpty,
 	isSameTypeRef,
 	pad,
 	repeat,
@@ -25,9 +24,11 @@ import {
 	ClientModelEncryptedParsedInstance,
 	ClientModelParsedInstance,
 	ClientModelUntypedInstance,
+	ClientTypeModel,
 	ElementEntity,
 	Entity,
 	ModelValue,
+	ParsedInstance,
 	ParsedValue,
 	SomeEntity,
 	TypeModel,
@@ -37,7 +38,7 @@ import { ClientTypeReferenceResolver, PatchOperationType } from "../EntityFuncti
 import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
 import { AttributeModel } from "../AttributeModel"
 import { createPatch, createPatchList, Patch, PatchList } from "../../entities/sys/TypeRefs"
-import { instance } from "testdouble"
+import { ProgrammingError } from "../error/ProgrammingError"

 /**
  * the maximum ID for elements stored on the server (number with the length of 10 bytes) => 2^80 - 1
@@ -359,7 +360,7 @@ export async function computePatches(
 ): Promise<Patch[]> {
 	let patches: Patch[] = []
 	for (const [valueIdStr, modelValue] of Object.entries(typeModel.values)) {
-		if (modelValue.final && !(modelValue.name == "_ownerEncSessionKey" || modelValue.name == "_ownerKeyVersion")) {
+		if (modelValue.final) {
 			continue
 		}
 		const attributeId = parseInt(valueIdStr)
@@ -396,6 +397,7 @@ export async function computePatches(
 			// keys are in the format attributeId:attributeName when networkDebugging is enabled
 			attributeIdStr += ":" + modelAssociation.name
 		}
+
 		if (modelAssociation.type == AssociationType.Aggregation) {
 			const appName = modelAssociation.dependency ?? typeModel.app
 			const typeId = modelAssociation.refTypeId
@@ -403,6 +405,15 @@ export async function computePatches(
 			const originalAggregatedEntities = (originalInstance[attributeId] ?? []) as Array<ClientModelParsedInstance>
 			const modifiedAggregatedEntities = (modifiedInstance[attributeId] ?? []) as Array<ClientModelParsedInstance>
 			const modifiedAggregatedUntypedEntities = (modifiedUntypedInstance[attributeIdStr] ?? []) as Array<ClientModelUntypedInstance>
+			const modifiedAggregateIds = modifiedAggregatedEntities.map(
+				(instance) => instance[assertNotNull(AttributeModel.getAttributeId(aggregateTypeModel, "_id"))] as Id,
+			)
+			if (!isDistinctAggregateIds(modifiedAggregateIds)) {
+				throw new ProgrammingError(
+					"Duplicate aggregate ids in the modified instance: " + AttributeModel.getAttribute(modifiedInstance, "_id", typeModel),
+				)
+			}
+
 			const addedItems = modifiedAggregatedUntypedEntities.filter(
 				(element) =>
 					!originalAggregatedEntities.some((item) => {
@@ -471,53 +482,26 @@ export async function computePatches(
 				})
 				patches = patches.concat(items)
 			}
-			if (modelAssociation.cardinality == Cardinality.Any) {
-				if (removedItems.length > 0) {
-					const removedAggregateIds = removedItems.map(
-						(instance) => instance[assertNotNull(AttributeModel.getAttributeId(aggregateTypeModel, "_id"))] as Id,
-					)
-					patches.push(
-						createPatch({
-							attributePath: attributeIdStr,
-							value: JSON.stringify(removedAggregateIds),
-							patchOperation: PatchOperationType.REMOVE_ITEM,
-						}),
-					)
-				}
-				if (addedItems.length > 0) {
-					patches.push(
-						createPatch({
-							attributePath: attributeIdStr,
-							value: JSON.stringify(addedItems),
-							patchOperation: PatchOperationType.ADD_ITEM,
-						}),
-					)
-				}
-			} else if (isEmpty(originalAggregatedEntities)) {
-				// ZeroOrOne with original aggregation on server is []
-				patches.push(
-					createPatch({
-						attributePath: attributeIdStr,
-						value: JSON.stringify(modifiedAggregatedUntypedEntities),
-						patchOperation: PatchOperationType.ADD_ITEM,
-					}),
-				)
-			} else {
-				// ZeroOrOne or One with original aggregation on server already there (i.e. it is a list of one)
-				const aggregateId = AttributeModel.getAttribute(assertNotNull(originalAggregatedEntities[0]), "_id", aggregateTypeModel)
-				const fullPath = `${attributeIdStr}/${aggregateId}/`
-				const items = await computePatches(
-					originalAggregatedEntities[0],
-					modifiedAggregatedEntities[0],
-					modifiedAggregatedUntypedEntities[0],
-					aggregateTypeModel,
-					typeReferenceResolver,
-					isNetworkDebuggingEnabled,
-				)
-				items.map((item) => {
-					item.attributePath = fullPath + item.attributePath
-				})
-				patches = patches.concat(items)
+			if (removedItems.length > 0) {
+				const removedAggregateIds = removedItems.map(
+					(instance) => instance[assertNotNull(AttributeModel.getAttributeId(aggregateTypeModel, "_id"))] as Id,
+				)
+				patches.push(
+					createPatch({
+						attributePath: attributeIdStr,
+						value: JSON.stringify(removedAggregateIds),
+						patchOperation: PatchOperationType.REMOVE_ITEM,
+					}),
+				)
+			}
+			if (addedItems.length > 0) {
+				patches.push(
+					createPatch({
+						attributePath: attributeIdStr,
+						value: JSON.stringify(addedItems),
+						patchOperation: PatchOperationType.ADD_ITEM,
+					}),
+				)
 			}
 		} else {
 			// non aggregation associations
@@ -529,15 +513,6 @@ export async function computePatches(
 			// Only Any associations support ADD_ITEM and REMOVE_ITEM operations
 			// All cardinalities support REPLACE operation
 			if (modelAssociation.cardinality == Cardinality.Any) {
-				if (addedItems.length > 0) {
-					patches.push(
-						createPatch({
-							attributePath: attributeIdStr,
-							value: JSON.stringify(addedItems),
-							patchOperation: PatchOperationType.ADD_ITEM,
-						}),
-					)
-				}
 				if (removedItems.length > 0) {
 					patches.push(
 						createPatch({
@@ -637,6 +612,11 @@ export function timestampToGeneratedId(timestamp: number, serverBytes: number =
 	return base64ToBase64Ext(hexToBase64(hex))
 }

+function isDistinctAggregateIds(array: Array<Id>) {
+	const checkSet = new Set(array)
+	return checkSet.size === array.length
+}
+
 /**
  * Extracts the timestamp from a GeneratedId
  * @param base64Ext The id as base64Ext
@@ -692,10 +672,10 @@ export function assertIsEntity2<T extends SomeEntity>(type: TypeRef<T>): (entity
  * Only use for new entities, the {@param entity} won't be usable for updates anymore after this.
  */
 export function removeTechnicalFields<E extends Partial<SomeEntity>>(entity: E) {
-	// we want to restrict outer function to entity types but internally we also want to handle aggregates
+	// we want to restrict outer function to entity types, but internally we also want to handle aggregates
 	function _removeTechnicalFields(erased: Record<string, any>) {
 		for (const key of Object.keys(erased)) {
-			if (key.startsWith("_finalEncrypted") || key.startsWith("_defaultEncrypted") || key.startsWith("_errors")) {
+			if (key.startsWith("_finalIvs") || key.startsWith("_errors")) {
 				delete erased[key]
 			} else {
 				const value = erased[key]
@@ -707,6 +687,7 @@ export function removeTechnicalFields<E extends Partial<SomeEntity>>(entity: E)
 	}
 	_removeTechnicalFields(entity)
+	return entity
 }

 /**


@@ -1,5 +1,5 @@
 const modelInfo = {
-	version: 131,
+	version: 132,
 }

 export default modelInfo

File diff suppressed because it is too large


@@ -802,6 +802,9 @@ export type EntityUpdate = {
 	instanceId: string;
 	operation: NumberString;
 	typeId: null | NumberString;
+	instance: null | string;
+	patch: null | PatchList;
 }

 export const VersionTypeRef: TypeRef<Version> = new TypeRef("sys", 480)
@@ -3931,20 +3934,6 @@ export type Patch = {
 	attributePath: string;
 	value: null | string;
 }

-export const PatchListTypeRef: TypeRef<PatchList> = new TypeRef("sys", 2572)
-
-export function createPatchList(values: StrippedEntity<PatchList>): PatchList {
-	return Object.assign(create(typeModels[PatchListTypeRef.typeId], PatchListTypeRef), values)
-}
-
-export type PatchList = {
-	_type: TypeRef<PatchList>;
-	_original?: PatchList
-
-	_format: NumberString;
-	patches: Patch[];
-}
-
 export const IdentityKeyPairTypeRef: TypeRef<IdentityKeyPair> = new TypeRef("sys", 2575)

 export function createIdentityKeyPair(values: StrippedEntity<IdentityKeyPair>): IdentityKeyPair {
@@ -4050,3 +4039,17 @@ export type RolloutGetOut = {
 	rollouts: Rollout[];
 }
+
+export const PatchListTypeRef: TypeRef<PatchList> = new TypeRef("sys", 2614)
+
+export function createPatchList(values: StrippedEntity<PatchList>): PatchList {
+	return Object.assign(create(typeModels[PatchListTypeRef.typeId], PatchListTypeRef), values)
+}
+
+export type PatchList = {
+	_type: TypeRef<PatchList>;
+	_original?: PatchList
+
+	_id: Id;
+	patches: Patch[];
+}


@@ -1,6 +1,7 @@
 import stream from "mithril/stream"
-import type { ProgressMonitorId } from "../common/utils/ProgressMonitor"
-import { ProgressMonitor } from "../common/utils/ProgressMonitor"
+import { IProgressMonitor, ProgressMonitor, ProgressMonitorId } from "../common/utils/ProgressMonitor"
+import { EstimatingProgressMonitor } from "../common/utils/EstimatingProgressMonitor"
+import Es from "../../../mail-app/translations/es"

 export type ExposedProgressTracker = Pick<ProgressTracker, "registerMonitor" | "workDoneForMonitor">
@@ -12,7 +13,7 @@ export type ExposedProgressTracker = Pick<ProgressTracker, "registerMonitor" | "
 export class ProgressTracker {
 	// Will stream a number between 0 and 1
 	onProgressUpdate: stream<number>
-	private readonly monitors: Map<ProgressMonitorId, ProgressMonitor>
+	private readonly monitors: Map<ProgressMonitorId, EstimatingProgressMonitor>
 	private idCounter: ProgressMonitorId

 	constructor() {
@@ -31,8 +32,8 @@ export class ProgressTracker {
 	 */
 	registerMonitorSync(work: number): ProgressMonitorId {
 		const id = this.idCounter++
-		const monitor = new ProgressMonitor(work, (percentage) => this.onProgress(id, percentage))
+		const monitor = new EstimatingProgressMonitor(work, (percentage) => this.onProgress(id, percentage))
+		monitor.continueEstimation()
 		this.monitors.set(id, monitor)

 		return id
@@ -47,7 +48,7 @@
 		this.getMonitor(id)?.workDone(amount)
 	}

-	getMonitor(id: ProgressMonitorId): ProgressMonitor | null {
+	getMonitor(id: ProgressMonitorId): IProgressMonitor | null {
 		return this.monitors.get(id) ?? null
 	}


@@ -12,13 +12,14 @@ import {
 	createWebsocketLeaderStatus,
 	EntityEventBatch,
 	EntityEventBatchTypeRef,
+	EntityUpdate,
 	WebsocketCounterData,
 	WebsocketCounterDataTypeRef,
 	WebsocketEntityDataTypeRef,
 	WebsocketLeaderStatus,
 	WebsocketLeaderStatusTypeRef,
 } from "../entities/sys/TypeRefs.js"
-import { binarySearch, delay, identity, lastThrow, ofClass, promiseMap, randomIntFromInterval, TypeRef } from "@tutao/tutanota-utils"
+import { AppName, assertNotNull, binarySearch, delay, identity, lastThrow, ofClass, promiseMap, randomIntFromInterval, TypeRef } from "@tutao/tutanota-utils"
 import { OutOfSyncError } from "../common/error/OutOfSyncError"
 import { CloseEventBusOption, GroupType, SECOND_MS } from "../common/TutanotaConstants"
 import { CancelledError } from "../common/error/CancelledError"
@@ -37,9 +38,14 @@ import { PhishingMarkerWebsocketDataTypeRef, ReportedMailFieldMarker } from "../
 import { UserFacade } from "./facades/UserFacade"
 import { ExposedProgressTracker } from "../main/ProgressTracker.js"
 import { SyncTracker } from "../main/SyncTracker.js"
-import { Entity, ServerModelUntypedInstance } from "../common/EntityTypes"
+import { Entity, ServerModelParsedInstance, ServerModelUntypedInstance } from "../common/EntityTypes"
 import { InstancePipeline } from "./crypto/InstancePipeline"
 import { EntityUpdateData, entityUpdateToUpdateData } from "../common/utils/EntityUpdateUtils"
+import { CryptoFacade } from "./crypto/CryptoFacade"
+import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
+import { EntityAdapter } from "./crypto/EntityAdapter"
+import { EventInstancePrefetcher } from "./EventInstancePrefetcher"
+import { AttributeModel } from "../common/AttributeModel"

 assertWorkerOrNode()
@@ -121,7 +127,7 @@ export class EventBusClient {
 	private lastAntiphishingMarkersId: Id | null = null

 	/** Queue to process all events. */
 	private readonly eventQueue: EventQueue

 	/** Queue that handles incoming websocket messages only. Caches them until we process downloaded ones and then adds them to eventQueue. */
@@ -152,6 +158,8 @@ export class EventBusClient {
 		private readonly progressTracker: ExposedProgressTracker,
 		private readonly syncTracker: SyncTracker,
 		private readonly typeModelResolver: TypeModelResolver,
+		private readonly cryptoFacade: CryptoFacade,
+		private readonly eventInstancePrefetcher: EventInstancePrefetcher,
 	) {
 		// We are not connected by default and will not try to unless connect() is called
 		this.state = EventBusState.Terminated
@@ -160,8 +168,8 @@ export class EventBusClient {
 		this.socket = null
 		this.reconnectTimer = null
 		this.connectTimer = null
-		this.eventQueue = new EventQueue("ws_opt", true, (modification) => this.eventQueueCallback(modification))
-		this.entityUpdateMessageQueue = new EventQueue("ws_msg", false, (batch) => this.entityUpdateMessageQueueCallback(batch))
+		this.eventQueue = new EventQueue("ws_opt", (modification) => this.eventQueueCallback(modification))
+		this.entityUpdateMessageQueue = new EventQueue("ws_msg", (batch) => this.entityUpdateMessageQueueCallback(batch))
 		this.reset()
 	}
@@ -208,7 +216,8 @@ export class EventBusClient {
 			"&accessToken=" +
 			authHeaders.accessToken +
 			(this.lastAntiphishingMarkersId ? "&lastPhishingMarkersId=" + this.lastAntiphishingMarkersId : "") +
-			(env.clientName ? "&clientName=" + env.clientName : "")
+			(env.clientName ? "&clientName=" + env.clientName : "") +
+			(env.networkDebugging ? "&network-debugging=" + "enable-network-debugging" : "")
 		const path = "/event?" + authQuery

 		this.unsubscribeFromOldWebsocket()
@@ -283,7 +292,8 @@ export class EventBusClient {
 	}

 	private async decodeEntityEventValue<E extends Entity>(messageType: TypeRef<E>, untypedInstance: ServerModelUntypedInstance): Promise<E> {
-		return await this.instancePipeline.decryptAndMap(messageType, untypedInstance, null)
+		const untypedInstanceSanitized = AttributeModel.removeNetworkDebuggingInfoIfNeeded(untypedInstance)
+		return await this.instancePipeline.decryptAndMap(messageType, untypedInstanceSanitized, null)
 	}

 	private onError(error: any) {
@@ -297,7 +307,11 @@ export class EventBusClient {
 			case MessageType.EntityUpdate: {
 				const entityUpdateData = await this.decodeEntityEventValue(WebsocketEntityDataTypeRef, JSON.parse(value))
 				this.typeModelResolver.setServerApplicationTypesModelHash(entityUpdateData.applicationTypesHash)
-				const updates = await promiseMap(entityUpdateData.entityUpdates, (event) => entityUpdateToUpdateData(this.typeModelResolver, event))
+				const updates = await promiseMap(entityUpdateData.entityUpdates, async (event) => {
+					let instance = await this.getInstanceFromEntityEvent(event)
+					return entityUpdateToUpdateData(this.typeModelResolver, event, instance)
+				})
+
 				this.entityUpdateMessageQueue.add(entityUpdateData.eventBatchId, entityUpdateData.eventBatchOwner, updates)
 				break
 			}
@@ -331,6 +345,25 @@ export class EventBusClient {
 		}
 	}

+	private async getInstanceFromEntityEvent(event: EntityUpdate): Promise<Nullable<ServerModelParsedInstance>> {
+		if (event.instance != null) {
+			const typeRef = new TypeRef<any>(event.application as AppName, parseInt(event.typeId!))
+			const serverTypeModel = await this.typeModelResolver.resolveServerTypeReference(typeRef)
+			const untypedInstance = JSON.parse(event.instance) as ServerModelUntypedInstance
+			const untypedInstanceSanitized = AttributeModel.removeNetworkDebuggingInfoIfNeeded(untypedInstance)
+			const encryptedParsedInstance = await this.instancePipeline.typeMapper.applyJsTypes(serverTypeModel, untypedInstanceSanitized)
+			const entityAdapter = await EntityAdapter.from(serverTypeModel, encryptedParsedInstance, this.instancePipeline)
+			if (this.userFacade.hasGroup(assertNotNull(entityAdapter._ownerGroup))) {
+				// if the user was just assigned to a new group, it might not be on the user facade yet;
+				// we can't decrypt the instance in that case.
+				const migratedEntity = await this.cryptoFacade.applyMigrations(typeRef, entityAdapter)
+				const sessionKey = await this.cryptoFacade.resolveSessionKey(migratedEntity)
+				return await this.instancePipeline.cryptoMapper.decryptParsedInstance(serverTypeModel, encryptedParsedInstance, sessionKey)
+			}
+		}
+		return null
+	}
+
 	private onClose(event: CloseEvent) {
 		this.failedConnectionAttempts++
 		console.log("ws close event:", event, "state:", this.state)
@@ -518,8 +551,12 @@ export class EventBusClient {
 		// Count all batches that will actually be processed so that the progress is correct
 		let totalExpectedBatches = 0
 		for (const batch of timeSortedEventBatches) {
-			const updates = await promiseMap(batch.events, (event) => entityUpdateToUpdateData(this.typeModelResolver, event))
+			const updates = await promiseMap(batch.events, async (event) => {
+				// const instance = await this.getInstanceFromEntityEvent(event)
+				return entityUpdateToUpdateData(this.typeModelResolver, event)
+			})
+
 			const batchWasAddedToQueue = this.addBatch(getElementId(batch), getListId(batch), updates, eventQueue)
 			if (batchWasAddedToQueue) {
 				// Set as last only if it was inserted with success
 				this.lastInitialEventBatch = getElementId(batch)
@@ -527,10 +564,12 @@ export class EventBusClient {
 			}
 		}

+		const allEventsFlatMap = this.eventQueue.eventQueue.flatMap((eventQ) => eventQ.events)
+
 		// We only have the correct amount of total work after adding all entity event batches.
 		// The progress for processed batches is tracked inside the event queue.
-		const progressMonitor = new ProgressMonitorDelegate(this.progressTracker, totalExpectedBatches + 1)
-		console.log("ws", `progress monitor expects ${totalExpectedBatches} events`)
+		const progressMonitor = new ProgressMonitorDelegate(this.progressTracker, totalExpectedBatches + allEventsFlatMap.length + 1)
+		console.log("ws", `progress monitor expects ${totalExpectedBatches + allEventsFlatMap.length} events`)
 		await progressMonitor.workDone(1) // show progress right away
 		eventQueue.setProgressMonitor(progressMonitor)
@@ -539,6 +578,8 @@ export class EventBusClient {
 			this.syncTracker.markSyncAsDone()
 		}

+		await this.eventInstancePrefetcher.preloadEntities(allEventsFlatMap, progressMonitor)
+
 		// We've loaded all the batches, we've added them to the queue, we can let the cache remember sync point for us to detect out of sync now.
 		// It is possible that we will record the time before the batch will be processed but the risk is low.
 		await this.cache.recordSyncTime()
@@ -641,7 +682,7 @@ export class EventBusClient {
 		if (index < 0) {
 			lastForGroup.splice(-index, 0, batchId)
-			// only add the batch if it was not process before
+			// only add the batch if it was not processed before
 			wasAdded = eventQueue.add(batchId, groupId, events)
 		} else {
 			wasAdded = false
@@ -651,7 +692,7 @@ export class EventBusClient {
 			lastForGroup.shift()
 		}

-		this.lastEntityEventIds.set(batchId, lastForGroup)
+		this.lastEntityEventIds.set(groupId, lastForGroup)

 		if (wasAdded) {
 			this.lastAddedBatchForGroup.set(groupId, batchId)


@@ -0,0 +1,165 @@
+import { EntityUpdateData } from "../common/utils/EntityUpdateUtils"
+import { Mail, MailDetailsBlobTypeRef, MailTypeRef } from "../entities/tutanota/TypeRefs"
+import { elementIdPart, ensureBase64Ext, isSameId, listIdPart } from "../common/utils/EntityUtils"
+import { assertNotNull, getTypeString, groupBy, isNotNull, isSameTypeRef, parseTypeString, TypeRef } from "@tutao/tutanota-utils"
+import { parseKeyVersion } from "./facades/KeyLoaderFacade"
+import { VersionedEncryptedKey } from "./crypto/CryptoWrapper"
+import { OperationType } from "../common/TutanotaConstants"
+import { NotAuthorizedError, NotFoundError } from "../common/error/RestError"
+import { CacheStorage, Range } from "./rest/DefaultEntityRestCache"
+import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
+import { ServerTypeModelResolver } from "../common/EntityFunctions"
+import { ListElementEntity, SomeEntity } from "../common/EntityTypes"
+import { CacheMode, type EntityRestInterface } from "./rest/EntityRestClient"
+import { ProgressMonitorDelegate } from "./ProgressMonitorDelegate"
+
+export class EventInstancePrefetcher {
+	constructor(private readonly entityCache: EntityRestInterface) {}
+
+	/**
+	 * We preload list element entities when we get updates for multiple instances of a single list,
+	 * so that single-item requests for those instances will be served from the cache.
+	 */
+	public async preloadEntities(allEventsFromAllBatch: Array<EntityUpdateData>, progressMonitor: ProgressMonitorDelegate): Promise<void> {
+		const start = new Date().getTime()
+		console.log("====== PREFETCH ============")
+		const preloadMap = await this.groupedListElementUpdatedInstances(allEventsFromAllBatch, progressMonitor)
+		await this.loadGroupedListElementEntities(allEventsFromAllBatch, preloadMap, progressMonitor)
+		console.log("====== PREFETCH END ============", new Date().getTime() - start, "ms")
+	}
+
+	private async loadGroupedListElementEntities(
+		allEventsFromAllBatch: Array<EntityUpdateData>,
+		preloadMap: Map<string, Map<Id, Map<Id, number[]>>>,
+		progressMonitor: ProgressMonitorDelegate,
+	): Promise<void> {
+		for (const [typeRefString, groupedListIds] of preloadMap.entries()) {
+			const typeRef = parseTypeString(typeRefString) as TypeRef<ListElementEntity>
+			for (const [listId, elementIdsAndIndexes] of groupedListIds.entries()) {
+				// This prevents requests to conversationEntries, which were always single requests
+				if (elementIdsAndIndexes.size > 1) {
+					try {
+						const elementIds = Array.from(elementIdsAndIndexes.keys())
+						const instances = await this.entityCache.loadMultiple<ListElementEntity>(typeRef, listId, elementIds, undefined, {
+							cacheMode: CacheMode.WriteOnly,
+						})
+						if (isSameTypeRef(MailTypeRef, typeRef)) {
+							await this.fetchMailDetailsBlob(instances)
+						}
+						this.setEventsWithInstancesAsPrefetched(allEventsFromAllBatch, instances, elementIdsAndIndexes, progressMonitor)
+					} catch (e) {
+						if (isExpectedErrorForSynchronization(e)) {
+							console.log(`could not preload, probably lost group membership (or not added yet) for list ${typeRefString}/${listId}`)
+						} else {
+							console.warn(`failed to preload ${typeRefString}/${listId}`, e)
+						}
+					}
+				}
+			}
+		}
+	}
+
+	private async fetchMailDetailsBlob(instances: Array<SomeEntity>) {
+		const mailsWithMailDetails = instances.filter((mail: Mail) => isNotNull(mail.mailDetails)) as Array<Mail>
+		const mailDetailsByList = groupBy(mailsWithMailDetails, (m) => listIdPart(assertNotNull(m.mailDetails)))
+		for (const [listId, mails] of mailDetailsByList.entries()) {
+			const mailDetailsElementIds = mails.map((m) => elementIdPart(assertNotNull(m.mailDetails)))
+			const initialMap: Map<Id, Mail> = new Map()
+			const mailDetailsElementIdToMail = mails.reduce((previous: Map<Id, Mail>, current) => {
+				previous.set(elementIdPart(assertNotNull(current.mailDetails)), current)
+				return previous
+			}, initialMap)
+			await this.entityCache.loadMultiple(
+				MailDetailsBlobTypeRef,
+				listId,
+				mailDetailsElementIds,
+				async (mailDetailsElementId: Id) => {
+					const mail = assertNotNull(mailDetailsElementIdToMail.get(mailDetailsElementId))
+					return {
+						key: mail._ownerEncSessionKey,
+						encryptingKeyVersion: parseKeyVersion(mail._ownerKeyVersion ?? "0"),
+					} as VersionedEncryptedKey
+				},
+				{ cacheMode: CacheMode.ReadAndWrite },
+			)
+		}
+	}
+
+	private setEventsWithInstancesAsPrefetched(
+		allEventsFromAllBatch: Array<EntityUpdateData>,
+		instances: Array<ListElementEntity>,
+		elementIdsAndIndexes: Map<Id, number[]>,
+		progressMonitor: ProgressMonitorDelegate,
+	) {
+		for (const { _id } of instances) {
+			const elementId = elementIdPart(_id)
+			const elementEventBatchIndexes = elementIdsAndIndexes.get(elementId) || []
+			for (const index of elementEventBatchIndexes) {
+				allEventsFromAllBatch[index].isPrefetched = true
+				progressMonitor.workDone(1)
+			}
+		}
+	}
+
+	// @VisibleForTesting
+	public async groupedListElementUpdatedInstances(
+		allEventsFromAllBatch: Array<EntityUpdateData>,
+		progressMonitor: ProgressMonitorDelegate,
+	): Promise<Map<string, Map<Id, Map<Id, number[]>>>> {
+		const prefetchMap: Map<string, Map<Id, Map<Id, number[]>>> = new Map()
+		let total = 0
+		for (const [eventIndexInList, entityUpdateData] of allEventsFromAllBatch.entries()) {
+			const typeIdentifier = getTypeString(entityUpdateData.typeRef)
+			// If a CREATE update itself has an instance, we don't need to fetch it:
+			// the EntityRestCache will update the database.
+			// Likewise, if we have an UPDATE event with patches, we can re-create the server state locally (happens in the EntityRestCache);
+			// if we don't have this instance in the database, we don't need this event anyway.
+			const isCreateWithInstance = entityUpdateData.operation === OperationType.CREATE && entityUpdateData.instance != null
+			const isUpdateWithPatches = entityUpdateData.operation === OperationType.UPDATE && entityUpdateData.patches != null
+			const isListElement = entityUpdateData.instanceListId != ""
+			if (isCreateWithInstance || isUpdateWithPatches || !isListElement) {
+				progressMonitor.workDone(1)
+				total += 1
+				continue
+			}
+			if (entityUpdateData.operation === OperationType.DELETE) {
+				progressMonitor.workDone(1)
+				total += 1
+				continue
+			} else {
+				const isTypeIdentifierInitialized = prefetchMap.has(typeIdentifier)
+				if (!isTypeIdentifierInitialized) {
+					prefetchMap.set(typeIdentifier, new Map().set(entityUpdateData.instanceListId, new Map()))
+				}
+				const isInstanceListInitialized = prefetchMap?.get(typeIdentifier)?.has(entityUpdateData.instanceListId)
+				if (!isInstanceListInitialized) {
+					prefetchMap.get(typeIdentifier)?.set(entityUpdateData.instanceListId, new Map())
+				}
+				const isInstanceIdInitialized = prefetchMap?.get(typeIdentifier)?.get(entityUpdateData.instanceListId)?.has(entityUpdateData.instanceId)
+				if (!isTypeIdentifierInitialized || !isInstanceListInitialized || !isInstanceIdInitialized) {
+					prefetchMap.get(typeIdentifier)!.get(entityUpdateData.instanceListId)!.set(entityUpdateData.instanceId, [])
+				}
+			}
+			const singleEntityUpdateEventIndexes = prefetchMap.get(typeIdentifier)!.get(entityUpdateData.instanceListId)!.get(entityUpdateData.instanceId)!
+			singleEntityUpdateEventIndexes.push(eventIndexInList)
+			total += 1
+		}
+		return prefetchMap
+	}
+}
+
+/**
+ * Returns whether the error is expected for the cases where our local state might not be up-to-date with the server yet. E.g. we might be processing an update
+ * for the instance that was already deleted. Normally this would be optimized away but it might still happen due to timing.
+ */
+function isExpectedErrorForSynchronization(e: Error): boolean {
+	return e instanceof NotFoundError || e instanceof NotAuthorizedError
+}


@ -1,7 +1,4 @@
import { OperationType } from "../common/TutanotaConstants.js"
import { findAllAndRemove, isSameTypeRef } from "@tutao/tutanota-utils"
import { ConnectionError, ServiceUnavailableError } from "../common/error/RestError.js" import { ConnectionError, ServiceUnavailableError } from "../common/error/RestError.js"
import { ProgrammingError } from "../common/error/ProgrammingError.js"
import { ProgressMonitorDelegate } from "./ProgressMonitorDelegate.js" import { ProgressMonitorDelegate } from "./ProgressMonitorDelegate.js"
import { EntityUpdateData } from "../common/utils/EntityUpdateUtils" import { EntityUpdateData } from "../common/utils/EntityUpdateUtils"
@ -13,66 +10,11 @@ export type QueuedBatch = {
type WritableQueuedBatch = QueuedBatch & { events: EntityUpdateData[] } type WritableQueuedBatch = QueuedBatch & { events: EntityUpdateData[] }
export const enum EntityModificationType {
CREATE = "CREATE",
UPDATE = "UPDATE",
DELETE = "DELETE",
}
type QueueAction = (nextElement: QueuedBatch) => Promise<void> type QueueAction = (nextElement: QueuedBatch) => Promise<void>
/**
* Checks which modification is applied in the given batch for the entity id.
* @param batch entity updates of the batch.
* @private visibleForTests
*/
export function batchMod(batchId: Id, batch: ReadonlyArray<EntityUpdateData>, entityUpdate: EntityUpdateData): EntityModificationType {
for (const batchEvent of batch) {
if (
entityUpdate.instanceId === batchEvent.instanceId &&
entityUpdate.instanceListId === batchEvent.instanceListId &&
isSameTypeRef(entityUpdate.typeRef, batchEvent.typeRef)
) {
switch (batchEvent.operation) {
case OperationType.CREATE:
return EntityModificationType.CREATE
case OperationType.UPDATE:
return EntityModificationType.UPDATE
case OperationType.DELETE:
return EntityModificationType.DELETE
default:
throw new ProgrammingError(`Unknown operation: ${batchEvent.operation}`)
}
}
}
throw new ProgrammingError(
`Batch does not have events for ${entityUpdate.typeRef.app}/${entityUpdate.typeRef.typeId} ${lastOperationKey(entityUpdate)}, batchId: ${batchId}`,
)
}
// A key for _lastOperationForEntity.
// At runtime just an element id or listId/elementId.
// Adding brand for type safety.
type LastOperationKey = string & { __brand: "lastOpeKey" }
function lastOperationKey(update: EntityUpdateData): LastOperationKey {
const typeIdentifier = `${update.typeRef.app}/${update.typeRef.typeId}`
if (update.instanceListId) {
return `${typeIdentifier}/${update.instanceListId}/${update.instanceId}` as LastOperationKey
} else {
return `${typeIdentifier}/${update.instanceId}` as LastOperationKey
}
}
export class EventQueue { export class EventQueue {
/** Batches to process. Oldest first. */ /** Batches to process. Oldest first. */
private readonly eventQueue: Array<WritableQueuedBatch> public readonly eventQueue: Array<WritableQueuedBatch>
// the last processed operation for a given entity id
private readonly lastOperationForEntity: Map<LastOperationKey, QueuedBatch>
private processingBatch: QueuedBatch | null private processingBatch: QueuedBatch | null
private paused: boolean private paused: boolean
private progressMonitor: ProgressMonitorDelegate | null private progressMonitor: ProgressMonitorDelegate | null
@ -80,12 +22,10 @@ export class EventQueue {
/** /**
* @param tag identifier to make for better log messages * @param tag identifier to make for better log messages
* @param optimizationEnabled whether the queue should try to optimize events and remove unnecessary ones with the knowledge of newer ones
* @param queueAction which is executed for each batch. Must *never* throw. * @param queueAction which is executed for each batch. Must *never* throw.
*/ */
constructor(private readonly tag: string, private readonly optimizationEnabled: boolean, private readonly queueAction: QueueAction) { constructor(private readonly tag: string, private readonly queueAction: QueueAction) {
this.eventQueue = [] this.eventQueue = []
this.lastOperationForEntity = new Map()
this.processingBatch = null this.processingBatch = null
this.paused = false this.paused = false
this.progressMonitor = null this.progressMonitor = null
@ -113,97 +53,17 @@ export class EventQueue {
batchId, batchId,
} }
if (!this.optimizationEnabled) { newBatch.events.push(...newEvents)
newBatch.events.push(...newEvents)
} else {
this.optimizingAddEvents(newBatch, batchId, groupId, newEvents)
}
if (newBatch.events.length !== 0) { if (newBatch.events.length !== 0) {
this.eventQueue.push(newBatch) this.eventQueue.push(newBatch)
for (const update of newBatch.events) {
this.lastOperationForEntity.set(lastOperationKey(update), newBatch)
}
} }
// ensures that events are processed when not paused // ensures that events are processed when not **paused**
this.start() this.start()
return newBatch.events.length > 0 return newBatch.events.length > 0
} }
private optimizingAddEvents(newBatch: WritableQueuedBatch, batchId: Id, groupId: Id, newEvents: ReadonlyArray<EntityUpdateData>): void {
for (const newEvent of newEvents) {
const lastOpKey = lastOperationKey(newEvent)
const lastBatchForEntity = this.lastOperationForEntity.get(lastOpKey)
if (
lastBatchForEntity == null ||
(this.processingBatch != null && this.processingBatch === lastBatchForEntity) ||
groupId !== lastBatchForEntity.groupId
) {
// If there's no current operation, there's nothing to merge, just add
// If current operation is already being processed, don't modify it, we cannot merge anymore and should just append.
newBatch.events.push(newEvent)
} else {
const newEntityModification = batchMod(batchId, newEvents, newEvent)
const lastEntityModification = batchMod(lastBatchForEntity.batchId, lastBatchForEntity.events, newEvent)
if (newEntityModification === EntityModificationType.UPDATE) {
switch (lastEntityModification) {
case EntityModificationType.CREATE:
// Skip create because the create was not processed yet and we will download the updated version already
break
case EntityModificationType.UPDATE:
// Skip update because the previous update was not processed yet and we will download the updated version already
break
case EntityModificationType.DELETE:
throw new ProgrammingError(
`UPDATE not allowed after DELETE. Last batch: ${lastBatchForEntity.batchId}, new batch: ${batchId}, ${newEvent.typeRef.typeId} ${lastOpKey}`,
)
}
} else if (newEntityModification === EntityModificationType.DELETE) {
// delete all other events because they don't matter if the entity is already gone
this.removeEventsForInstance(lastOpKey)
// set last operation early to make sure that it's not some empty batch that is the last operation, otherwise batchMod will fail.
// this shouldn't happen (because delete + create for the same entity in the same batch is not really a thing) and is a bit hacky,
// but it works?
this.lastOperationForEntity.set(lastOpKey, newBatch)
// add delete event
newBatch.events.push(newEvent)
} else if (newEntityModification === EntityModificationType.CREATE) {
if (lastEntityModification === EntityModificationType.DELETE || lastEntityModification === EntityModificationType.CREATE) {
// It is likely custom id instance which got re-created
newBatch.events.push(newEvent)
} else {
throw new ProgrammingError(
`Impossible modification combination ${lastEntityModification} ${newEntityModification} ${JSON.stringify(newEvent)}`,
)
}
} else {
throw new ProgrammingError(
`Impossible modification combination ${lastEntityModification} ${newEntityModification} ${JSON.stringify(newEvent)}`,
)
}
}
}
}
private removeEventsForInstance(operationKey: LastOperationKey, startIndex: number = 0): void {
// We keep empty batches because we expect certain number of batches to be processed and it's easier to just keep them.
for (let i = startIndex; i < this.eventQueue.length; i++) {
const batchInThePast = this.eventQueue[i]
if (this.processingBatch === batchInThePast) {
continue
}
// this will remove all events for the element id from the batch
// we keep delete events because they don't hurt generally and we also want things to be timely deleted
findAllAndRemove(batchInThePast.events, (event) => event.operation !== OperationType.DELETE && lastOperationKey(event) === operationKey)
}
}
start() { start() {
if (this.processingBatch) { if (this.processingBatch) {
return return
@ -232,18 +92,11 @@ export class EventQueue {
this.progressMonitor?.workDone(1)
this.processingBatch = null
// When we are done with the batch, we don't want to merge with it anymore
for (const event of next.events) {
const concatenatedId = lastOperationKey(event)
if (this.lastOperationForEntity.get(concatenatedId) === next) {
this.lastOperationForEntity.delete(concatenatedId)
}
}
// do this *before* processNext() is called
this.processNext()
})
.catch((e) => {
console.log("EventQueue", this.tag, this.optimizationEnabled, "error", next, e)
console.log("EventQueue", this.tag, "error", next, e)
// processing continues if the event bus receives a new event
this.processingBatch = null
@ -260,10 +113,6 @@ export class EventQueue {
this.eventQueue.splice(0)
this.processingBatch = null
for (const k of this.lastOperationForEntity.keys()) {
this.lastOperationForEntity.delete(k)
}
}
pause() {

View file

@ -25,8 +25,16 @@ import {
PublicKeyIdentifierType,
SYSTEM_GROUP_MAIL_ADDRESS,
} from "../../common/TutanotaConstants"
import { HttpMethod, TypeModelResolver } from "../../common/EntityFunctions"
import { HttpMethod, PatchOperationType, TypeModelResolver } from "../../common/EntityFunctions"
import { BucketPermission, GroupMembership, InstanceSessionKey, PatchListTypeRef, Permission } from "../../entities/sys/TypeRefs.js"
import {
BucketPermission,
createPatch,
createPatchList,
GroupMembership,
InstanceSessionKey,
PatchListTypeRef,
Permission,
} from "../../entities/sys/TypeRefs.js"
import {
BucketPermissionTypeRef,
createInstanceSessionKey,
@ -87,6 +95,9 @@ import { KeyRotationFacade } from "../facades/KeyRotationFacade.js"
import { InstancePipeline } from "./InstancePipeline"
import { EntityAdapter } from "./EntityAdapter"
import { typeModelToRestPath } from "../rest/EntityRestClient"
import { convertJsToDbType } from "./ModelMapper"
import { ValueType } from "../../common/EntityConstants"
import { AttributeModel } from "../../common/AttributeModel"
assertWorkerOrNode()
@ -774,7 +785,6 @@ export class CryptoFacade {
private async updateOwnerEncSessionKey(instance: EntityAdapter, ownerGroupKey: VersionedKey, resolvedSessionKey: AesKey) {
const newOwnerEncSessionKey = encryptKeyWithVersionedKey(ownerGroupKey, resolvedSessionKey)
const oldInstance = structuredClone(instance)
this.setOwnerEncSessionKey(instance, newOwnerEncSessionKey)
const id = instance._id
@ -783,19 +793,27 @@ export class CryptoFacade {
const headers = this.userFacade.createAuthHeaders()
headers.v = String(instance.typeModel.version)
const untypedInstance = await this.instancePipeline.typeMapper.applyDbTypes(
instance.typeModel as ClientTypeModel,
instance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
)
const patchList = await computePatchPayload(
oldInstance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
instance.encryptedParsedInstance as ClientModelEncryptedParsedInstance,
untypedInstance,
instance.typeModel,
this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver),
env.networkDebugging,
)
let ownerEncSessionKeyAttributeIdStr = assertNotNull(AttributeModel.getAttributeId(typeModel, "_ownerEncSessionKey")).toString()
let ownerKeyVersionAttributeIdStr = assertNotNull(AttributeModel.getAttributeId(typeModel, "_ownerKeyVersion")).toString()
if (env.networkDebugging) {
ownerEncSessionKeyAttributeIdStr += ":_ownerEncSessionKey"
ownerKeyVersionAttributeIdStr += ":_ownerKeyVersion"
}
const patchList = createPatchList({
patches: [
createPatch({
patchOperation: PatchOperationType.REPLACE,
value: uint8ArrayToBase64(newOwnerEncSessionKey.key),
attributePath: ownerEncSessionKeyAttributeIdStr,
}),
createPatch({
patchOperation: PatchOperationType.REPLACE,
value: newOwnerEncSessionKey.encryptingKeyVersion.toString(),
attributePath: ownerKeyVersionAttributeIdStr,
}),
],
})
const patchPayload = await this.instancePipeline.mapAndEncrypt(PatchListTypeRef, patchList, null)

View file

@ -137,7 +137,7 @@ export class CryptoMapper {
return decrypted
}
private async decryptAggregateAssociation(
public async decryptAggregateAssociation(
associationServerTypeModel: ServerTypeModel | ClientTypeModel,
encryptedInstanceValues: Array<ServerModelEncryptedParsedInstance>,
sk: Nullable<AesKey>,

View file

@ -10,7 +10,7 @@ import { TypeModelResolver } from "../../common/EntityFunctions"
assertWorkerOrNode()
export const UPDATE_SESSION_KEYS_SERVICE_DEBOUNCE_MS = 50
export const UPDATE_SESSION_KEYS_SERVICE_DEBOUNCE_MS = 2500
/**
* This queue collects updates for ownerEncSessionKeys and debounces the update request to the UpdateSessionKeysService,

View file

@ -0,0 +1,380 @@
// read from the offline db according to the list and element id on the entityUpdate
// decrypt encrypted fields using the OwnerEncSessionKey on the entry from the offline db
// apply patch operations using a similar logic from the server
// update the instance in the offline db
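// Example (hypothetical attribute ids): a patch { attributePath: "1342/aggId1/1350", patchOperation: REPLACE, value: "..." }
// is applied by resolving aggregation attribute 1342 on the root instance, locating the aggregate whose _id is "aggId1",
// and then replacing attribute 1350 on that aggregate with the (decrypted, if necessary) value.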
import {
EncryptedParsedAssociation,
EncryptedParsedValue,
Entity,
ModelValue,
ParsedAssociation,
ParsedInstance,
ParsedValue,
ServerModelEncryptedParsedInstance,
ServerModelParsedInstance,
ServerModelUntypedInstance,
ServerTypeModel,
} from "../../common/EntityTypes"
import { Patch } from "../../entities/sys/TypeRefs"
import { assertNotNull, Base64, deepEqual, isEmpty, lazy, promiseMap, TypeRef } from "@tutao/tutanota-utils"
import { AttributeModel } from "../../common/AttributeModel"
import { CacheStorage } from "../rest/DefaultEntityRestCache"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { PatchOperationError } from "../../common/error/PatchOperationError"
import { AssociationType, Cardinality } from "../../common/EntityConstants"
import { PatchOperationType, ServerTypeModelResolver } from "../../common/EntityFunctions"
import { InstancePipeline } from "../crypto/InstancePipeline"
import { isSameId, removeTechnicalFields } from "../../common/utils/EntityUtils"
import { convertDbToJsType } from "../crypto/ModelMapper"
import { decryptValue } from "../crypto/CryptoMapper"
import { VersionedEncryptedKey } from "../crypto/CryptoWrapper"
import { AesKey, extractIvFromCipherText } from "@tutao/tutanota-crypto"
import { CryptoFacade } from "../crypto/CryptoFacade"
import { parseKeyVersion } from "../facades/KeyLoaderFacade"
export class PatchMerger {
constructor(
private readonly cacheStorage: CacheStorage,
public readonly instancePipeline: InstancePipeline,
private readonly serverTypeResolver: ServerTypeModelResolver,
private readonly cryptoFacade: lazy<CryptoFacade>,
) {}
// visible for testing
public async getPatchedInstanceParsed(
instanceType: TypeRef<Entity>,
listId: Nullable<Id>,
elementId: Id,
patches: Array<Patch>,
): Promise<ServerModelParsedInstance | null> {
const parsedInstance = await this.cacheStorage.getParsed(instanceType, listId, elementId)
if (parsedInstance != null) {
const typeModel = await this.serverTypeResolver.resolveServerTypeReference(instanceType)
// We need to preserve the order of patches, so no promiseMap here
for (const patch of patches) {
await this.applySinglePatch(parsedInstance, typeModel, patch)
}
return parsedInstance
}
return null
}
public async patchAndStoreInstance(
instanceType: TypeRef<Entity>,
listId: Nullable<Id>,
elementId: Id,
patches: Array<Patch>,
): Promise<Nullable<ServerModelParsedInstance>> {
const patchAppliedInstance = await this.getPatchedInstanceParsed(instanceType, listId, elementId, patches)
if (patchAppliedInstance == null) {
return null
}
await this.cacheStorage.put(instanceType, patchAppliedInstance)
return patchAppliedInstance
}
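// Usage sketch (ids are illustrative): this is how entity event processing is expected to call it.
// A null result means the instance was not in the offline db, so the caller falls back to a full download:
//   const merged = await patchMerger.patchAndStoreInstance(MailTypeRef, "listId1", "elementId1", update.patches)
//   if (merged == null) { /* re-download the instance instead */ }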
private async applySinglePatch(parsedInstance: ServerModelParsedInstance, typeModel: ServerTypeModel, patch: Patch) {
try {
const pathList: Array<string> = patch.attributePath.split("/") //== /$mailId/$attrIdRecipient/${aggregateIdRecipient}/${attrIdName}
const pathResult: PathResult = await this.traversePath(parsedInstance, typeModel, pathList)
const attributeId = pathResult.attributeId
const pathResultTypeModel = pathResult.typeModel
// We need to map and decrypt for REPLACE and ADDITEM as the payloads are encrypted, REMOVEITEM only has either aggregate ids, generated ids, or id tuples
if (patch.patchOperation !== PatchOperationType.REMOVE_ITEM) {
const encryptedParsedValue: Nullable<EncryptedParsedValue | EncryptedParsedAssociation> = await this.parseValueOnPatch(pathResult, patch.value)
const isAggregation = pathResultTypeModel.associations[attributeId]?.type === AssociationType.Aggregation
const isEncryptedValue = pathResultTypeModel.values[attributeId]?.encrypted
let value: Nullable<ParsedValue | ParsedAssociation>
if ((isAggregation && typeModel.encrypted) || isEncryptedValue) {
const sk = await this.getSessionKey(parsedInstance, typeModel)
value = await this.decryptValueOnPatchIfNeeded(pathResult, encryptedParsedValue, sk)
} else {
value = await this.decryptValueOnPatchIfNeeded(pathResult, encryptedParsedValue, null)
}
await this.applyPatchOperation(patch.patchOperation, pathResult, value)
} else {
let idArray = JSON.parse(patch.value!) as Array<any>
await this.applyPatchOperation(patch.patchOperation, pathResult, idArray)
}
} catch (e) {
throw new PatchOperationError(e)
}
}
public async getSessionKey(parsedInstance: ServerModelParsedInstance, typeModel: ServerTypeModel) {
const _ownerEncSessionKey = AttributeModel.getAttribute<Uint8Array>(parsedInstance, "_ownerEncSessionKey", typeModel)
const _ownerKeyVersion = parseKeyVersion(AttributeModel.getAttribute<string>(parsedInstance, "_ownerKeyVersion", typeModel))
const _ownerGroup = AttributeModel.getAttribute<Id>(parsedInstance, "_ownerGroup", typeModel)
const versionedEncryptedKey = {
encryptingKeyVersion: _ownerKeyVersion,
key: _ownerEncSessionKey,
} as VersionedEncryptedKey
return await this.cryptoFacade().decryptSessionKey(_ownerGroup, versionedEncryptedKey)
}
private async applyPatchOperation(
patchOperation: Values<PatchOperationType>,
pathResult: PathResult,
value: Nullable<ParsedValue | ParsedAssociation> | Array<Id | IdTuple>,
) {
const { attributeId, instanceToChange, typeModel } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAssociation = typeModel.associations[attributeId] !== undefined
const isAggregationAssociation = isAssociation && typeModel.associations[attributeId].type === AssociationType.Aggregation
switch (patchOperation) {
case PatchOperationType.ADD_ITEM: {
if (isValue) {
throw new PatchOperationError(
"AddItem operation is supported for associations only, but the operation was called on value with id " + attributeId,
)
}
let associationArray = instanceToChange[attributeId] as ParsedAssociation
const valuesToAdd = value as ParsedAssociation
const commonAssociationItems = associationArray.filter((association) => valuesToAdd.some((item) => deepEqual(item, association)))
if (!isEmpty(commonAssociationItems)) {
console.log(
`PatchMerger attempted to add an already existing item to an association. Common items: ${JSON.stringify(commonAssociationItems)}`,
)
}
if (isAggregationAssociation) {
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const aggregationsWithCommonIdsButDifferentValues = associationArray.filter((aggregate: ParsedInstance) =>
valuesToAdd.some((item: ParsedInstance) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
const itemWithoutFinalIvs = removeTechnicalFields(structuredClone(item))
const aggregateWithoutFinalIvs = removeTechnicalFields(structuredClone(aggregate))
return (
aggregate[aggregateIdAttributeId] === item[aggregateIdAttributeId] && !deepEqual(itemWithoutFinalIvs, aggregateWithoutFinalIvs)
)
}),
)
if (!isEmpty(aggregationsWithCommonIdsButDifferentValues)) {
throw new PatchOperationError(
`PatchMerger attempted to add an existing aggregate with different values.
existing items: ${JSON.stringify(associationArray)},
values attempted to be added: ${JSON.stringify(valuesToAdd)}`,
)
}
}
const newAssociationValue = associationArray.concat(valuesToAdd)
instanceToChange[attributeId] = distinctAssociations(newAssociationValue)
break
}
case PatchOperationType.REMOVE_ITEM: {
if (isValue) {
throw new PatchOperationError(
"AddItem operation is supported for associations only, but the operation was called on value with id " + attributeId,
)
}
if (!isAggregationAssociation) {
const associationArray = instanceToChange[attributeId] as Array<Id | IdTuple>
const idsToRemove = value as Array<Id | IdTuple>
const remainingAssociations = associationArray.filter(
(element) =>
!idsToRemove.some((item) => {
return isSameId(element, item) // use is same id on the ids instead
}),
)
instanceToChange[attributeId] = distinctAssociations(remainingAssociations)
} else {
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const aggregationArray = instanceToChange[attributeId] as Array<ParsedInstance>
const idsToRemove = value as Array<Id>
const remainingAggregations = aggregationArray.filter(
(element) =>
!idsToRemove.some((item) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
return isSameId(item as Id, element[aggregateIdAttributeId] as Id)
}),
)
instanceToChange[attributeId] = distinctAssociations(remainingAggregations)
}
break
}
case PatchOperationType.REPLACE: {
if (isValue) {
instanceToChange[attributeId] = value as ParsedValue
} else if (!isAggregationAssociation) {
instanceToChange[attributeId] = value as ParsedAssociation
} else {
throw new PatchOperationError("attempted to replace aggregation " + typeModel.associations[attributeId].name + " on " + typeModel.name)
}
break
}
}
}
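// Illustration (hypothetical data): for a non-aggregate association currently ["idA", "idB"],
// ADDITEM with ["idC"] yields ["idA", "idB", "idC"] (deduplicated via distinctAssociations),
// REMOVEITEM with ["idA"] filters by isSameId and yields ["idB"],
// and REPLACE on a value attribute simply overwrites the stored value.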
private async parseValueOnPatch(
pathResult: PathResult,
value: string | null,
): Promise<Nullable<EncryptedParsedValue> | Nullable<EncryptedParsedAssociation>> {
const { typeModel, attributeId } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAssociation = typeModel.associations[attributeId] !== undefined
const isAggregation = isAssociation && typeModel.associations[attributeId].type === AssociationType.Aggregation
const isNonAggregateAssociation = isAssociation && !isAggregation
if (isValue) {
const valueInfo = typeModel.values[attributeId]
const valueType = valueInfo.type
if (value == null || value === "" || valueInfo.encrypted) {
return value
} else {
return convertDbToJsType(valueType, value)
}
} else if (isAssociation) {
if (isNonAggregateAssociation) {
return JSON.parse(value!)
} else {
const aggregatedEntities = JSON.parse(value!) as Array<ServerModelUntypedInstance>
aggregatedEntities.map(AttributeModel.removeNetworkDebuggingInfoIfNeeded)
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
return await promiseMap(
aggregatedEntities,
async (entity: ServerModelUntypedInstance) => await this.instancePipeline.typeMapper.applyJsTypes(aggregationTypeModel, entity),
)
}
}
return null
}
private async decryptValueOnPatchIfNeeded(
pathResult: PathResult,
value: Nullable<EncryptedParsedValue | EncryptedParsedAssociation>,
sk: Nullable<AesKey>,
): Promise<Nullable<ParsedValue> | Nullable<ParsedAssociation>> {
const { typeModel, attributeId } = pathResult
const isValue = typeModel.values[attributeId] !== undefined
const isAggregation = typeModel.associations[attributeId] !== undefined && typeModel.associations[attributeId].type === AssociationType.Aggregation
if (isValue) {
if (sk !== null) {
const encryptedValueInfo = typeModel.values[attributeId] as ModelValue & { encrypted: true }
const encryptedValue = value
if (encryptedValue == null) {
delete pathResult.instanceToChange._finalIvs[attributeId]
} else if (encryptedValue === "") {
// the encrypted value is "" if the decrypted value is the default value
// storing this marker lets us restore that empty string when we re-encrypt the instance.
// check out encrypt in CryptoMapper to see the other side of this.
pathResult.instanceToChange._finalIvs[attributeId] = null
} else if (encryptedValueInfo.final && encryptedValue) {
// the server needs to be able to check if an encrypted final field changed.
// that's only possible if we re-encrypt using a deterministic IV, because the ciphertext changes if
// the IV or the value changes.
// storing the IV we used for the initial encryption lets us reuse it later.
pathResult.instanceToChange._finalIvs[attributeId] = extractIvFromCipherText(encryptedValue as Base64)
}
return decryptValue(encryptedValueInfo, encryptedValue as Base64, sk)
}
return value
} else if (isAggregation) {
const encryptedAggregatedEntities = value as Array<ServerModelEncryptedParsedInstance>
const modelAssociation = typeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? typeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
return await this.instancePipeline.cryptoMapper.decryptAggregateAssociation(aggregationTypeModel, encryptedAggregatedEntities, sk)
}
return value // id and idTuple associations are never encrypted
}
private async traversePath(parsedInstance: ServerModelParsedInstance, serverTypeModel: ServerTypeModel, path: Array<string>): Promise<PathResult> {
if (path.length == 0) {
throw new PatchOperationError("Invalid attributePath, expected non-empty attributePath")
}
const pathItem = path.shift()!
try {
let attributeId: number
if (env.networkDebugging) {
attributeId = parseInt(pathItem.split(":")[0])
} else {
attributeId = parseInt(pathItem)
}
if (!Object.keys(parsedInstance).some((attribute) => attribute == attributeId.toString())) {
throw new PatchOperationError("attribute id " + attributeId + " not found on the parsed instance. Type: " + serverTypeModel.name)
}
if (path.length == 0) {
return {
attributeId: attributeId,
instanceToChange: parsedInstance,
typeModel: serverTypeModel,
} as PathResult
}
const isAggregation = serverTypeModel.associations[attributeId].type === AssociationType.Aggregation
if (!isAggregation) {
throw new PatchOperationError("Expected the attribute id " + attributeId + " to be an aggregate on the type: " + serverTypeModel.name)
}
const modelAssociation = serverTypeModel.associations[attributeId]
const appName = modelAssociation.dependency ?? serverTypeModel.app
const aggregationTypeModel = await this.serverTypeResolver.resolveServerTypeReference(new TypeRef(appName, modelAssociation.refTypeId))
const maybeAggregateIdPathItem = path.shift()!
const aggregateArray = parsedInstance[attributeId] as Array<ServerModelParsedInstance>
const aggregatedEntity = assertNotNull(
aggregateArray.find((entity) => {
const aggregateIdAttributeId = assertNotNull(AttributeModel.getAttributeId(aggregationTypeModel, "_id"))
return isSameId(maybeAggregateIdPathItem, entity[aggregateIdAttributeId] as Id)
}),
)
return this.traversePath(aggregatedEntity, aggregationTypeModel, path)
} catch (e) {
throw new PatchOperationError("An error occurred while traversing path " + path + e.message)
}
}
private assertCorrectAssociationCardinality(pathResult: PathResult, valuesToAdd: ParsedAssociation): void {
const modelAssociation = pathResult.typeModel.associations[pathResult.attributeId]!
const cardinality = modelAssociation.cardinality
if ((cardinality == Cardinality.ZeroOrOne && valuesToAdd.length > 1) || (cardinality == Cardinality.One && valuesToAdd.length != 1)) {
throw new PatchOperationError(
`invalid value / cardinality combination for value ${pathResult.attributeId} on association ${modelAssociation.name}: ${cardinality}, val.len: ${valuesToAdd.length}`,
)
}
}
private assertCorrectValueCardinality(pathResult: PathResult, valueToAdd: Nullable<ParsedValue>): void {
const modelValue = pathResult.typeModel.values[pathResult.attributeId]
const cardinality = modelValue.cardinality
if (cardinality == Cardinality.One && valueToAdd === null) {
throw new PatchOperationError(
`invalid value / cardinality combination for value ${pathResult.attributeId} on value ${modelValue.name}: ${cardinality}, isNull: ${true}`,
)
}
}
}
export function distinctAssociations(associationArray: ParsedAssociation) {
return associationArray.reduce((acc: Array<any>, current) => {
if (
!acc.some((item) => {
if (item._finalIvs !== undefined) {
const itemWithoutFinalIvs = removeTechnicalFields(structuredClone(item) as ParsedInstance)
const currentWithoutFinalIvs = removeTechnicalFields(structuredClone(current) as ParsedInstance)
return deepEqual(itemWithoutFinalIvs, currentWithoutFinalIvs)
}
return deepEqual(item, current)
})
) {
acc.push(current)
}
return acc
}, [])
}
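// e.g. distinctAssociations([a, a, b]) returns [a, b]; aggregates are compared with deepEqual after
// stripping technical fields (such as _finalIvs), so re-encrypted but otherwise identical aggregates collapse too.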
export type PathResult = {
instanceToChange: ServerModelParsedInstance
attributeId: number
typeModel: ServerTypeModel
}

View file

@ -1,5 +1,4 @@
import { ListElementEntity, SomeEntity } from "../../common/EntityTypes"
import { QueuedBatch } from "../EventQueue.js"
import { ProgrammingError } from "../../common/error/ProgrammingError"
import { TypeRef } from "@tutao/tutanota-utils"
import { EntityRestCache } from "./DefaultEntityRestCache.js"
@ -15,6 +14,10 @@ export class AdminClientDummyEntityRestCache implements EntityRestCache {
throw new ProgrammingError("erase not implemented") throw new ProgrammingError("erase not implemented")
} }
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void> {
throw new Error("deleteFromCacheIdExists not implemented.")
}
async eraseMultiple<T extends SomeEntity>(listId: Id, instances: Array<T>): Promise<void> {
throw new ProgrammingError("eraseMultiple not implemented")
}

View file

@ -1,5 +1,4 @@
import {
CacheMode,
EntityRestClient,
EntityRestClientEraseOptions,
EntityRestClientLoadOptions,
@ -9,7 +8,7 @@ import {
OwnerEncSessionKeyProvider,
} from "./EntityRestClient"
import { OperationType } from "../../common/TutanotaConstants"
import { assertNotNull, getFirstOrThrow, getTypeString, groupBy, isSameTypeRef, lastThrow, TypeRef } from "@tutao/tutanota-utils"
import { assertNotNull, deepEqual, getFirstOrThrow, getTypeString, isEmpty, isSameTypeRef, lastThrow, TypeRef } from "@tutao/tutanota-utils"
import {
AuditLogEntryTypeRef,
BucketPermissionTypeRef,
@ -26,9 +25,9 @@ import {
UserGroupRootTypeRef,
} from "../../entities/sys/TypeRefs.js"
import { ValueType } from "../../common/EntityConstants.js"
import { NotAuthorizedError, NotFoundError } from "../../common/error/RestError"
import { CalendarEventUidIndexTypeRef, MailDetailsBlobTypeRef, MailSetEntryTypeRef, MailTypeRef } from "../../entities/tutanota/TypeRefs.js"
import {
computePatches,
CUSTOM_MAX_ID,
CUSTOM_MIN_ID,
elementIdPart,
@ -36,19 +35,30 @@ import {
GENERATED_MAX_ID,
GENERATED_MIN_ID,
get_IdValue,
getElementId,
isCustomIdType,
listIdPart,
} from "../../common/utils/EntityUtils"
import { ProgrammingError } from "../../common/error/ProgrammingError"
import { assertWorkerOrNode } from "../../common/Env"
import type { Entity, ListElementEntity, ServerModelParsedInstance, SomeEntity, TypeModel } from "../../common/EntityTypes"
import type {
ClientModelParsedInstance,
ClientTypeModel,
Entity,
ListElementEntity,
ServerModelParsedInstance,
SomeEntity,
TypeModel,
} from "../../common/EntityTypes"
import { ENTITY_EVENT_BATCH_EXPIRE_MS } from "../EventBusClient"
import { CustomCacheHandlerMap } from "./cacheHandler/CustomCacheHandler.js"
import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../common/utils/EntityUpdateUtils.js"
import { EntityUpdateData } from "../../common/utils/EntityUpdateUtils.js"
import { TypeModelResolver } from "../../common/EntityFunctions"
import { AttributeModel } from "../../common/AttributeModel"
import { collapseId, expandId } from "./RestClientIdUtils"
import { PatchMerger } from "../offline/PatchMerger"
import { NotAuthorizedError, NotFoundError } from "../../common/error/RestError"
import { Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { BitArray } from "@tutao/tutanota-crypto"
assertWorkerOrNode()
@ -120,8 +130,14 @@ export interface EntityRestCache extends EntityRestInterface {
* Detect if out of sync based on stored "lastUpdateTime" and the current server time
*/
isOutOfSync(): Promise<boolean>
/**
* Delete a cached entity. Sometimes this is necessary to ensure you always load the new version
*/
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void>
}
// todo: remove this and use from offlineStorage.ts/Range
export type Range = { lower: Id; upper: Id }
export type LastUpdateTime = { type: "recorded"; time: number } | { type: "never" } | { type: "uninitialized" }
@ -275,6 +291,7 @@ export class DefaultEntityRestCache implements EntityRestCache {
private readonly entityRestClient: EntityRestClient,
private readonly storage: CacheStorage,
private readonly typeModelResolver: TypeModelResolver,
private readonly patchMerger: PatchMerger,
) {}
async load<T extends SomeEntity>(typeRef: TypeRef<T>, id: PropertyType<T, "_id">, opts: EntityRestClientLoadOptions = {}): Promise<T> {
@ -376,11 +393,10 @@ export class DefaultEntityRestCache implements EntityRestCache {
return this.entityRestClient.getRestClient().getServerTimestampMs()
}
/**
* Delete a cached entity. Sometimes this is necessary to do to ensure you always load the new version
*/
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Id): Promise<void> {
return this.storage.deleteIfExists(typeRef, listId, elementId)
}
async deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Iterable<Id>): Promise<void> {
for (const eId of elementId) {
await this.storage.deleteIfExists(typeRef, listId, eId)
}
}
private async _loadMultiple<T extends SomeEntity>(
@ -726,90 +742,32 @@ export class DefaultEntityRestCache implements EntityRestCache {
*
* @return Promise, which resolves to the array of valid events (if response is NotFound or NotAuthorized we filter it out)
*/
// discuss:
// this function no longer makes use of the network client, so it is probably better to move it elsewhere. We also had the idea of a new call
// that combines prefetching of instances with this processing.
// if we do that, we can also remove this from the EntityRestCache interface
async entityEventsReceived(events: readonly EntityUpdateData[], batchId: Id, groupId: Id): Promise<readonly EntityUpdateData[]> {
await this.recordSyncTime()
// we handle post multiple create operations separately to optimize the number of requests with getMultiple
const regularUpdates = events.filter((u) => u.typeRef.app !== "monitor")
const createUpdatesForLETs: EntityUpdateData[] = []
const regularUpdates: EntityUpdateData[] = [] // all updates not resulting from post multiple requests
for (const update of events) {
// monitor application is ignored
if (update.typeRef.app === "monitor") continue
// mailSetEntries are ignored because move operations are handled as a special event (and no post multiple is possible)
if (
update.operation === OperationType.CREATE &&
getUpdateInstanceId(update).instanceListId != null &&
!isUpdateForTypeRef(MailTypeRef, update) &&
!isUpdateForTypeRef(MailSetEntryTypeRef, update)
) {
createUpdatesForLETs.push(update)
} else {
regularUpdates.push(update)
}
}
const createUpdatesForLETsPerList = groupBy(createUpdatesForLETs, (update) => update.instanceListId)
const postMultipleEventUpdates: EntityUpdateData[][] = []
// we first handle potential post multiple updates in get multiple requests
for (let [instanceListId, updates] of createUpdatesForLETsPerList) {
const firstUpdate = updates[0]
const typeRef = firstUpdate.typeRef
const ids = updates.map((update) => update.instanceId)
// We only want to load the instances that are in cache range
const customHandler = this.storage.getCustomCacheHandlerMap().get(typeRef)
const idsInCacheRange =
customHandler && customHandler.getElementIdsInCacheRange
? await customHandler.getElementIdsInCacheRange(this.storage, instanceListId, ids)
: await this.getElementIdsInCacheRange(typeRef, instanceListId, ids)
if (idsInCacheRange.length === 0) {
postMultipleEventUpdates.push(updates)
} else {
const updatesNotInCacheRange =
idsInCacheRange.length === updates.length ? [] : updates.filter((update) => !idsInCacheRange.includes(update.instanceId))
try {
// loadMultiple is only called to cache the elements and check which ones return errors
const returnedInstances = await this._loadMultiple(typeRef, instanceListId, idsInCacheRange, undefined, { cacheMode: CacheMode.WriteOnly })
//We do not want to pass updates that caused an error
if (returnedInstances.length !== idsInCacheRange.length) {
const returnedIds = returnedInstances.map((instance) => getElementId(instance))
postMultipleEventUpdates.push(updates.filter((update) => returnedIds.includes(update.instanceId)).concat(updatesNotInCacheRange))
} else {
postMultipleEventUpdates.push(updates)
}
} catch (e) {
if (e instanceof NotAuthorizedError) {
// return updates that are not in cache Range if NotAuthorizedError (for those updates that are in cache range)
postMultipleEventUpdates.push(updatesNotInCacheRange)
} else {
throw e
}
}
}
}
// we need an array of UpdateEntityData
const otherEventUpdates: EntityUpdateData[] = []
const filteredUpdateEvents: EntityUpdateData[] = []
for (let update of regularUpdates) {
const { operation, typeRef } = update
const { instanceListId, instanceId } = getUpdateInstanceId(update)
switch (operation) {
case OperationType.UPDATE: {
const handledUpdate = await this.processUpdateEvent(typeRef, update)
const handledUpdate = await this.processUpdateEvent(update)
if (handledUpdate) {
otherEventUpdates.push(handledUpdate)
filteredUpdateEvents.push(handledUpdate)
}
break // do break instead of continue to avoid ide warnings
}
case OperationType.DELETE: {
if (isSameTypeRef(MailSetEntryTypeRef, typeRef) && containsEventOfType(events, OperationType.CREATE, instanceId)) {
// move for mail is handled in create event.
} else if (isSameTypeRef(MailTypeRef, typeRef)) {
if (isSameTypeRef(MailTypeRef, typeRef)) {
// delete mailDetails if they are available (as we don't send an event for this type)
const mail = await this.storage.get(typeRef, instanceListId, instanceId)
if (mail) {
@ -822,13 +780,13 @@ export class DefaultEntityRestCache implements EntityRestCache {
} else {
await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
}
otherEventUpdates.push(update)
filteredUpdateEvents.push(update)
break // do break instead of continue to avoid ide warnings
}
case OperationType.CREATE: {
const handledUpdate = await this.processCreateEvent(typeRef, update, events)
const handledUpdate = await this.processCreateEvent(typeRef, update)
if (handledUpdate) {
otherEventUpdates.push(handledUpdate)
filteredUpdateEvents.push(handledUpdate)
}
break // do break instead of continue to avoid ide warnings
}
@ -865,112 +823,121 @@ export class DefaultEntityRestCache implements EntityRestCache {
// the whole batch has been written successfully
await this.storage.putLastBatchIdForGroup(groupId, batchId)
// merge the results
return otherEventUpdates.concat(postMultipleEventUpdates.flat())
return filteredUpdateEvents
}
/** Returns {null} when the update should be skipped. */
private async processCreateEvent(
typeRef: TypeRef<any>,
update: EntityUpdateData,
batch: ReadonlyArray<EntityUpdateData>,
): Promise<EntityUpdateData | null> {
// do not return undefined to avoid implicit returns
const { instanceId, instanceListId } = getUpdateInstanceId(update)
// We put new instances into cache only when it's a new instance in the cached range which is only for the list instances.
if (instanceListId != null) {
const deleteEvent = getEventOfType(batch, OperationType.DELETE, instanceId)
// TODO This is basically a patch for the mailSetEntry
const mailSetEntry =
deleteEvent && isSameTypeRef(MailSetEntryTypeRef, typeRef)
? await this.storage.getParsed(typeRef, deleteEvent.instanceListId, instanceId)
: null
// avoid downloading new mailSetEntry in case of move event (DELETE + CREATE)
if (deleteEvent != null && mailSetEntry != null) {
// It is a move event for cached mailSetEntry
await this.storage.deleteIfExists(typeRef, deleteEvent.instanceListId, instanceId)
await this.updateListIdOfMailSetEntryAndUpdateCache(mailSetEntry, instanceListId, instanceId)
return update
} else {
// If there is a custom handler we follow its decision.
// Otherwise, we do a range check to see if we need to keep the range up-to-date.
const shouldLoad =
this.storage.getCustomCacheHandlerMap().get(typeRef)?.shouldLoadOnCreateEvent?.(update) ??
(await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId))
if (shouldLoad) {
// No need to try to download something that's not there anymore
// We do not consult custom handlers here because they are only needed for list elements.
console.log("downloading create event for", getTypeString(typeRef), instanceListId, instanceId)
return this.entityRestClient
.loadParsedInstance(typeRef, [instanceListId, instanceId])
.then((entity) => this.storage.put(typeRef, entity))
.then(() => update)
.catch((e) => {
if (isExpectedErrorForSynchronization(e)) {
return null
} else {
throw e
}
})
} else {
return update
}
}
} else {
return update
}
}
private async processCreateEvent(typeRef: TypeRef<any>, update: EntityUpdateData): Promise<EntityUpdateData | null> {
// do not return undefined to avoid implicit returns
const { instanceId, instanceListId } = getUpdateInstanceId(update)
// We put new instances into cache only when it's a new instance in the cached range which is only for the list instances.
if (instanceListId != null) {
// If there is a custom handler we follow its decision.
let shouldUpdateDb = !update.isPrefetched && this.storage.getCustomCacheHandlerMap().get(typeRef)?.shouldLoadOnCreateEvent?.(update)
// Otherwise, we do a range check to see if we need to keep the range up-to-date. No need to load anything out of range
shouldUpdateDb = shouldUpdateDb ?? (await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId))
// if we have an instance attached, just update with it
// else we assume eventBusClient already did the pre-fetching, so no need to do anything
if (shouldUpdateDb && update.instance != null) {
console.log("putting the entity on the create event for ", getTypeString(typeRef), instanceListId, instanceId, " to the storage")
await this.storage.put(update.typeRef, update.instance)
} else if (shouldUpdateDb) {
console.log("downloading create event for", getTypeString(typeRef), instanceListId, instanceId)
try {
const parsedInstance = await this.entityRestClient.loadParsedInstance(typeRef, [instanceListId, instanceId])
await this.storage.put(update.typeRef, parsedInstance)
} catch (e) {
if (isExpectedErrorForSynchronization(e)) {
return null
} else {
throw e
}
}
}
return update
} else {
return update
}
}
/**
* Updates the given mailSetEntry with the new list id and add it to the cache.
*/
private async updateListIdOfMailSetEntryAndUpdateCache(mailSetEntry: ServerModelParsedInstance, newListId: Id, elementId: Id) {
// In case of a move operation we have to replace the list id always, as the mailSetEntry is stored in another folder.
const typeModel = await this.typeModelResolver.resolveServerTypeReference(MailSetEntryTypeRef)
const attributeId = AttributeModel.getAttributeId(typeModel, "_id")
if (attributeId == null) {
throw new ProgrammingError("no _id for mail set entry in type model ")
}
mailSetEntry[attributeId] = [newListId, elementId]
await this.storage.put(MailSetEntryTypeRef, mailSetEntry)
}
/** Returns {null} when the update should be skipped. */
private async processUpdateEvent(typeRef: TypeRef<SomeEntity>, update: EntityUpdateData): Promise<EntityUpdateData | null> {
const { instanceId, instanceListId } = getUpdateInstanceId(update)
const cached = await this.storage.getParsed(typeRef, instanceListId, instanceId)
// No need to try to download something that's not there anymore
if (cached != null) {
try {
// in case this is an update for the user instance: if the password changed we'll be logged out at this point
// if we don't catch the expected NotAuthenticated Error that results from trying to load anything with
// the old user.
// Letting the NotAuthenticatedError propagate to the main thread instead of trying to handle it ourselves
// or throwing out the update drops us onto the login page and into the session recovery flow if the user
// clicks their saved credentials again, but lets them still use offline login if they try to use the
// outdated credentials while not connected to the internet.
if (isSameTypeRef(typeRef, GroupTypeRef)) {
console.log("DefaultEntityRestCache - processUpdateEvent of type Group:" + instanceId)
}
const newEntity = await this.entityRestClient.loadParsedInstance(typeRef, collapseId(instanceListId, instanceId))
await this.storage.put(typeRef, newEntity)
return update
} catch (e) {
// If the entity is not there anymore we should evict it from the cache and not keep the outdated/nonexisting instance around.
// Even for list elements this should be safe as the instance is not there anymore and is definitely not in this version
if (isExpectedErrorForSynchronization(e)) {
console.log(`Instance not found when processing update for ${JSON.stringify(update)}, deleting from the cache.`)
await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
return null
} else {
throw e
}
}
}
return update
}
/** Returns {null} when the update should be skipped. */
private async processUpdateEvent(update: EntityUpdateData): Promise<EntityUpdateData | null> {
if (update.patches) {
const patchAppliedInstance = await this.patchMerger.patchAndStoreInstance(update.typeRef, update.instanceListId, update.instanceId, update.patches)
if (patchAppliedInstance == null) {
const newEntity = await this.entityRestClient.loadParsedInstance(update.typeRef, collapseId(update.instanceListId, update.instanceId))
await this.storage.put(update.typeRef, newEntity)
} else {
await this.assertInstanceOnUpdateIsSameAsPatched(update, patchAppliedInstance)
}
} else if (!update.isPrefetched) {
const cached = await this.storage.getParsed(update.typeRef, update.instanceListId, update.instanceId)
if (cached != null) {
try {
if (isSameTypeRef(update.typeRef, GroupTypeRef)) {
console.log("DefaultEntityRestCache - processUpdateEvent of type Group:" + update.instanceId)
}
const newEntity = await this.entityRestClient.loadParsedInstance(update.typeRef, collapseId(update.instanceListId, update.instanceId))
await this.storage.put(update.typeRef, newEntity)
return update
} catch (e) {
// If the entity is not there anymore we should evict it from the cache and not keep the outdated/nonexisting instance around.
// Even for list elements this should be safe as the instance is not there anymore and is definitely not in this version
if (isExpectedErrorForSynchronization(e)) {
console.log(`Instance not found when processing update for ${JSON.stringify(update)}, deleting from the cache.`)
await this.storage.deleteIfExists(update.typeRef, update.instanceListId, update.instanceId)
return null
} else {
throw e
}
}
}
}
return update
}
private async assertInstanceOnUpdateIsSameAsPatched(update: EntityUpdateData, patchAppliedInstance: Nullable<ServerModelParsedInstance>) {
if (update.instance != null && update.patches != null && !deepEqual(update.instance, patchAppliedInstance)) {
const instancePipeline = this.patchMerger.instancePipeline
const typeModel = await this.typeModelResolver.resolveServerTypeReference(update.typeRef)
const typeReferenceResolver = this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver)
let sk: Nullable<BitArray> = null
if (typeModel.encrypted) {
sk = await this.patchMerger.getSessionKey(assertNotNull(patchAppliedInstance), typeModel)
}
const patchedEncryptedParsedInstance = await instancePipeline.cryptoMapper.encryptParsedInstance(
typeModel as unknown as ClientTypeModel,
assertNotNull(patchAppliedInstance) as unknown as ClientModelParsedInstance,
sk,
)
const patchedUntypedInstance = await instancePipeline.typeMapper.applyDbTypes(
typeModel as unknown as ClientTypeModel,
patchedEncryptedParsedInstance,
)
const patchDiff = await computePatches(
update.instance as unknown as ClientModelParsedInstance,
assertNotNull(patchAppliedInstance) as unknown as ClientModelParsedInstance,
patchedUntypedInstance,
typeModel,
typeReferenceResolver,
true,
)
if (!isEmpty(patchDiff)) {
console.log("instance on the update: ", update.instance)
console.log("patched instance: ", patchAppliedInstance)
console.log("patches on the update: ", update.patches)
throw new ProgrammingError(
"instance with id [" +
update.instanceListId +
", " +
update.instanceId +
`] has not been successfully patched. Type: ${getTypeString(update.typeRef)}, computePatches: ${JSON.stringify(patchDiff)}`,
)
}
}
}
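// Sanity-check sketch: when the server attaches both the full instance and the patch list to an UPDATE,
// computePatches(update.instance, patchAppliedInstance, ...) should produce an empty diff; a non-empty
// diff means local patch application diverged from the server result and is surfaced as a ProgrammingError.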
/**
*
* @returns {Array<Id>} the ids that are in cache range and therefore should be cached

View file

@ -217,7 +217,7 @@ export class EntityRestClient implements EntityRestInterface {
private readonly authDataProvider: AuthDataProvider,
private readonly restClient: RestClient,
private readonly lazyCrypto: lazy<CryptoFacade>,
private readonly instancePipeline: InstancePipeline,
public readonly instancePipeline: InstancePipeline,
private readonly blobAccessTokenFacade: BlobAccessTokenFacade,
private readonly typeModelResolver: TypeModelResolver,
) {}
@ -600,7 +600,8 @@ export class EntityRestClient implements EntityRestInterface {
const parsedInstance = await this.instancePipeline.modelMapper.mapToClientModelParsedInstance(instance._type as TypeRef<any>, instance)
const typeModel = await this.typeModelResolver.resolveClientTypeReference(instance._type)
const typeReferenceResolver = this.typeModelResolver.resolveClientTypeReference.bind(this.typeModelResolver)
const untypedInstance = await this.instancePipeline.mapAndEncrypt(downcast(instance._type), instance, sessionKey)
const encryptedParsedInstance = await this.instancePipeline.cryptoMapper.encryptParsedInstance(typeModel, parsedInstance, sessionKey)
const untypedInstance = await this.instancePipeline.typeMapper.applyDbTypes(typeModel, encryptedParsedInstance)
// figure out differing fields and build the PATCH request payload
const patchList = await computePatchPayload(
originalParsedInstance,

View file

@ -14,6 +14,7 @@ export class CustomMailEventCacheHandler implements CustomCacheHandler<Mail> {
// - we need them to display the folder contents
// - will very likely be loaded by indexer later
// - we might have the instance in offline cache already because of notification process
// however, they are already preloaded by the EventBusClient
return true
}

View file

@ -5,7 +5,7 @@ import { LanguageViewModel } from "../../misc/LanguageViewModel"
import { IdTupleWrapper, NotificationInfo } from "../../api/entities/sys/TypeRefs"
import { CredentialEncryptionMode } from "../../misc/credentials/CredentialEncryptionMode.js"
import { ExtendedNotificationMode } from "../../native/common/generatedipc/ExtendedNotificationMode"
import { assertNotNull, base64ToBase64Url, neverNull } from "@tutao/tutanota-utils"
import { assertNotNull, base64ToBase64Url, getFirstOrThrow, groupBy, neverNull } from "@tutao/tutanota-utils"
import { log } from "../DesktopLog"
import tutanotaModelInfo from "../../api/entities/tutanota/ModelInfo"
import { handleRestError } from "../../api/common/error/RestError"
@ -19,7 +19,8 @@ import { StrippedEntity } from "../../api/common/utils/EntityUtils"
import { EncryptedParsedInstance, ServerModelUntypedInstance, TypeModel } from "../../api/common/EntityTypes"
import { AttributeModel } from "../../api/common/AttributeModel"
import { InstancePipeline } from "../../api/worker/crypto/InstancePipeline"
import { ClientTypeModelResolver, TypeModelResolver } from "../../api/common/EntityFunctions"
import { ClientTypeModelResolver } from "../../api/common/EntityFunctions"
import { UnencryptedCredentials } from "../../native/common/generatedipc/UnencryptedCredentials"
const TAG = "[notifications]"
@ -27,6 +28,7 @@ export type MailMetadata = {
senderAddress: string
firstRecipientAddress: string | null
id: IdTuple
notificationInfo: StrippedEntity<NotificationInfo>
}
export class TutaNotificationHandler {
@ -44,32 +46,44 @@ export class TutaNotificationHandler {
private readonly typeModelResolver: ClientTypeModelResolver,
) {}
async onMailNotification(sseInfo: SseInfo, notificationInfo: StrippedEntity<NotificationInfo>) {
const appWindow = this.windowManager.getAll().find((window) => window.getUserId() === notificationInfo.userId)
if (appWindow && appWindow.isFocused()) {
// no need for notification if user is looking right at the window
return
}
// we can't download the email if we don't have access to credentials
const canShowExtendedNotification =
(await this.nativeCredentialFacade.getCredentialEncryptionMode()) === CredentialEncryptionMode.DEVICE_LOCK &&
(await this.sseStorage.getExtendedNotificationConfig(notificationInfo.userId)) !== ExtendedNotificationMode.NoSenderOrSubject
if (!canShowExtendedNotification) {
const notificationId = notificationInfo.mailId
? `${notificationInfo.mailId.listId},${notificationInfo.mailId?.listElementId}`
: notificationInfo.userId
this.notifier.submitGroupedNotification(this.lang.get("pushNewMail_msg"), notificationInfo.mailAddress, notificationId, (res) =>
this.onMailNotificationClick(res, notificationInfo),
)
return
}
const mailMetadata = await this.downloadMailMetadata(sseInfo, notificationInfo)
if (mailMetadata == null) return
this.notifier.submitGroupedNotification(mailMetadata.senderAddress, mailMetadata.firstRecipientAddress ?? "", mailMetadata.id.join(","), (res) =>
this.onMailNotificationClick(res, notificationInfo),
)
}
async onMailNotification(sseInfo: SseInfo, notificationInfos: Array<StrippedEntity<NotificationInfo>>) {
const infosByListId = groupBy(notificationInfos, (ni) => assertNotNull(ni.mailId).listId)
for (const [listId, infos] of infosByListId.entries()) {
const firstNotificationInfo = getFirstOrThrow(infos)
const appWindow = this.windowManager.getAll().find((window) => window.getUserId() === firstNotificationInfo.userId)
if (appWindow && appWindow.isFocused()) {
// no need for notification if user is looking right at the window
continue
}
// we can't download the email if we don't have access to credentials
const canShowExtendedNotification =
(await this.nativeCredentialFacade.getCredentialEncryptionMode()) === CredentialEncryptionMode.DEVICE_LOCK &&
(await this.sseStorage.getExtendedNotificationConfig(firstNotificationInfo.userId)) !== ExtendedNotificationMode.NoSenderOrSubject
if (!canShowExtendedNotification) {
const notificationId = firstNotificationInfo.mailId
? `${firstNotificationInfo.mailId.listId},${firstNotificationInfo.mailId?.listElementId}`
: firstNotificationInfo.userId
this.notifier.submitGroupedNotification(this.lang.get("pushNewMail_msg"), firstNotificationInfo.mailAddress, notificationId, (res) =>
this.onMailNotificationClick(res, firstNotificationInfo),
)
} else {
const credentials = await this.nativeCredentialFacade.loadByUserId(firstNotificationInfo.userId)
if (credentials == null) {
log.warn(`Not found credentials to download notification, userId ${firstNotificationInfo.userId}`)
continue
}
const infosToFetch = infos.slice(0, 5) // don't show notifications for more than five mails at a time
const mailMetadata = await this.downloadMailMetadata(sseInfo, listId, infosToFetch, credentials)
for (const mailMeta of mailMetadata) {
this.notifier.submitGroupedNotification(mailMeta.senderAddress, mailMeta.firstRecipientAddress ?? "", mailMeta.id.join(","), (res) =>
this.onMailNotificationClick(res, mailMeta.notificationInfo),
)
}
}
}
}
private onMailNotificationClick(res: NotificationResult, notificationInfo: StrippedEntity<NotificationInfo>) {
@ -91,15 +105,21 @@ export class TutaNotificationHandler {
}
}
private async downloadMailMetadata(sseInfo: SseInfo, ni: StrippedEntity<NotificationInfo>): Promise<MailMetadata | null> {
const url = this.makeMailMetadataUrl(sseInfo, assertNotNull(ni.mailId))
// decrypt access token
const credentials = await this.nativeCredentialFacade.loadByUserId(ni.userId)
if (credentials == null) {
log.warn(`Not found credentials to download notification, userId ${ni.userId}`)
return null
}
private async downloadMailMetadata(
sseInfo: SseInfo,
listId: Id,
notificationInfos: Array<StrippedEntity<NotificationInfo>>,
credentials: UnencryptedCredentials,
): Promise<Array<MailMetadata>> {
const result: Array<MailMetadata> = []
// decrypt access token
const first = notificationInfos[0]
const url = this.makeMailMetadataUrl(
sseInfo,
assertNotNull(listId),
notificationInfos.map((ni) => assertNotNull(ni.mailId)),
)
log.debug(TAG, "downloading mail notification metadata")
const headers: Record<string, string> = {
@ -114,22 +134,39 @@ export class TutaNotificationHandler {
throw handleRestError(neverNull(response.status), url.toString(), response.headers.get("Error-Id"), null)
}
const parsedResponse = await response.json()
const untypedInstances = (await response.json()) as Array<ServerModelUntypedInstance>
const mailModel = await this.typeModelResolver.resolveClientTypeReference(MailTypeRef)
const mailAddressModel = await this.typeModelResolver.resolveClientTypeReference(MailAddressTypeRef)
const mailEncryptedParsedInstance: EncryptedParsedInstance = await this.nativeInstancePipeline.typeMapper.applyJsTypes(
mailModel, result.push(
parsedResponse as ServerModelUntypedInstance, ...(await Promise.all(
untypedInstances.map(async (untypedInstance) => {
const mailEncryptedParsedInstance: EncryptedParsedInstance = await this.nativeInstancePipeline.typeMapper.applyJsTypes(
mailModel,
untypedInstance,
)
const notificationInfo = notificationInfos.filter(
(info) =>
assertNotNull(info.mailId).listElementId ===
AttributeModel.getAttribute<IdTuple>(mailEncryptedParsedInstance, "_id", mailModel)[1],
)[0]
return this.encryptedMailToMailMetaData(mailModel, mailAddressModel, mailEncryptedParsedInstance, notificationInfo)
}),
)),
) )
return this.encryptedMailToMailMetaData(mailModel, mailAddressModel, mailEncryptedParsedInstance)
} catch (e) { } catch (e) {
log.debug(TAG, "Error fetching mail metadata, " + (e as Error).message) log.debug(TAG, "Error fetching mail metadata, " + (e as Error).message)
return null
} }
return result
} }
private encryptedMailToMailMetaData(mailModel: TypeModel, mailAddressModel: TypeModel, mi: EncryptedParsedInstance): MailMetadata { private encryptedMailToMailMetaData(
mailModel: TypeModel,
mailAddressModel: TypeModel,
mi: EncryptedParsedInstance,
notificationInfo: StrippedEntity<NotificationInfo>,
): MailMetadata {
const mailId = AttributeModel.getAttribute<IdTuple>(mi, "_id", mailModel) const mailId = AttributeModel.getAttribute<IdTuple>(mi, "_id", mailModel)
const firstRecipient = AttributeModel.getAttributeorNull<EncryptedParsedInstance[] | null>(mi, "firstRecipient", mailModel) const firstRecipient = AttributeModel.getAttributeorNull<EncryptedParsedInstance[] | null>(mi, "firstRecipient", mailModel)
@ -140,12 +177,15 @@ export class TutaNotificationHandler {
id: mailId, id: mailId,
senderAddress: senderAddress, senderAddress: senderAddress,
firstRecipientAddress: firstRecipient ? AttributeModel.getAttribute(firstRecipient[0], "address", mailAddressModel) : null, firstRecipientAddress: firstRecipient ? AttributeModel.getAttribute(firstRecipient[0], "address", mailAddressModel) : null,
notificationInfo,
} }
} }
private makeMailMetadataUrl(sseInfo: SseInfo, mailId: IdTupleWrapper): URL { private makeMailMetadataUrl(sseInfo: SseInfo, listId: Id, mailIds: Array<IdTupleWrapper>): URL {
const url = new URL(sseInfo.sseOrigin) const url = new URL(sseInfo.sseOrigin)
url.pathname = `rest/tutanota/mail/${base64ToBase64Url(mailId.listId)}/${base64ToBase64Url(mailId.listElementId)}` const listElementIds = mailIds.map((mailId) => base64ToBase64Url(mailId.listElementId)).join(",")
url.pathname = `rest/tutanota/mail/${base64ToBase64Url(listId)}`
url.searchParams.set("ids", listElementIds)
return url return url
} }
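The rewritten URL builder batches many element ids into one request. A minimal sketch of the resulting URL shape using only the standard URL API; the origin and ids are illustrative, and the real code above additionally passes the listId and each listElementId through base64ToBase64Url:

function batchedMailUrlSketch(origin: string, listId: string, elementIds: string[]): URL {
	// one mail list per request; element ids travel as a comma-separated "ids" query parameter
	const url = new URL(origin)
	url.pathname = `rest/tutanota/mail/${listId}`
	url.searchParams.set("ids", elementIds.join(","))
	return url
}

// batchedMailUrlSketch("https://mail.example.com", "listA", ["id1", "id2"]).toString()
// -> "https://mail.example.com/rest/tutanota/mail/listA?ids=id1,id2"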
View file
@ -136,10 +136,12 @@ export class TutaSseFacade implements SseEventHandler {
await this.sseStorage.recordMissedNotificationCheckTime() await this.sseStorage.recordMissedNotificationCheckTime()
const sseInfo = this.currentSseInfo const sseInfo = this.currentSseInfo
if (sseInfo == null) return if (sseInfo == null) return
for (const notificationInfoUntyped of encryptedMissedNotification.notificationInfos) { const notificationInfos = await Promise.all(
const notificationInfo = await this.nativeInstancePipeline.decryptAndMap(NotificationInfoTypeRef, notificationInfoUntyped, null) encryptedMissedNotification.notificationInfos.map(
await this.notificationHandler.onMailNotification(sseInfo, notificationInfo) async (notificationInfoUntyped) => await this.nativeInstancePipeline.decryptAndMap(NotificationInfoTypeRef, notificationInfoUntyped, null),
} ),
)
await this.notificationHandler.onMailNotification(sseInfo, notificationInfos)
await this.handleAlarmNotification(encryptedMissedNotification) await this.handleAlarmNotification(encryptedMissedNotification)
} }
@ -187,7 +189,7 @@ export class TutaSseFacade implements SseEventHandler {
const sseInfo = assertNotNull(this.currentSseInfo) const sseInfo = assertNotNull(this.currentSseInfo)
const url = this.makeMissedNotificationUrl(sseInfo) const url = this.makeMissedNotificationUrl(sseInfo)
log.debug("downloading missed notification") log.debug("downloading missed notification", url)
const headers: Record<string, string> = { const headers: Record<string, string> = {
userIds: sseInfo.userIds[0], userIds: sseInfo.userIds[0],
v: typeModels[MissedNotificationTypeRef.typeId].version, v: typeModels[MissedNotificationTypeRef.typeId].version,
View file
@ -9,7 +9,7 @@ export const enum NotificationType {
export class Notifications { export class Notifications {
showNotification(type: NotificationType, title: string, options?: NotificationOptions, onclick: Notification["onclick"] = noOp): Notification | null { showNotification(type: NotificationType, title: string, options?: NotificationOptions, onclick: Notification["onclick"] = noOp): Notification | null {
if (!isApp() && typeof window.Notification !== "undefined" && window.Notification.permission === "granted") { if (!isApp() && !isDesktop() && typeof window.Notification !== "undefined" && window.Notification.permission === "granted") {
try { try {
const actualOptions: NotificationOptions = Object.assign( const actualOptions: NotificationOptions = Object.assign(
{}, {},
View file
@ -18,8 +18,6 @@ import {
splitInChunks, splitInChunks,
} from "@tutao/tutanota-utils" } from "@tutao/tutanota-utils"
import { import {
ConversationEntry,
ConversationEntryTypeRef,
Mail, Mail,
MailboxGroupRoot, MailboxGroupRoot,
MailboxProperties, MailboxProperties,
@ -39,13 +37,13 @@ import {
SimpleMoveMailTarget, SimpleMoveMailTarget,
} from "../../../common/api/common/TutanotaConstants.js" } from "../../../common/api/common/TutanotaConstants.js"
import { CUSTOM_MIN_ID, elementIdPart, getElementId, listIdPart } from "../../../common/api/common/utils/EntityUtils.js" import { CUSTOM_MIN_ID, elementIdPart, getElementId, listIdPart } from "../../../common/api/common/utils/EntityUtils.js"
import { containsEventOfType, EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js" import { EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import m from "mithril" import m from "mithril"
import { WebsocketCounterData } from "../../../common/api/entities/sys/TypeRefs.js" import { WebsocketCounterData } from "../../../common/api/entities/sys/TypeRefs.js"
import { Notifications, NotificationType } from "../../../common/gui/Notifications.js" import { Notifications, NotificationType } from "../../../common/gui/Notifications.js"
import { lang } from "../../../common/misc/LanguageViewModel.js" import { lang } from "../../../common/misc/LanguageViewModel.js"
import { ProgrammingError } from "../../../common/api/common/error/ProgrammingError.js" import { ProgrammingError } from "../../../common/api/common/error/ProgrammingError.js"
import { NotAuthorizedError, NotFoundError, PreconditionFailedError } from "../../../common/api/common/error/RestError.js" import { NotFoundError, PreconditionFailedError } from "../../../common/api/common/error/RestError.js"
import { UserError } from "../../../common/api/main/UserError.js" import { UserError } from "../../../common/api/main/UserError.js"
import { EventController } from "../../../common/api/main/EventController.js" import { EventController } from "../../../common/api/main/EventController.js"
import { InboxRuleHandler } from "./InboxRuleHandler.js" import { InboxRuleHandler } from "./InboxRuleHandler.js"
@ -156,11 +154,7 @@ export class MailModel {
if (isUpdateForTypeRef(MailFolderTypeRef, update)) { if (isUpdateForTypeRef(MailFolderTypeRef, update)) {
await this.init() await this.init()
m.redraw() m.redraw()
} else if ( } else if (isUpdateForTypeRef(MailTypeRef, update) && update.operation === OperationType.CREATE) {
isUpdateForTypeRef(MailTypeRef, update) &&
update.operation === OperationType.CREATE &&
!containsEventOfType(updates, OperationType.DELETE, update.instanceId)
) {
if (this.inboxRuleHandler && this.connectivityModel) { if (this.inboxRuleHandler && this.connectivityModel) {
const mailId: IdTuple = [update.instanceListId, update.instanceId] const mailId: IdTuple = [update.instanceListId, update.instanceId]
try { try {
@ -593,45 +587,6 @@ export class MailModel {
return [...this.mailSets.values()].filter((f) => f.folders.importedMailSet).map((f) => f.folders.importedMailSet!) return [...this.mailSets.values()].filter((f) => f.folders.importedMailSet).map((f) => f.folders.importedMailSet!)
} }
async loadConversationsForAllMails(mails: ReadonlyArray<Mail>): Promise<ReadonlyArray<Mail>> {
let conversationEntries: ConversationEntry[] = []
for (const mail of mails) {
await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(mail.conversationEntry)).then(
async (entries) => {
conversationEntries.push(...entries)
},
async (e) => {
// Most likely the conversation entry list does not exist anymore. The server does not distinguish between the case when the
// list does not exist and when we have no permission on it (and for good reasons, it prevents enumeration).
// Most often it happens when we are not fully synced with the server yet and the primary mail does not even exist.
if (!(e instanceof NotAuthorizedError)) {
throw e
}
},
)
}
// If there are no conversationEntries (somehow they didn't load), just return the mails back
if (conversationEntries.length < 0) {
return mails
}
const byList = groupBy(conversationEntries, (c) => c.mail && listIdPart(c.mail))
const allMails: Mail[] = []
for (const [listId, conversations] of byList.entries()) {
if (!listId) continue
const loaded = await this.entityClient.loadMultiple(
MailTypeRef,
listId,
conversations.map((c) => elementIdPart(assertNotNull(c.mail))),
)
allMails.push(...loaded)
}
return allMails
}
/** Resolve conversation list ids to the IDs of mails in those conversations. */ /** Resolve conversation list ids to the IDs of mails in those conversations. */
async resolveConversationsForMails(mails: readonly Mail[]): Promise<IdTuple[]> { async resolveConversationsForMails(mails: readonly Mail[]): Promise<IdTuple[]> {
return await this.mailFacade.resolveConversations(mails.map((m) => listIdPart(m.conversationEntry))) return await this.mailFacade.resolveConversations(mails.map((m) => listIdPart(m.conversationEntry)))
View file
@ -203,35 +203,33 @@ export class ConversationViewModel {
return listIdPart(this._primaryViewModel.mail.conversationEntry) return listIdPart(this._primaryViewModel.mail.conversationEntry)
} }
private async loadConversation() { private async loadConversation(): Promise<void> {
try { try {
if (!this.showFullConversation()) { // Catch errors but only for loading conversation entries.
this.conversation = this.conversationItemsForSelectedMailOnly() // if success, proceed with loading mails
} else { // otherwise do the error handling
// Catch errors but only for loading conversation entries. try {
// if success, proceed with loading mails if (!this.showFullConversation()) {
// otherwise do the error handling this.conversation = this.conversationItemsForSelectedMailOnly()
this.conversation = await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(this.primaryMail.conversationEntry)).then( } else {
async (entries) => { const entries = await this.entityClient.loadAll(ConversationEntryTypeRef, listIdPart(this.primaryMail.conversationEntry))
// if the primary mail is not part of the conversation then only display the primary mail // if the primary mail is not part of the conversation then only display the primary mail
if (!entries.some((entry) => isSameId(entry.mail, this.primaryMail._id))) { if (!entries.some((entry) => isSameId(entry.mail, this.primaryMail._id))) {
return this.conversationItemsForSelectedMailOnly() this.conversation = this.conversationItemsForSelectedMailOnly()
} else { } else {
const allMails = await this.loadMails(entries) const allMails = await this.loadMails(entries)
return this.createConversationItems(entries, allMails) this.conversation = this.createConversationItems(entries, allMails)
} }
}, }
async (e) => { } catch (e) {
if (e instanceof NotAuthorizedError) { if (e instanceof NotAuthorizedError) {
// Most likely the conversation entry list does not exist anymore. The server does not distinguish between the case when the // Most likely the conversation entry list does not exist anymore. The server does not distinguish between the case when the
// list does not exist and when we have no permission on it (and for good reasons, it prevents enumeration). // list does not exist and when we have no permission on it (and for good reasons, it prevents enumeration).
// Most often it happens when we are not fully synced with the server yet and the primary mail does not even exist. // Most often it happens when we are not fully synced with the server yet and the primary mail does not even exist.
return this.conversationItemsForSelectedMailOnly() this.conversation = this.conversationItemsForSelectedMailOnly()
} else { } else {
throw e throw e
} }
},
)
} }
} finally { } finally {
this.onUiUpdate() this.onUiUpdate()
View file
@ -677,6 +677,9 @@ export class MailViewModel {
instanceListId: importedFolder.entries, instanceListId: importedFolder.entries,
operation: OperationType.CREATE, operation: OperationType.CREATE,
typeRef: MailSetEntryTypeRef, typeRef: MailSetEntryTypeRef,
instance: null,
patches: null,
isPrefetched: false,
}) })
}) })
} }
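The three fields added to this synthetic update (instance, patches, isPrefetched) recur on every EntityUpdateData in this changeset. A sketch of the extended shape, with types inferred from usage in the diff rather than copied from the real definition:

type EntityUpdateDataSketch = {
	typeRef: unknown // TypeRef of the updated entity type
	instanceListId: string
	instanceId: string
	operation: unknown // OperationType.CREATE / UPDATE / DELETE
	instance: object | null // full instance, attached to some CREATE events (see the prefetcher tests below)
	patches: object | null // patch list, attached to some UPDATE events
	isPrefetched: boolean // set once the prefetcher has stored the instance in the cache
}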
View file
@ -259,19 +259,6 @@ export class MailViewerViewModel {
} }
m.redraw() m.redraw()
// We need the conversation entry in order to reply to the message.
// We don't want the user to have to wait for it to load when they click reply,
// So we load it here pre-emptively to make sure it is in the cache.
this.entityClient.load(ConversationEntryTypeRef, this.mail.conversationEntry).catch((e) => {
if (e instanceof NotFoundError) {
console.log("could load conversation entry as it has been moved/deleted already", e)
} else if (isOfflineError(e)) {
console.log("failed to load conversation entry, because of a lost connection", e)
} else {
throw e
}
})
} finally { } finally {
this.renderIsDelayed = false this.renderIsDelayed = false
} }
View file
@ -59,7 +59,7 @@ import { LoginController } from "../../../common/api/main/LoginController.js"
import { EntityClient, loadMultipleFromLists } from "../../../common/api/common/EntityClient.js" import { EntityClient, loadMultipleFromLists } from "../../../common/api/common/EntityClient.js"
import { SearchRouter } from "../../../common/search/view/SearchRouter.js" import { SearchRouter } from "../../../common/search/view/SearchRouter.js"
import { MailOpenedListener } from "../../mail/view/MailViewModel.js" import { MailOpenedListener } from "../../mail/view/MailViewModel.js"
import { containsEventOfType, EntityUpdateData, getEventOfType, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js" import { EntityUpdateData, isUpdateForTypeRef } from "../../../common/api/common/utils/EntityUpdateUtils.js"
import { CalendarInfo } from "../../../calendar-app/calendar/model/CalendarModel.js" import { CalendarInfo } from "../../../calendar-app/calendar/model/CalendarModel.js"
import { locator } from "../../../common/api/main/CommonLocator.js" import { locator } from "../../../common/api/main/CommonLocator.js"
import m from "mithril" import m from "mithril"
@ -309,51 +309,10 @@ export class SearchViewModel {
private readonly entityEventsListener: EntityEventsListener = async (updates) => { private readonly entityEventsListener: EntityEventsListener = async (updates) => {
for (const update of updates) { for (const update of updates) {
const mergedUpdate = this.mergeOperationsIfNeeded(update, updates) await this.entityEventReceived(update)
if (mergedUpdate == null) continue
await this.entityEventReceived(mergedUpdate)
} }
} }
private mergeOperationsIfNeeded(update: EntityUpdateData, updates: readonly EntityUpdateData[]): EntityUpdateData | null {
// We are trying to keep the mails that are moved and would match the search criteria displayed.
// This is a bit hacky as we reimplement part of the filtering by list.
// Ideally search result would update by itself and we would only need to reconcile the changes.
if (!isUpdateForTypeRef(MailTypeRef, update) || this.searchResult == null) {
return update
}
if (update.operation === OperationType.CREATE && containsEventOfType(updates, OperationType.DELETE, update.instanceId)) {
// This is a move operation, is destination list included in the restrictions?
if (this.listIdMatchesRestriction(update.instanceListId, this.searchResult.restriction)) {
// If it's included, we want to keep showing the item but we will simulate the UPDATE
return { ...update, operation: OperationType.UPDATE }
} else {
// If it's not going to be included we might as well skip the create operation
return null
}
} else if (update.operation === OperationType.DELETE && containsEventOfType(updates, OperationType.CREATE, update.instanceId)) {
// This is a move operation and we are in the delete part of it.
// Grab the other part to check the move destination.
const createOperation = assertNotNull(getEventOfType(updates, OperationType.CREATE, update.instanceId))
// Is destination included in the search?
if (this.listIdMatchesRestriction(createOperation.instanceListId, this.searchResult.restriction)) {
// If so, skip the delete.
return null
} else {
// Otherwise delete
return update
}
} else {
return update
}
}
private listIdMatchesRestriction(listId: string, restriction: SearchRestriction): boolean {
return restriction.folderIds.length === 0 || restriction.folderIds.includes(listId)
}
onNewUrl(args: Record<string, any>, requestedPath: string) { onNewUrl(args: Record<string, any>, requestedPath: string) {
const query = args.query ?? "" const query = args.query ?? ""
let restriction let restriction
View file
@ -110,7 +110,7 @@ export class IndexedDbIndexer implements Indexer {
_indexedGroupIds: Array<Id> _indexedGroupIds: Array<Id>
/** @private visibleForTesting */ /** @private visibleForTesting */
readonly eventQueue = new EventQueue("indexer", true, (batch) => this._processEntityEvents(batch)) readonly eventQueue = new EventQueue("indexer", (batch) => this._processEntityEvents(batch))
constructor( constructor(
private readonly serverDateProvider: DateProvider, private readonly serverDateProvider: DateProvider,
@ -127,7 +127,7 @@ export class IndexedDbIndexer implements Indexer {
// correctly initialized during init() // correctly initialized during init()
this._indexedGroupIds = [] this._indexedGroupIds = []
this.initiallyLoadedBatchIdsPerGroup = new Map() this.initiallyLoadedBatchIdsPerGroup = new Map()
this._realtimeEventQueue = new EventQueue("indexer_realtime", false, (nextElement: QueuedBatch) => { this._realtimeEventQueue = new EventQueue("indexer_realtime", (nextElement: QueuedBatch) => {
// During initial loading we remember the last batch we loaded // During initial loading we remember the last batch we loaded
// so if we get updates from EventBusClient here for things that are already loaded we discard them // so if we get updates from EventBusClient here for things that are already loaded we discard them
const loadedIdForGroup = this.initiallyLoadedBatchIdsPerGroup.get(nextElement.groupId) const loadedIdForGroup = this.initiallyLoadedBatchIdsPerGroup.get(nextElement.groupId)
View file
@ -104,6 +104,8 @@ import { DateProvider } from "../../../common/api/common/DateProvider"
import type { ContactSearchFacade } from "../index/ContactSearchFacade" import type { ContactSearchFacade } from "../index/ContactSearchFacade"
import type { IndexedDbSearchFacade } from "../index/IndexedDbSearchFacade.js" import type { IndexedDbSearchFacade } from "../index/IndexedDbSearchFacade.js"
import type { OfflineStorageSearchFacade } from "../index/OfflineStorageSearchFacade.js" import type { OfflineStorageSearchFacade } from "../index/OfflineStorageSearchFacade.js"
import { PatchMerger } from "../../../common/api/worker/offline/PatchMerger"
import { EventInstancePrefetcher } from "../../../common/api/worker/EventInstancePrefetcher"
assertWorkerOrNode() assertWorkerOrNode()
@ -115,6 +117,7 @@ export type WorkerLocatorType = {
asymmetricCrypto: AsymmetricCryptoFacade asymmetricCrypto: AsymmetricCryptoFacade
crypto: CryptoFacade crypto: CryptoFacade
instancePipeline: InstancePipeline instancePipeline: InstancePipeline
patchMerger: PatchMerger
applicationTypesFacade: ApplicationTypesFacade applicationTypesFacade: ApplicationTypesFacade
cacheStorage: CacheStorage cacheStorage: CacheStorage
cache: EntityRestInterface cache: EntityRestInterface
@ -341,10 +344,12 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
return new PdfWriter(new TextEncoder(), undefined) return new PdfWriter(new TextEncoder(), undefined)
} }
locator.patchMerger = new PatchMerger(locator.cacheStorage, locator.instancePipeline, typeModelResolver, () => locator.crypto)
// We don't want to cache within the admin client // We don't want to cache within the admin client
let cache: DefaultEntityRestCache | null = null let cache: DefaultEntityRestCache | null = null
if (!isAdminClient()) { if (!isAdminClient()) {
cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver) cache = new DefaultEntityRestCache(entityRestClient, maybeUninitializedStorage, typeModelResolver, locator.patchMerger)
} }
locator.cache = cache ?? entityRestClient locator.cache = cache ?? entityRestClient
@ -370,7 +375,7 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
// We create empty CustomCacheHandlerMap because this cache is separate anyway and user updates don't matter. // We create empty CustomCacheHandlerMap because this cache is separate anyway and user updates don't matter.
const cacheStorage = new EphemeralCacheStorage(locator.instancePipeline.modelMapper, typeModelResolver, new CustomCacheHandlerMap()) const cacheStorage = new EphemeralCacheStorage(locator.instancePipeline.modelMapper, typeModelResolver, new CustomCacheHandlerMap())
return new BulkMailLoader( return new BulkMailLoader(
new EntityClient(new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver), typeModelResolver), new EntityClient(new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver, locator.patchMerger), typeModelResolver),
new EntityClient(entityRestClient, typeModelResolver), new EntityClient(entityRestClient, typeModelResolver),
mailFacade, mailFacade,
) )
@ -703,7 +708,7 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
await indexer.processEntityEvents(events, batchId, groupId) await indexer.processEntityEvents(events, batchId, groupId)
}, },
) )
const prefetcher = new EventInstancePrefetcher(locator.cache)
locator.eventBusClient = new EventBusClient( locator.eventBusClient = new EventBusClient(
eventBusCoordinator, eventBusCoordinator,
cache ?? new AdminClientDummyEntityRestCache(), cache ?? new AdminClientDummyEntityRestCache(),
@ -715,6 +720,8 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData)
mainInterface.progressTracker, mainInterface.progressTracker,
mainInterface.syncTracker, mainInterface.syncTracker,
typeModelResolver, typeModelResolver,
locator.crypto,
prefetcher,
) )
locator.login.init(locator.eventBusClient) locator.login.init(locator.eventBusClient)
locator.Const = Const locator.Const = Const
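Taken together, this wiring places the prefetcher in front of the cache. A hedged usage sketch of the flow, assuming the preloadEntities signature exercised in the tests below (a flat array of entity updates plus a progress monitor):

// updates: EntityUpdateData[] collected from the queued entity event batches
await prefetcher.preloadEntities(updates, progressMonitor)
// updates whose instances were stored in the cache now carry isPrefetched === true,
// so later per-event processing can skip the extra GET request for them
const stillNeedsFetch = updates.filter((u) => !u.isPrefetched)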
View file
@ -157,8 +157,10 @@ import "./api/worker/search/OfflineStorageContactIndexerBackendTest.js"
import "./api/worker/search/OfflineStorageContactSearchFacadeTest.js" import "./api/worker/search/OfflineStorageContactSearchFacadeTest.js"
import "./api/worker/rest/CustomUserCacheHandlerTest.js" import "./api/worker/rest/CustomUserCacheHandlerTest.js"
import "./api/common/utils/QueryTokenUtilsTest.js" import "./api/common/utils/QueryTokenUtilsTest.js"
import "./api/worker/offline/PatchMergerTest.js"
import "./contacts/ContactModelTest.js" import "./contacts/ContactModelTest.js"
import "./api/worker/search/OfflinestorageIndexerTest.js" import "./api/worker/search/OfflinestorageIndexerTest.js"
import "./api/worker/EventInstancePrefetcherTest.js"
import * as td from "testdouble" import * as td from "testdouble"
import { random } from "@tutao/tutanota-crypto" import { random } from "@tutao/tutanota-crypto"
View file
@ -2,13 +2,13 @@ import type { BrowserData } from "../../src/common/misc/ClientConstants.js"
import { DbEncryptionData } from "../../src/common/api/worker/search/SearchTypes.js" import { DbEncryptionData } from "../../src/common/api/worker/search/SearchTypes.js"
import { IndexerCore } from "../../src/mail-app/workerUtils/index/IndexerCore.js" import { IndexerCore } from "../../src/mail-app/workerUtils/index/IndexerCore.js"
import { DbFacade, DbTransaction } from "../../src/common/api/worker/search/DbFacade.js" import { DbFacade, DbTransaction } from "../../src/common/api/worker/search/DbFacade.js"
import { assertNotNull, clone, deepEqual, defer, Thunk, typedEntries, TypeRef } from "@tutao/tutanota-utils" import { assertNotNull, clone, deepEqual, defer, isNotNull, Thunk, typedEntries, TypeRef } from "@tutao/tutanota-utils"
import type { DesktopKeyStoreFacade } from "../../src/common/desktop/DesktopKeyStoreFacade.js" import type { DesktopKeyStoreFacade } from "../../src/common/desktop/DesktopKeyStoreFacade.js"
import { mock } from "@tutao/tutanota-test-utils" import { mock } from "@tutao/tutanota-test-utils"
import { aes256RandomKey, fixedIv, uint8ArrayToKey } from "@tutao/tutanota-crypto" import { aes256RandomKey, fixedIv, uint8ArrayToKey } from "@tutao/tutanota-crypto"
import { ScheduledPeriodicId, ScheduledTimeoutId, Scheduler } from "../../src/common/api/common/utils/Scheduler.js" import { ScheduledPeriodicId, ScheduledTimeoutId, Scheduler } from "../../src/common/api/common/utils/Scheduler.js"
import { matchers, object, when } from "testdouble" import { matchers, object, when } from "testdouble"
import { Entity, ModelValue, TypeModel } from "../../src/common/api/common/EntityTypes.js" import { Entity, ModelValue, ParsedInstance, TypeModel } from "../../src/common/api/common/EntityTypes.js"
import { create } from "../../src/common/api/common/utils/EntityUtils.js" import { create } from "../../src/common/api/common/utils/EntityUtils.js"
import { ClientModelInfo, ServerModelInfo, ServerModels, TypeModelResolver } from "../../src/common/api/common/EntityFunctions.js" import { ClientModelInfo, ServerModelInfo, ServerModels, TypeModelResolver } from "../../src/common/api/common/EntityFunctions.js"
import { type fetch as undiciFetch, type Response } from "undici" import { type fetch as undiciFetch, type Response } from "undici"
@ -280,7 +280,7 @@ The last expected item is ${JSON.stringify(expectedArray.at(-1))} but got ${JSON
} }
} }
export function removeFinalIvs(instance: Entity): Entity { export function removeFinalIvs(instance: Entity | ParsedInstance): Entity | ParsedInstance {
delete instance["_finalIvs"] delete instance["_finalIvs"]
delete instance["_original"] delete instance["_original"]
const keys = Object.keys(instance) const keys = Object.keys(instance)
@ -293,18 +293,11 @@ export function removeFinalIvs(instance: Entity): Entity {
return instance return instance
} }
export function removeOriginals(instance: Entity | null): Entity | null { export function removeOriginals<T extends Entity>(instance: T | null): T | null {
if (instance === null) { if (isNotNull(instance) && typeof instance == "object") {
return null
}
if (instance["_original"]) {
delete instance["_original"] delete instance["_original"]
} for (const i of Object.values(instance).filter(isNotNull)) {
const keys = Object.keys(instance) removeOriginals(i)
for (const key of keys) {
const maybeAggregate = instance[key]
if (maybeAggregate instanceof Object) {
removeOriginals(maybeAggregate)
} }
} }
return instance return instance
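A usage note on the rewritten helper: it now walks every non-null value recursively, so nested aggregates lose their _original as well. A minimal sketch with a plain object standing in for an entity:

const entity = { _original: {}, nested: { _original: {}, deeper: { _original: {} } } }
removeOriginals(entity as any) // "as any" only because the stand-in is not a real Entity
// entity, entity.nested and entity.nested.deeper no longer carry _original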
View file
@ -103,39 +103,6 @@ o.spec("EntityUtils", function () {
removeTechnicalFields(entityCopy as ElementEntity) removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals(originalEntity) o(entityCopy as unknown).deepEquals(originalEntity)
}) })
o("it removes _finalEncrypted fields directly on the entity", function () {
const originalEntity = { ...makeEntity(), _finalEncryptedThing: [1, 2, 3] }
const entityCopy = clone(originalEntity)
removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals({
_id: "test",
_type: typeRef,
_ownerGroup: null,
_ownerEncSessionKey: null,
})
})
o("it removes _finalEncrypted fields deeper in the entity", function () {
const originalEntity = {
...makeEntity(),
nested: {
test: "yes",
_finalEncryptedThing: [1, 2, 3],
},
}
const entityCopy = clone(originalEntity)
removeTechnicalFields(entityCopy as ElementEntity)
o(entityCopy as unknown).deepEquals({
_id: "test",
_type: typeRef,
_ownerGroup: null,
_ownerEncSessionKey: null,
nested: {
test: "yes",
},
})
})
}) })
o.spec("computePatches", function () { o.spec("computePatches", function () {
@ -433,8 +400,18 @@ o.spec("EntityUtils", function () {
o("computePatches works on aggregations and additem operation", async function () { o("computePatches works on aggregations and additem operation", async function () {
const testEntity = await createFilledTestEntity() const testEntity = await createFilledTestEntity()
testEntity.testAssociation.push(await createTestEntityWithDummyResolver(TestAggregateRef, { _id: "newAgId" })) testEntity.testAssociation.push(
testEntity.testAssociation.push(await createTestEntityWithDummyResolver(TestAggregateRef, { _id: "newAgId2" })) await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId",
testNumber: "1",
}),
)
testEntity.testAssociation.push(
await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId2",
testNumber: "2",
}),
)
let sk = aes256RandomKey() let sk = aes256RandomKey()
const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance( const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(
@ -528,6 +505,34 @@ o.spec("EntityUtils", function () {
]) ])
}) })
o("computePatches works on aggregations of cardinality zeroorone", async function () {
const testEntity = await createFilledTestEntity()
testEntity.testAssociation[0].testZeroOrOneAggregation = null
let sk = aes256RandomKey()
const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(
TestTypeRef,
assertNotNull(testEntity._original),
)
const currentParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(TestTypeRef, testEntity)
const currentUntypedInstance = await dummyInstancePipeline.mapAndEncrypt(TestTypeRef, testEntity, sk)
let objectDiff = await computePatches(
originalParsedInstance,
currentParsedInstance,
currentUntypedInstance,
testTypeModel,
dummyTypeReferenceResolver,
false,
)
o(objectDiff).deepEquals([
createPatch({
attributePath: "3/aggId/10",
value: '["aggOnAggId"]',
patchOperation: PatchOperationType.REMOVE_ITEM,
}),
])
})
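The attributePath in the fixture above is compact enough to deserve a decoding sketch. The reading is inferred from this test alone, not from the model code: path segments alternate attribute ids and aggregate ids, and REMOVE_ITEM drops the child listed in value.

// hypothetical helper, only to make the fixture readable; not part of the codebase
function describePatchSketch(attributePath: string, patchOperation: string, value: string): string {
	const segments = attributePath.split("/") // e.g. ["3", "aggId", "10"]
	const removedIds = JSON.parse(value) as string[]
	return `${patchOperation} at [${segments.join(" -> ")}] removing [${removedIds.join(", ")}]`
}

// describePatchSketch("3/aggId/10", "removeItem", '["aggOnAggId"]')
// -> 'removeItem at [3 -> aggId -> 10] removing [aggOnAggId]'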
o("computePatches works on aggregates on aggregations and additem operation", async function () { o("computePatches works on aggregates on aggregations and additem operation", async function () {
const testEntity = await createFilledTestEntity() const testEntity = await createFilledTestEntity()
@ -634,6 +639,12 @@ o.spec("EntityUtils", function () {
testBytes: null, testBytes: null,
} as TestAggregateOnAggregate, } as TestAggregateOnAggregate,
], ],
testZeroOrOneAggregation: {
_type: TestAggregateOnAggregateRef,
_finalIvs: {},
_id: "aggOnAggId",
testBytes: null,
} as TestAggregateOnAggregate,
} as TestAggregate, } as TestAggregate,
], ],
testBoolean: false, testBoolean: false,
View file
@ -32,6 +32,8 @@ import { SyncTracker } from "../../../../src/common/api/main/SyncTracker.js"
import { InstancePipeline } from "../../../../src/common/api/worker/crypto/InstancePipeline" import { InstancePipeline } from "../../../../src/common/api/worker/crypto/InstancePipeline"
import { TypeModelResolver } from "../../../../src/common/api/common/EntityFunctions" import { TypeModelResolver } from "../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils" import { EntityUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
import { CryptoFacade } from "../../../../src/common/api/worker/crypto/CryptoFacade"
import { EventInstancePrefetcher } from "../../../../src/common/api/worker/EventInstancePrefetcher"
o.spec("EventBusClientTest", function () { o.spec("EventBusClientTest", function () {
let ebc: EventBusClient let ebc: EventBusClient
@ -48,6 +50,8 @@ o.spec("EventBusClientTest", function () {
let socketFactory: (path: string) => WebSocket let socketFactory: (path: string) => WebSocket
let typeModelResolver: TypeModelResolver let typeModelResolver: TypeModelResolver
let entityClient: EntityClient let entityClient: EntityClient
let cryptoFacadeMock: CryptoFacade
let eventInstancePrefetcher: EventInstancePrefetcher
function initEventBus() { function initEventBus() {
ebc = new EventBusClient( ebc = new EventBusClient(
@ -61,6 +65,8 @@ o.spec("EventBusClientTest", function () {
progressTrackerMock, progressTrackerMock,
syncTrackerMock, syncTrackerMock,
typeModelResolver, typeModelResolver,
cryptoFacadeMock,
eventInstancePrefetcher,
) )
} }
@ -82,6 +88,7 @@ o.spec("EventBusClientTest", function () {
listenerMock = object() listenerMock = object()
progressTrackerMock = object() progressTrackerMock = object()
syncTrackerMock = object() syncTrackerMock = object()
eventInstancePrefetcher = object()
cacheMock = object({ cacheMock = object({
async entityEventsReceived(events): Promise<ReadonlyArray<EntityUpdateData>> { async entityEventsReceived(events): Promise<ReadonlyArray<EntityUpdateData>> {
return events.slice() return events.slice()
@ -114,6 +121,7 @@ o.spec("EventBusClientTest", function () {
when(userMock.getLoggedInUser()).thenReturn(user) when(userMock.getLoggedInUser()).thenReturn(user)
when(userMock.isFullyLoggedIn()).thenReturn(true) when(userMock.isFullyLoggedIn()).thenReturn(true)
when(userMock.createAuthHeaders()).thenReturn({}) when(userMock.createAuthHeaders()).thenReturn({})
when(eventInstancePrefetcher.preloadEntities(matchers.anything(), matchers.anything())).thenResolve()
restClient = new EntityRestClientMock() restClient = new EntityRestClientMock()
@ -124,6 +132,7 @@ o.spec("EventBusClientTest", function () {
typeModelResolver = clientInitializedTypeModelResolver() typeModelResolver = clientInitializedTypeModelResolver()
entityClient = new EntityClient(restClient, typeModelResolver) entityClient = new EntityClient(restClient, typeModelResolver)
instancePipeline = instancePipelineFromTypeModelResolver(typeModelResolver) instancePipeline = instancePipelineFromTypeModelResolver(typeModelResolver)
cryptoFacadeMock = object()
initEventBus() initEventBus()
}) })
@ -174,6 +183,9 @@ o.spec("EventBusClientTest", function () {
operation: OperationType.CREATE, operation: OperationType.CREATE,
instanceId: update.instanceId, instanceId: update.instanceId,
instanceListId: update.instanceListId, instanceListId: update.instanceListId,
instance: null,
patches: null,
isPrefetched: false,
} }
const eventsReceivedDefer = defer() const eventsReceivedDefer = defer()
View file
@ -73,12 +73,18 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceId: userId, instanceId: userId,
instanceListId: "", instanceListId: "",
operation: OperationType.UPDATE, operation: OperationType.UPDATE,
instance: null,
patches: null,
isPrefetched: false,
}, },
{ {
typeRef: UserGroupKeyDistributionTypeRef, typeRef: UserGroupKeyDistributionTypeRef,
instanceId: userGroupId, instanceId: userGroupId,
instanceListId: "", instanceListId: "",
operation: OperationType.CREATE, operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}, },
] ]
@ -97,6 +103,9 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceId: userId, instanceId: userId,
instanceListId: "", instanceListId: "",
operation: OperationType.UPDATE, operation: OperationType.UPDATE,
instance: null,
patches: null,
isPrefetched: false,
}, },
] ]
@ -117,6 +126,9 @@ o.spec("EventBusEventCoordinatorTest", () => {
instanceListId, instanceListId,
instanceId, instanceId,
operation: OperationType.CREATE, operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}, },
] ]
View file
@ -0,0 +1,662 @@
import o from "@tutao/otest"
import { CacheStorage, DefaultEntityRestCache, EntityRestCache } from "../../../../src/common/api/worker/rest/DefaultEntityRestCache"
import { UserFacade } from "../../../../src/common/api/worker/facades/UserFacade"
import { EntityUpdateTypeRef, GroupMembershipTypeRef, User, UserTypeRef } from "../../../../src/common/api/entities/sys/TypeRefs"
import { TypeModelResolver } from "../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData, entityUpdateToUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
import { clientInitializedTypeModelResolver, createTestEntity, modelMapperFromTypeModelResolver } from "../../TestUtils"
import { CalendarEventTypeRef, MailDetailsBlobTypeRef, MailTypeRef } from "../../../../src/common/api/entities/tutanota/TypeRefs"
import { OperationType } from "../../../../src/common/api/common/TutanotaConstants"
import { matchers, object, verify, when } from "testdouble"
import { downcast, getTypeString, promiseMap } from "@tutao/tutanota-utils"
import { EventInstancePrefetcher } from "../../../../src/common/api/worker/EventInstancePrefetcher"
import { CacheMode, EntityRestClient, EntityRestClientLoadOptions } from "../../../../src/common/api/worker/rest/EntityRestClient"
import { elementIdPart, listIdPart, timestampToGeneratedId } from "../../../../src/common/api/common/utils/EntityUtils"
import { Entity, ServerModelParsedInstance } from "../../../../src/common/api/common/EntityTypes"
import { mapToObject } from "@tutao/tutanota-test-utils"
import { ProgressMonitorDelegate } from "../../../../src/common/api/worker/ProgressMonitorDelegate"
o.spec("EventInstancePrefetcherTest", function () {
let cacheStorage: CacheStorage
let entityCacheClient: EntityRestCache
let entityRestClient: EntityRestClient
let userMock: UserFacade
let user: User
let typeModelResolver: TypeModelResolver
let eventInstancePrefetcher: EventInstancePrefetcher
const fetchBlobOpt: EntityRestClientLoadOptions = { cacheMode: CacheMode.ReadAndWrite }
const fetchInstanceOpt: EntityRestClientLoadOptions = { cacheMode: CacheMode.WriteOnly }
let modelMapper
let progressMonitorMock: ProgressMonitorDelegate
let id1: Id = timestampToGeneratedId(2)
let id2: Id = timestampToGeneratedId(3)
let id3: Id = timestampToGeneratedId(4)
let id4: Id = timestampToGeneratedId(5)
o.beforeEach(async function () {
cacheStorage = object<CacheStorage>()
entityRestClient = object()
progressMonitorMock = object()
typeModelResolver = clientInitializedTypeModelResolver()
modelMapper = modelMapperFromTypeModelResolver(typeModelResolver)
user = createTestEntity(UserTypeRef, {
userGroup: createTestEntity(GroupMembershipTypeRef, {
group: "userGroupId",
}),
})
userMock = object("user")
when(userMock.getLoggedInUser()).thenReturn(user)
when(userMock.isFullyLoggedIn()).thenReturn(true)
when(userMock.createAuthHeaders()).thenReturn({})
when(entityRestClient.mapInstancesToEntity(matchers.anything(), matchers.anything())).thenDo((typeRef, parsedInstances) => {
return promiseMap(parsedInstances, (parsedInstance) => modelMapper.mapToInstance(typeRef, parsedInstance))
})
entityCacheClient = new DefaultEntityRestCache(entityRestClient, cacheStorage, typeModelResolver, object())
eventInstancePrefetcher = new EventInstancePrefetcher(entityCacheClient)
})
async function toStorableInstance(entity: Entity): Promise<ServerModelParsedInstance> {
return downcast<ServerModelParsedInstance>(await modelMapper.mapToClientModelParsedInstance(entity._type, entity))
}
o("When there is at least one element per list - fetch all of em", async () => {
const updateTemplate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
_id: "eventBatch",
application: "tutanota",
typeId: MailTypeRef.typeId.toString(),
operation: OperationType.CREATE,
instance: null,
}),
)
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id2,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id1,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id2,
})
when(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "firstListId", Array.of(id1, id2), undefined, fetchInstanceOpt)).thenResolve([])
when(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "secondListId", Array.of(id1, id2), undefined, fetchInstanceOpt)).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
verify(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "firstListId", Array.of(id1, id2), undefined, fetchInstanceOpt), { times: 1 })
verify(entityRestClient.loadMultipleParsedInstances(MailTypeRef, "secondListId", Array.of(id1, id2), undefined, fetchInstanceOpt), { times: 1 })
})
o("Do not prefetch element type", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: UserTypeRef,
instanceId: id1,
instanceListId: "",
operation: OperationType.CREATE,
patches: null,
instance: null,
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), { instanceId: id1 })
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), { instanceId: id2 })
const allEventsFromAllBatch = Array.of(firstUpdate, secondUpdate)
const instancesToFetch = await eventInstancePrefetcher.groupedListElementUpdatedInstances(allEventsFromAllBatch, progressMonitorMock)
o(mapToObject(instancesToFetch)).deepEquals({})
})
// make sure instances that are deleted are not fetched, otherwise the whole request will fail with NotFound
o("When an instance is deleted at the end, still fetch the previous events", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: CalendarEventTypeRef,
operation: OperationType.CREATE,
instance: null,
patches: null,
instanceListId: "",
instanceId: "",
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), { instanceId: id2 })
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id3,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(thirdUpdate), { operation: OperationType.DELETE })
const fifthUpdate: EntityUpdateData = Object.assign(structuredClone(thirdUpdate), {
instanceId: id4,
})
const allUpdates = Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate, fifthUpdate)
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(allUpdates, progressMonitorMock)).get(
getTypeString(updateTemplate.typeRef),
)!
o(mapToObject(instancesToFetch.get("firstListId")!)).deepEquals(
mapToObject(
new Map([
[id1, [0]],
[id2, [1]],
]),
),
)
const expectedSecondListWithoutDeleteEvent = mapToObject(
new Map([
[id3, [2]],
[id4, [4]],
]),
)
o(mapToObject(instancesToFetch.get("secondListId")!)).deepEquals(expectedOnlySecondListWithoutId3)
})
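The assertions in this test pin down the grouping structure returned by groupedListElementUpdatedInstances. A sketch inferred from the fixtures, not from the implementation: the outer key is the type string, then the list id, then the element id, mapping to the indexes of that element's updates in the flat update array; a DELETE operation contributes no index of its own.

type GroupedUpdatesSketch = Map<
	string, // result of getTypeString(typeRef)
	Map<
		Id, // instanceListId
		Map<Id, number[]> // instanceId -> indexes into the flat update array
	>
>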
o("Returns indexes of multiple batches for a single element with multiple updates", async () => {
const updateTemplate: EntityUpdateData = {
typeRef: CalendarEventTypeRef,
operation: OperationType.CREATE,
instance: null,
patches: null,
instanceListId: "",
instanceId: "",
isPrefetched: false,
}
const firstUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "firstListId",
instanceId: id1,
})
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
operation: OperationType.UPDATE,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
operation: OperationType.UPDATE,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(updateTemplate), {
instanceListId: "secondListId",
instanceId: id2,
})
const allUpdates = Array.of(firstUpdate, secondUpdate, fourthUpdate, thirdUpdate)
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(allUpdates, progressMonitorMock)).get(
getTypeString(updateTemplate.typeRef),
)!
o(mapToObject(instancesToFetch.get("firstListId")!)).deepEquals(mapToObject(new Map([[id1, [0, 1, 3]]])))
o(mapToObject(instancesToFetch.get("secondListId")!)).deepEquals(mapToObject(new Map([[id2, [2]]])))
})
o("When a create event have a instance attached to it do not fetch it", async () => {
const testEntity = createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.CREATE,
instanceListId: "firstListId",
instanceId: id1,
instance: downcast({}),
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
})
const firstUpdate = await entityUpdateToUpdateData(typeModelResolver, testEntity, downcast({}))
const secondUpdate = Object.assign(structuredClone(firstUpdate), { instance: null, instanceId: id2 })
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock))
.get(getTypeString(MailTypeRef))!
.get(firstUpdate.instanceListId)!
const expectedOnlyUpdateWithoutInstance = mapToObject(new Map([[id2, [1]]]))
o(mapToObject(instancesToFetch)).deepEquals(expectedOnlyUpdateWithoutInstance)
})
o("When a update event have a patchList attached to it do not fetch it", async () => {
const firstUpdate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.UPDATE,
instanceListId: "firstListId",
instanceId: id1,
patch: downcast({ patches: [] }),
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
}),
)
const secondUpdate = Object.assign(structuredClone(firstUpdate), { patches: null, instanceId: id2 })
const instancesToFetch = (await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock))
.get(getTypeString(MailTypeRef))!
.get(firstUpdate.instanceListId)!
o(mapToObject(instancesToFetch)).deepEquals(mapToObject(new Map([[id2, [1]]])))
})
o("Ignores update events for non list elements", async () => {
const firstUpdate = await entityUpdateToUpdateData(
typeModelResolver,
createTestEntity(EntityUpdateTypeRef, {
operation: OperationType.UPDATE,
instanceListId: "",
instanceId: id1,
application: MailTypeRef.app,
typeId: MailTypeRef.typeId.toString(),
}),
)
const secondUpdate = Object.assign(structuredClone(firstUpdate), { instanceListId: "listId", instanceId: id2 })
const instancesToFetch = (
await eventInstancePrefetcher.groupedListElementUpdatedInstances(Array.of(firstUpdate, secondUpdate), progressMonitorMock)
).get(getTypeString(MailTypeRef))!
const expectedOnlyListElementInstance = mapToObject(new Map([["listId", new Map([[id2, [1]]])]]))
o(mapToObject(instancesToFetch)).deepEquals(expectedOnlyListElementInstance)
})
o("should load mailDetails for create mail event", async () => {
const firstMail = createTestEntity(
MailTypeRef,
{ _id: ["firstMailListId", id1], mailDetails: ["archiveId", "firstBlob"] },
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{ _id: ["firstMailListId", id2], mailDetails: ["archiveId", "secondBlob"] },
{ populateAggregates: true },
)
const thirdMail = createTestEntity(
MailTypeRef,
{ _id: ["secondMailListId", id3], mailDetails: ["archiveId", "thirdBlob"] },
{ populateAggregates: true },
)
const fourthMail = createTestEntity(
MailTypeRef,
{ _id: ["secondMailListId", id4], mailDetails: ["archiveId", "fourthBlob"] },
{ populateAggregates: true },
)
const firstUpdate: EntityUpdateData = {
instanceId: elementIdPart(firstMail._id),
instanceListId: listIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(secondMail._id),
instanceListId: listIdPart(secondMail._id),
operation: OperationType.UPDATE,
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(thirdMail._id),
instanceListId: listIdPart(thirdMail._id),
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceId: elementIdPart(fourthMail._id),
instanceListId: listIdPart(fourthMail._id),
})
when(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", matchers.anything(), matchers.anything(), matchers.anything()),
).thenResolve([])
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId, secondUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(firstMail), await toStorableInstance(secondMail)))
// even though thirdMail is also in the same list as fourthMail, we simulate some missing instances on the server side and return only one
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
fourthUpdate.instanceListId,
[thirdUpdate.instanceId, fourthUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(fourthMail)))
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
// verify the mailDetails loop (for (const [listId, mails] of mailDetailsByList.entries())) groups blob loads rather than fetching one by one
verify(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["firstBlob", "secondBlob"], matchers.anything(), fetchBlobOpt),
{
times: 1,
},
)
verify(entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["fourthBlob"], matchers.anything(), fetchBlobOpt), {
times: 1,
})
})
o("should ignore all error while fetching", async () => {
const firstMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id1] })
const secondMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id2] })
const thirdMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id3] })
const fourthMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id4] })
const firstUpdate: EntityUpdateData = {
instanceId: elementIdPart(firstMail._id),
instanceListId: listIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(secondMail._id),
instanceId: elementIdPart(secondMail._id),
})
const thirdUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(thirdMail._id),
instanceId: elementIdPart(thirdMail._id),
operation: OperationType.UPDATE,
})
const fourthUpdate: EntityUpdateData = Object.assign(structuredClone(firstUpdate), {
instanceListId: listIdPart(fourthMail._id),
instanceId: elementIdPart(fourthMail._id),
operation: OperationType.UPDATE,
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("first error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
secondUpdate.instanceListId,
[secondUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("second error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
thirdUpdate.instanceListId,
[thirdUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("third error"))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
fourthUpdate.instanceListId,
[fourthUpdate.instanceId],
matchers.anything(),
fetchInstanceOpt,
),
).thenReturn(Promise.reject("fourth error"))
await eventInstancePrefetcher.preloadEntities(Array.of(firstUpdate, secondUpdate, thirdUpdate, fourthUpdate), progressMonitorMock)
o(firstUpdate.isPrefetched).equals(false)
o(secondUpdate.isPrefetched).equals(false)
o(thirdUpdate.isPrefetched).equals(false)
o(fourthUpdate.isPrefetched).equals(false)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
firstUpdate.instanceListId,
[firstUpdate.instanceId, thirdUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
secondUpdate.instanceListId,
[secondUpdate.instanceId, fourthUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
})
o("set preFetched flag to true for fetched instances", async () => {
const passMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id1] }, { populateAggregates: true })
const secondPassMail = createTestEntity(MailTypeRef, { _id: ["firstMailListId", id3] }, { populateAggregates: true })
const failMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id2] }, { populateAggregates: true })
const secondFailMail = createTestEntity(MailTypeRef, { _id: ["secondMailListId", id4] }, { populateAggregates: true })
const passingUpdate: EntityUpdateData = {
instanceId: elementIdPart(passMail._id),
instanceListId: listIdPart(passMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondPassingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(secondPassMail._id),
instanceId: elementIdPart(secondPassMail._id),
})
const failingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(failMail._id),
instanceId: elementIdPart(failMail._id),
})
const secondFailingUpdate = Object.assign(structuredClone(passingUpdate), {
instanceListId: listIdPart(secondFailMail._id),
instanceId: elementIdPart(secondFailMail._id),
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
passingUpdate.instanceListId,
[passingUpdate.instanceId, secondPassingUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(passMail), await toStorableInstance(secondPassMail)))
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
failingUpdate.instanceListId,
[failingUpdate.instanceId, secondFailingUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(passingUpdate, failingUpdate, secondPassingUpdate, secondFailingUpdate), progressMonitorMock)
o(passingUpdate.isPrefetched).equals(true)
o(secondPassingUpdate.isPrefetched).equals(true)
o(failingUpdate.isPrefetched).equals(false)
o(secondFailingUpdate.isPrefetched).equals(false)
})
o("set preFetched flag to false for missing instances", async () => {
const firstMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id1] }, { populateAggregates: true })
const secondMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id2] }, { populateAggregates: true })
const thirdMail = createTestEntity(MailTypeRef, { _id: ["mailListId", id3] }, { populateAggregates: true })
const firstMailUpdate: EntityUpdateData = {
typeRef: MailTypeRef,
instanceListId: "mailListId",
instanceId: elementIdPart(firstMail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
}
const secondMailUpdate: EntityUpdateData = Object.assign(structuredClone(firstMailUpdate), { instanceId: elementIdPart(secondMail._id) })
const thirdMailUpdate: EntityUpdateData = Object.assign(structuredClone(firstMailUpdate), { instanceId: elementIdPart(thirdMail._id) })
// only return first & third mail
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
"mailListId",
[firstMailUpdate.instanceId, secondMailUpdate.instanceId, thirdMailUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(firstMail), await toStorableInstance(thirdMail)))
await eventInstancePrefetcher.preloadEntities(Array.of(firstMailUpdate, secondMailUpdate, thirdMailUpdate), progressMonitorMock)
o(firstMailUpdate.isPrefetched).equals(true)
o(thirdMailUpdate.isPrefetched).equals(true)
o(secondMailUpdate.isPrefetched).equals(false)
})
o("Multiple events of same instance are marked as prefetched", async () => {
const createEvent: EntityUpdateData = {
typeRef: MailTypeRef,
instanceListId: "mailListId",
instanceId: id1,
operation: OperationType.CREATE,
patches: null,
instance: null,
isPrefetched: false,
}
const updateEvent: EntityUpdateData = Object.assign(structuredClone(createEvent), { operation: OperationType.UPDATE })
const createSecondEvent: EntityUpdateData = Object.assign(structuredClone(createEvent), {
instanceId: id2,
})
const updateSecondEvent: EntityUpdateData = Object.assign(structuredClone(createSecondEvent), {
operation: OperationType.UPDATE,
})
const mail = createTestEntity(
MailTypeRef,
{
_id: ["mailListId", id1],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{
_id: ["mailListId", id2],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
createEvent.instanceListId,
[createEvent.instanceId, createSecondEvent.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(mail), await toStorableInstance(secondMail)))
when(
entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", matchers.anything(), matchers.anything(), matchers.anything()),
).thenResolve([])
await eventInstancePrefetcher.preloadEntities(Array.of(createEvent, updateEvent, createSecondEvent, updateSecondEvent), progressMonitorMock)
o(createEvent.isPrefetched).equals(true)
o(updateEvent.isPrefetched).equals(true)
})
o("prefetched flag is not set to true if mailDetails blob fails to download", async () => {
const mail = createTestEntity(
MailTypeRef,
{
_id: ["firstMailListId", id1],
mailDetails: ["archiveId", "firstBlob"],
},
{ populateAggregates: true },
)
const secondMail = createTestEntity(
MailTypeRef,
{
_id: ["firstMailListId", id2],
mailDetails: ["archiveId", "secondBlob"],
},
{ populateAggregates: true },
)
const mailUpdate: EntityUpdateData = {
instanceId: elementIdPart(mail._id),
instanceListId: listIdPart(mail._id),
operation: OperationType.CREATE,
instance: null,
patches: null,
isPrefetched: false,
typeRef: MailTypeRef,
}
const secondMailUpdate: EntityUpdateData = Object.assign(structuredClone(mailUpdate), {
instanceId: id2,
})
when(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
mailUpdate.instanceListId,
[mailUpdate.instanceId, secondMailUpdate.instanceId],
undefined,
fetchInstanceOpt,
),
).thenResolve(Array.of(await toStorableInstance(mail), await toStorableInstance(secondMail)))
when(entityRestClient.loadMultipleParsedInstances(MailDetailsBlobTypeRef, "archiveId", ["firstBlob"], matchers.anything(), fetchBlobOpt)).thenReturn(
Promise.reject("second error"),
)
await eventInstancePrefetcher.preloadEntities(Array.of(mailUpdate, secondMailUpdate), progressMonitorMock)
o(mailUpdate.isPrefetched).equals(false)
o(secondMailUpdate.isPrefetched).equals(false)
verify(
entityRestClient.loadMultipleParsedInstances(
MailTypeRef,
mailUpdate.instanceListId,
[mailUpdate.instanceId, secondMailUpdate.instanceId],
matchers.anything(),
matchers.anything(),
),
{ times: 1 },
)
verify(
entityRestClient.loadMultipleParsedInstances(
MailDetailsBlobTypeRef,
"archiveId",
["firstBlob", "secondBlob"],
matchers.anything(),
matchers.anything(),
),
{
times: 1,
},
)
})
})
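The tests above pin down the prefetching contract exercised by this commit's test file. A minimal sketch of that contract, not the shipped EventInstancePrefetcher (names like UpdateLike, groupByListId and loadGroup are illustrative): updates are bucketed per instanceListId (and per typeRef in the real prefetcher), each bucket is fetched with one batch request, duplicate events collapse to one requested id, and only updates whose instance actually came back are flagged as prefetched.

type Id = string
interface UpdateLike {
	instanceListId: Id
	instanceId: Id
	isPrefetched: boolean
}

function groupByListId<T extends UpdateLike>(updates: readonly T[]): Map<Id, T[]> {
	const groups = new Map<Id, T[]>()
	for (const update of updates) {
		const bucket = groups.get(update.instanceListId) ?? []
		bucket.push(update)
		groups.set(update.instanceListId, bucket)
	}
	return groups
}

// loadGroup stands in for loadMultipleParsedInstances: it may return fewer
// instances than requested, and it may reject wholesale.
async function preload<T extends UpdateLike>(updates: readonly T[], loadGroup: (listId: Id, ids: Id[]) => Promise<Id[]>): Promise<void> {
	for (const [listId, bucket] of groupByListId(updates)) {
		// several events for the same instance resolve to a single requested id
		const uniqueIds = [...new Set(bucket.map((u) => u.instanceId))]
		try {
			const returnedIds = new Set(await loadGroup(listId, uniqueIds))
			for (const update of bucket) {
				// instances missing from the response keep isPrefetched === false
				update.isPrefetched = returnedIds.has(update.instanceId)
			}
		} catch (e) {
			// a failed batch (e.g. the mailDetails blob download above) leaves the whole bucket unflagged
		}
	}
}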

View file

@ -223,7 +223,7 @@ o.spec("CryptoMapper", function () {
const sk = [4136869568, 4101282953, 2038999435, 962526794, 1053028316, 3236029410, 1618615449, 3232287205]
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2JWpmlpWEhgG5zqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
-3: [{ 2: "123", 6: "someCustomId", 9: [] }],
+3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
7: "AWBaC3ipyi9kxJn7USkbW1SLXPjgU8T5YqpIP/dmTbyRwtXFU9tQbYBm12gNpI9KJfwO14FN25hjC3SlngSBlzs=",
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
@ -248,7 +248,7 @@ o.spec("CryptoMapper", function () {
5: new Date("2025-01-01T13:00:00.000Z"),
7: true,
// 6 is _id and will be generated
-3: [{ 2: "123", 6: "aggregateId", 9: [] }],
+3: [{ 2: "123", 6: "aggregateId", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: new Uint8Array([93, 100, 153, 150, 95, 10, 107, 53, 164, 219, 212, 180, 106, 221, 132, 233]) },
} as unknown as ClientModelParsedInstance
@ -271,7 +271,7 @@ o.spec("CryptoMapper", function () {
o("decryptParsedInstance with missing sk sets _errors", async function () {
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2JWpmlpWEhgG5zqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
-3: [{ 2: "123", 6: "someCustomId", 9: [] }],
+3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance
@ -285,7 +285,7 @@ o.spec("CryptoMapper", function () {
1: "encrypted string",
5: new Date("2025-01-01T13:00:00.000Z"),
// 6 is _id and will be generated
-3: [{ 2: "123", 9: [] }],
+3: [{ 2: "123", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: new Uint8Array([93, 100, 153, 150, 95, 10, 107, 53, 164, 219, 212, 180, 106, 221, 132, 233]) },
} as unknown as ClientModelParsedInstance
@ -297,7 +297,7 @@ o.spec("CryptoMapper", function () {
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "",
-3: [{ 2: "123", 6: "someCustomId", 9: [] }],
+3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance
@ -314,7 +314,7 @@ o.spec("CryptoMapper", function () {
1: "",
5: new Date("2025-01-01T13:00:00.000Z"),
// 6 is _id and will be generated
-3: [{ 2: "123", 9: [] }],
+3: [{ 2: "123", 9: [], 10: [] }],
4: ["associatedElementId"],
_finalIvs: { 1: null },
} as unknown as ClientModelParsedInstance
@ -327,7 +327,7 @@ o.spec("CryptoMapper", function () {
const sk = [4136869568, 4101282953, 2038999435, 962526794, 1053028316, 3236029410, 1618615449, 3232287205]
const encryptedInstance: ServerModelEncryptedParsedInstance = {
1: "AV1kmZZfCms1pNvUtGrdhOlnDAr3zb2pmlpWEhgG5iwzqYK3g7PfRsi0vQAKLxXmrNRGp16SBKBa0gqXeFw9F6l7nbGs3U8uNLvs6Fi+9IWj",
-3: [{ 2: "123", 6: "someCustomId", 9: [] }],
+3: [{ 2: "123", 6: "someCustomId", 9: [], 10: [] }],
4: ["associatedElementId"],
5: new Date("2025-01-01T13:00:00.000Z"),
} as any as ServerModelEncryptedParsedInstance

View file

@ -132,6 +132,15 @@ export const testAggregateModel: TypeModel = {
final: false,
dependency: "tutanota",
},
+"10": {
+id: 10,
+name: "testZeroOrOneAggregation",
+type: AssociationType.Aggregation,
+cardinality: Cardinality.ZeroOrOne,
+refTypeId: 44,
+final: false,
+dependency: "tutanota",
+},
},
version: 0,
versioned: false,
@ -182,6 +191,7 @@ export type TestAggregate = Entity & {
_id: Id
testNumber: NumberString
testSecondLevelAssociation: TestAggregateOnAggregate[]
+testZeroOrOneAggregation: TestAggregateOnAggregate | null
}
export type TestEntity = Entity & {

View file

@ -81,7 +81,7 @@ o.spec("ModelMapper", function () {
const parsedInstance: ServerModelParsedInstance = {
1: "some encrypted string",
5: new Date("2025-01-01T13:00:00.000Z"),
-3: [{ 2: "123", 6: "123456", _finalIvs: {}, 9: [] } as unknown as ServerModelParsedInstance],
+3: [{ 2: "123", 6: "123456", _finalIvs: {}, 9: [], 10: [] } as unknown as ServerModelParsedInstance],
12: "generatedId",
13: ["listId", "elementId"],
4: ["associatedElementId"],
@ -102,6 +102,7 @@ o.spec("ModelMapper", function () {
testNumber: "123",
_id: "123456",
testSecondLevelAssociation: [],
+testZeroOrOneAggregation: null,
})
o(mappedInstance.testElementAssociation).equals("associatedElementId")
o(mappedInstance.testGeneratedId).equals("generatedId")

View file

@ -15,7 +15,13 @@ import { AttributeModel } from "../../../../../src/common/api/common/AttributeMo
const serverModelUntypedInstanceNetworkDebugging: ServerModelUntypedInstance = {
"1:testValue": "test string",
-"3:testAssociation": [{ "2:testNumber": "123", "9:testSecondLevelAssociation": [] }],
+"3:testAssociation": [
+{
+"2:testNumber": "123",
+"9:testSecondLevelAssociation": [],
+"10:testZeroOrOneAggregation": [],
+},
+],
"4:testListAssociation": ["assocId"],
"5:testDate": "1735736415000",
"7:testBoolean": "encryptedBool",
@ -23,7 +29,7 @@ const serverModelUntypedInstanceNetworkDebugging: ServerModelUntypedInstance = {
const serverModelUntypedInstance: ServerModelUntypedInstance = {
"1": "test string",
-"3": [{ "2": "123", 9: [] }],
+"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": "1735736415000",
"7": "encryptedBool",
@ -31,7 +37,7 @@ const serverModelUntypedInstance: ServerModelUntypedInstance = {
const clientModelEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
"1": "base64EncodedString",
-"3": [{ "2": "123", 9: [] }],
+"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": new Date("2025-01-01T13:00:15Z"),
"7": "encryptedBool",
@ -39,7 +45,7 @@ const clientModelEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
const faultyEncryptedParsedInstance: ClientModelEncryptedParsedInstance = {
"1": new Uint8Array(2),
-"3": [{ "2": "123", 9: [] }],
+"3": [{ "2": "123", "9": [], "10": [] }],
"4": ["assocId"],
"5": new Date("2025-01-01T13:00:15Z"),
} as unknown as ClientModelEncryptedParsedInstance
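Taken together, these fixtures show how a ZeroOrOne aggregation travels: on the wire it is an array with zero or one entries ("10": []), and the mapped instance ends up with testZeroOrOneAggregation: null, as in the ModelMapper test above. A hedged sketch of that conversion (mapZeroOrOne is illustrative, not the mapper's actual helper):

function mapZeroOrOne<T, R>(wireValue: readonly T[], mapAggregate: (value: T) => R): R | null {
	// empty array -> null, single entry -> the mapped aggregate
	return wireValue.length === 0 ? null : mapAggregate(wireValue[0])
}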

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -102,9 +102,12 @@ o.spec("ContactIndexer", () => {
function createUpdate(operation: OperationType, instanceListId: Id, instanceId: Id, typeRef: TypeRef<any> = ContactTypeRef): EntityUpdateData {
return {
-operation,
-instanceId,
-instanceListId,
-typeRef,
+operation: operation,
+instanceId: instanceId,
+instanceListId: instanceListId,
+typeRef: typeRef,
+instance: null,
+patches: null,
+isPrefetched: false,
}
}
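The same three fields recur in every fixture updated by this commit. A sketch of the shape the tests construct (field types inferred from the literals in this diff, not copied from EntityUpdateUtils):

type EntityUpdateDataSketch = {
	typeRef: { app: string; typeId: number } // stand-in for the real TypeRef
	instanceListId: string
	instanceId: string
	operation: unknown // an OperationType value
	instance: unknown | null // every fixture in this diff passes null
	patches: unknown | null // every fixture in this diff passes null
	isPrefetched: boolean // flipped by the prefetcher, as tested above
}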

View file

@ -1,25 +1,29 @@
import o from "@tutao/otest"
-import { batchMod, EntityModificationType, EventQueue, QueuedBatch } from "../../../../../src/common/api/worker/EventQueue.js"
+import { EventQueue, QueuedBatch } from "../../../../../src/common/api/worker/EventQueue.js"
-import { EntityUpdateTypeRef, GroupTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
+import { GroupTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
import { OperationType } from "../../../../../src/common/api/common/TutanotaConstants.js"
import { defer, delay } from "@tutao/tutanota-utils"
import { ConnectionError } from "../../../../../src/common/api/common/error/RestError.js"
-import { ContactTypeRef, MailboxGroupRootTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
+import { MailboxGroupRootTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
import { spy } from "@tutao/tutanota-test-utils"
import { EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils"
-import { createTestEntity } from "../../../TestUtils.js"
o.spec("EventQueueTest", function () {
let queue: EventQueue
let processElement: any
let lastProcess: { resolve: () => void; reject: (Error) => void; promise: Promise<void> }
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+instance: null,
+patches: null,
+}
const newUpdate = (type: OperationType, instanceId: string): EntityUpdateData => {
return {
operation: type,
instanceId,
instanceListId: "",
typeRef: MailTypeRef,
+...noPatchesAndInstance,
} as Partial<EntityUpdateData> as EntityUpdateData
}
@ -32,7 +36,7 @@ o.spec("EventQueueTest", function () {
}
return Promise.resolve()
})
-queue = new EventQueue("test!", true, processElement)
+queue = new EventQueue("test!", processElement)
})
o("pause and resume", async function () {
@ -89,7 +93,7 @@ o.spec("EventQueueTest", function () {
}
return Promise.resolve()
})
-let queue = new EventQueue("test 2!", true, (nextElement: QueuedBatch) => {
+let queue = new EventQueue("test 2!", (nextElement: QueuedBatch) => {
if (nextElement.batchId === "2") {
return Promise.reject(new ConnectionError("no connection"))
} else {
@ -103,359 +107,4 @@ o.spec("EventQueueTest", function () {
o(queue.queueSize()).equals(2)
o(queue.__processingBatch).equals(null)
})
o.spec("collapsing events", function () {
o.beforeEach(function () {
queue.pause()
})
o("create + delete == delete", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const deleteEvent = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [deleteEvent])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedDelete], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create + update == create", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const updateEvent = createUpdate(OperationType.UPDATE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [updateEvent])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
// new update got optimized away on the spot
])
})
o("create + create == create + create", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [createEvent2])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
const expectedCreate2 = createUpdate(OperationType.CREATE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedCreate2], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create + update + delete == delete", async function () {
const createEvent = createUpdate(OperationType.CREATE, "new-mail-list", "1")
const updateEvent = createUpdate(OperationType.UPDATE, "new-mail-list", "1")
const deleteEvent = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
queue.add("batch-id-1", "group-id", [createEvent])
queue.add("batch-id-2", "group-id", [updateEvent])
queue.add("batch-id-3", "group-id", [deleteEvent])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent.instanceListId, createEvent.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [], batchId: "batch-id-1", groupId: "group-id" }],
// update event was optimized away
[{ events: [expectedDelete], batchId: "batch-id-3", groupId: "group-id" }],
])
})
o("delete + create == delete + create", async function () {
// DELETE can happen after CREATE in case of custom id. We keep it as-is
const deleteEvent = createUpdate(OperationType.DELETE, "mail-list", "1")
const createEvent = createUpdate(OperationType.CREATE, "mail-list", "1")
queue.add("batch-id-0", "group-id", [deleteEvent])
queue.add("batch-id-1", "group-id", [createEvent])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [deleteEvent], batchId: "batch-id-0", groupId: "group-id" }],
[{ events: [createEvent], batchId: "batch-id-1", groupId: "group-id" }],
])
})
o("delete + create + delete + create == delete + create", async function () {
// This tests that create still works after a preceding delete
const deleteEvent1 = createUpdate(OperationType.DELETE, "list", "1")
const nonEmptyEventInBetween = createUpdate(OperationType.CREATE, "list2", "2")
const createEvent1 = createUpdate(OperationType.CREATE, "list", "1")
const deleteEvent2 = createUpdate(OperationType.DELETE, "list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "list", "1")
queue.add("batch-id-1", "group-id", [deleteEvent1])
queue.add("batch-id-1.1", "group-id", [nonEmptyEventInBetween])
queue.add("batch-id-2", "group-id", [createEvent1])
queue.add("batch-id-3", "group-id", [deleteEvent2])
queue.add("batch-id-4", "group-id", [createEvent2])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedDelete2 = createUpdate(OperationType.DELETE, createEvent1.instanceListId, createEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedDelete], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [nonEmptyEventInBetween], batchId: "batch-id-1.1", groupId: "group-id" }],
[{ events: [], batchId: "batch-id-2", groupId: "group-id" }],
[{ events: [expectedDelete2], batchId: "batch-id-3", groupId: "group-id" }],
[{ events: [expectedCreate], batchId: "batch-id-4", groupId: "group-id" }],
])
})
o("delete (list 1) + create (list 2) == delete (list 1) + create (list 2)", async function () {
// entity updates for the same element id but different list IDs do not influence each other
const deleteEvent1 = createUpdate(OperationType.DELETE, "list1", "1")
const createEvent1 = createUpdate(OperationType.CREATE, "list2", "1")
queue.add("batch-id-1", "group-id", [deleteEvent1])
queue.add("batch-id-2", "group-id", [createEvent1])
queue.resume()
await lastProcess.promise
const expectedDelete = createUpdate(OperationType.DELETE, deleteEvent1.instanceListId, deleteEvent1.instanceId)
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedDelete], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedCreate], batchId: "batch-id-2", groupId: "group-id" }],
])
})
o("create (list 1) + update (list 1) + delete (list 2) == create (list 1) + delete (list 2)", async function () {
// entity updates for the same element id but different list IDs do not influence each other
const createEvent1 = createUpdate(OperationType.CREATE, "list1", "1")
const updateEvent1 = createUpdate(OperationType.UPDATE, "list1", "1")
const deleteEvent1 = createUpdate(OperationType.DELETE, "list2", "1")
queue.add("batch-id-1", "group-id", [createEvent1])
queue.add("batch-id-2", "group-id", [updateEvent1])
queue.add("batch-id-3", "group-id", [deleteEvent1])
queue.resume()
await lastProcess.promise
const expectedCreate = createUpdate(OperationType.CREATE, createEvent1.instanceListId, createEvent1.instanceId)
const expectedDelete = createUpdate(OperationType.DELETE, deleteEvent1.instanceListId, deleteEvent1.instanceId)
o(processElement.invocations).deepEquals([
[{ events: [expectedCreate], batchId: "batch-id-1", groupId: "group-id" }],
[{ events: [expectedDelete], batchId: "batch-id-3", groupId: "group-id" }],
])
})
o("same batch in two different groups", async function () {
const createEvent1 = createUpdate(OperationType.CREATE, "old-mail-list", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "old-mail-list", "1")
queue.add("batch-id-1", "group-id-1", [createEvent1])
queue.add("batch-id-1", "group-id-2", [createEvent2])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [createEvent1], batchId: "batch-id-1", groupId: "group-id-1" }],
[{ events: [createEvent1], batchId: "batch-id-1", groupId: "group-id-2" }],
])
})
o(
"[delete (list 1) + create (list 2)] + delete (list 2) + create (list 2) = [delete (list 1) + create (list 2)] + delete (list 2) + create (list 2)",
async function () {
const deleteEvent1 = createUpdate(OperationType.DELETE, "l1", "1")
const createEvent1 = createUpdate(OperationType.CREATE, "l2", "1")
const deleteEvent2 = createUpdate(OperationType.DELETE, "l2", "1")
const createEvent2 = createUpdate(OperationType.CREATE, "l2", "1")
queue.add("batch-id-1", "group-id-1", [deleteEvent1, createEvent1])
queue.add("batch-id-2", "group-id-1", [deleteEvent2])
queue.add("batch-id-3", "group-id-1", [createEvent2])
queue.resume()
await lastProcess.promise
o(processElement.invocations).deepEquals([
[{ events: [deleteEvent1], batchId: "batch-id-1", groupId: "group-id-1" }],
[{ events: [deleteEvent2], batchId: "batch-id-2", groupId: "group-id-1" }],
[{ events: [createEvent2], batchId: "batch-id-3", groupId: "group-id-1" }],
])
},
)
o("optimization does not fail when there are new events with the same id but a different type", function () {
const batchId = "batch-id-1"
const groupId = "group-id-1"
const instanceId = "instance-id-1"
const updateEvent1 = createUpdate(OperationType.UPDATE, "", instanceId)
const updateEvent2 = createUpdate(OperationType.UPDATE, "", instanceId)
updateEvent1.typeRef = GroupTypeRef
updateEvent2.typeRef = MailboxGroupRootTypeRef
queue.add(batchId, groupId, [updateEvent1])
queue.add(batchId, groupId, [updateEvent2])
})
function createUpdate(type: OperationType, listId: Id, instanceId: Id): EntityUpdateData {
return {
typeRef: MailTypeRef,
operation: type,
instanceId,
instanceListId: listId,
}
}
})
o.spec("batchMod", function () {
const batchId = "batchId"
const instanceListId = "instanceListId"
const instanceId = "instanceId"
o("one entity with the same id and type", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the same type but different element id", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId: "instanceId2",
instanceListId,
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the same type but different list id", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId: "instanceListId2",
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("there is another op with the id but different type", async () => {
o(
batchMod(
batchId,
[
{
typeRef: ContactTypeRef,
instanceId,
instanceListId,
operation: OperationType.DELETE,
},
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
),
).equals(EntityModificationType.CREATE)
})
o("modification is based on operation of batch, not the argument", async () => {
o(
batchMod(
batchId,
[
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.CREATE,
},
],
{
typeRef: MailTypeRef,
instanceId,
instanceListId,
operation: OperationType.DELETE,
},
),
).equals(EntityModificationType.CREATE)
})
})
})

View file

@ -17,10 +17,9 @@ import { base64ToUint8Array, byteLength, concat, utf8Uint8ArrayToString } from "
import type { SearchIndexEntry, SearchIndexMetaDataRow } from "../../../../../src/common/api/worker/search/SearchTypes.js"
import { GroupMembershipTypeRef, UserTypeRef } from "../../../../../src/common/api/entities/sys/TypeRefs.js"
import { ContactTypeRef, MailTypeRef } from "../../../../../src/common/api/entities/tutanota/TypeRefs.js"
-import { GroupType, OperationType } from "../../../../../src/common/api/common/TutanotaConstants.js"
+import { GroupType } from "../../../../../src/common/api/common/TutanotaConstants.js"
import { aes256RandomKey, fixedIv, unauthenticatedAesDecrypt } from "@tutao/tutanota-crypto"
import { createTestEntity } from "../../../TestUtils.js"
-import { containsEventOfType, EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils.js"
import { ClientModelInfo } from "../../../../../src/common/api/common/EntityFunctions"
o.spec("Index Utils", () => {
@ -195,20 +194,6 @@ o.spec("Index Utils", () => {
o(filterMailMemberships(user)).deepEquals([mailGroup1, mailGroup2])
})
-o("containsEventOfType", function () {
-function createUpdate(type: OperationType, id: Id): EntityUpdateData {
-return {
-operation: type,
-instanceId: id,
-instanceListId: "",
-} as Partial<EntityUpdateData> as EntityUpdateData
-}
-o(containsEventOfType([], OperationType.CREATE, "1")).equals(false)
-o(containsEventOfType([createUpdate(OperationType.CREATE, "1")], OperationType.CREATE, "1")).equals(true)
-o(containsEventOfType([createUpdate(OperationType.DELETE, "1")], OperationType.CREATE, "1")).equals(false)
-o(containsEventOfType([createUpdate(OperationType.DELETE, "2")], OperationType.DELETE, "1")).equals(false)
-})
o("byteLength", function () {
o(byteLength("")).equals(0)
o(byteLength("A")).equals(1)

View file

@ -56,6 +56,10 @@ o.spec("IndexedDbIndexer", () => {
throw new ProgrammingError("not supported")
},
}
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+instance: null,
+patches: null,
+}
let keyLoaderFacade: KeyLoaderFacade
let mailIndexer: MailIndexer
@ -648,6 +652,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime-event-id",
instanceListId: "",
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
await indexer.processEntityEvents(realtimeEvents, previousNewestBatchId, groupId)
@ -719,6 +725,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime-event-id",
instanceListId: "",
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
@ -760,6 +768,8 @@ o.spec("IndexedDbIndexer", () => {
instanceListId: "list",
instanceId: "event1",
type: "",
+instance: null,
+patch: null,
}),
createEntityUpdate({
typeId: MailTypeRef.typeId.toString(),
@ -768,6 +778,8 @@ o.spec("IndexedDbIndexer", () => {
instanceListId: "list",
instanceId: "event2",
type: "",
+instance: null,
+patch: null,
}),
],
}),
@ -791,6 +803,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "realtime",
instanceListId: "list",
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
await indexer.processEntityEvents(realtimeUpdates, realtimeBatchId, groupId)
@ -812,12 +826,16 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceListId: "list",
instanceId: "event1",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceListId: "list",
instanceId: "event2",
+...noPatchesAndInstance,
+isPrefetched: false,
},
],
},
@ -1028,6 +1046,8 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-1",
instanceListId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
@ -1044,6 +1064,8 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-2",
instanceListId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
const batchId2 = "batch-id-2"
@ -1079,18 +1101,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.CREATE,
instanceId: "id-1",
instanceListId: "create",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.CREATE,
instanceId: "id-2",
instanceListId: "create",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceId: "id-3",
instanceListId: "create",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
@ -1098,18 +1126,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.UPDATE,
instanceId: "id-4",
instanceListId: "update",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.UPDATE,
instanceId: "id-5",
instanceListId: "update",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.UPDATE,
instanceId: "id-6",
instanceListId: "update",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
@ -1117,18 +1151,24 @@ o.spec("IndexedDbIndexer", () => {
operation: OperationType.DELETE,
instanceId: "id-7",
instanceListId: "delete",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: ContactTypeRef,
operation: OperationType.DELETE,
instanceId: "id-8",
instanceListId: "delete",
+...noPatchesAndInstance,
+isPrefetched: false,
},
{
typeRef: MailTypeRef,
operation: OperationType.DELETE,
instanceId: "id-9",
instanceListId: "delete",
+...noPatchesAndInstance,
+isPrefetched: false,
},
],
groupId: "blah",
@ -1323,6 +1363,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "instanceId",
instanceListId: "instanceListId",
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
when(mailIndexer.processEntityEvents(updates, matchers.anything(), matchers.anything())).thenDo(() => processDeferred.resolve())
@ -1362,6 +1404,8 @@ o.spec("IndexedDbIndexer", () => {
instanceId: "instanceId",
instanceListId: "instanceListId",
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
},
]
when(mailIndexer.processEntityEvents(updates, matchers.anything(), matchers.anything())).thenDo(() => processDeferred.resolve())

View file

@ -36,7 +36,6 @@ import { ElementDataOS, GroupDataOS, ObjectStoreName, SearchIndexMetaDataOS, Sea
import { AttributeModel } from "../../../../../src/common/api/common/AttributeModel"
import { ClientModelInfo } from "../../../../../src/common/api/common/EntityFunctions"
import { EntityUpdateData } from "../../../../../src/common/api/common/utils/EntityUpdateUtils"
-import { OperationType } from "../../../../../src/common/api/common/TutanotaConstants"
import { CancelledError } from "../../../../../src/common/api/common/error/CancelledError.js"
const mailTypeInfo = typeRefToTypeInfo(MailTypeRef)
@ -1029,12 +1028,6 @@ o.spec("IndexerCore", () => {
const instanceId = "L-dNNLe----1"
const instanceIdTimestamp = generatedIdToTimestamp(instanceId)
-const event: EntityUpdateData = {
-typeRef: MailTypeRef,
-instanceId,
-instanceListId: "",
-operation: OperationType.CREATE,
-}
const metaRowId = 3
const anotherMetaRowId = 4
const transaction: any = {
@ -1093,12 +1086,6 @@ o.spec("IndexerCore", () => {
let indexUpdate = _createNewIndexUpdate(mailTypeInfo)
let instanceId = "123"
-let event: EntityUpdateData = {
-typeRef: MailTypeRef,
-instanceId,
-instanceListId: "",
-operation: OperationType.CREATE,
-}
let transaction: any = {
get: (os, key) => {
o.check(os).equals(ElementDataOS)

View file

@ -859,15 +859,6 @@ o.spec("MailIndexer", () => {
}
})
-function createUpdate(operation: OperationType, listId: Id, instanceId: Id): EntityUpdateData {
-return {
-operation: operation,
-instanceListId: listId,
-instanceId: instanceId,
-typeRef: MailTypeRef,
-}
-}
function createMailInstances({
subject,
mailSetEntryId,

View file

@ -42,6 +42,11 @@ import { ClientModelInfo } from "../../../src/common/api/common/EntityFunctions"
import { EntityRestClient } from "../../../src/common/api/worker/rest/EntityRestClient"
o.spec("CalendarModel", function () {
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+instance: null,
+patches: null,
+}
o.spec("calendar events have same fields", function () {
let restClientMock: EntityRestClient
let calendarFacadeMock: CalendarFacade
@ -769,6 +774,8 @@ o.spec("CalendarModel", function () {
instanceListId: listIdPart(eventUpdate._id),
instanceId: elementIdPart(eventUpdate._id),
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
})
o(model.getFileIdToSkippedCalendarEventUpdates().get(getElementId(calendarFile))!).deepEquals(eventUpdate)
@ -784,6 +791,8 @@ o.spec("CalendarModel", function () {
instanceListId: listIdPart(calendarFile._id),
instanceId: elementIdPart(calendarFile._id),
operation: OperationType.UPDATE,
+...noPatchesAndInstance,
+isPrefetched: false,
})
o(model.getFileIdToSkippedCalendarEventUpdates().size).deepEquals(0)

View file

@ -418,6 +418,9 @@ o.spec("CalendarViewModel", function () {
instanceListId: getListId(eventToDrag),
instanceId: getElementId(eventToDrag),
operation: OperationType.CREATE,
+instance: null,
+patches: null,
+isPrefetched: false,
}
const updatedEventFromServer = makeEvent(getElementId(eventToDrag), newData, new Date(2021, 0, 5, 14, 30), assertNotNull(eventToDrag.uid))
entityClientMock.addListInstances(updatedEventFromServer)

View file

@ -82,7 +82,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
-await handler.onMailNotification(setupSseInfo(), notificationInfo)
+await handler.onMailNotification(setupSseInfo(), [notificationInfo])
verify(
notifier.submitGroupedNotification("translated:pushNewMail_msg", notificationInfo.mailAddress, "mailListId,mailElementId", matchers.anything()),
@ -110,7 +110,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
-await handler.onMailNotification(setupSseInfo(), notificationInfo)
+await handler.onMailNotification(setupSseInfo(), [notificationInfo])
verify(notifier.submitGroupedNotification(matchers.anything(), matchers.anything(), matchers.anything(), matchers.anything()), { times: 0 })
})
@ -131,7 +131,7 @@ o.spec("TutaNotificationHandler", () => {
userId: "user1",
})
-await handler.onMailNotification(setupSseInfo(), notificationInfo)
+await handler.onMailNotification(setupSseInfo(), [notificationInfo])
const listenerCaptor = matchers.captor()
verify(
@ -197,17 +197,17 @@ o.spec("TutaNotificationHandler", () => {
const requestDefer = mockFetchRequest(
fetch,
-"http://something.com/rest/tutanota/mail/mailListId/mailElementId",
+"http://something.com/rest/tutanota/mail/mailListId?ids=mailElementId",
{
v: tutanotaModelInfo.version.toString(),
cv: appVersion,
accessToken: "accessToken",
},
200,
-mailLiteral,
+[mailLiteral],
)
-await handler.onMailNotification(sseInfo, notificationInfo)
+await handler.onMailNotification(sseInfo, [notificationInfo])
await requestDefer
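The mocked URL above documents the request shape this change expects: one GET per mail list, with the element ids batched into an ids query parameter, returning an array of mails. A hypothetical helper (buildBatchedMailUrl is not the app's actual function) that produces the same URL:

function buildBatchedMailUrl(origin: string, listId: string, elementIds: string[]): string {
	return `${origin}/rest/tutanota/mail/${listId}?ids=${elementIds.join(",")}`
}
// buildBatchedMailUrl("http://something.com", "mailListId", ["mailElementId"])
// -> "http://something.com/rest/tutanota/mail/mailListId?ids=mailElementId"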

View file

@ -215,7 +215,8 @@ o.spec("TutaSseFacade", () => {
verify(
notificationHandler.onMailNotification(
sseInfo,
-matchers.argThat((actualNotificationInfo) => {
+matchers.argThat((actualNotificationInfos) => {
+let actualNotificationInfo = actualNotificationInfos[0]
actualNotificationInfo.mailId._id = null
removeOriginals(actualNotificationInfo)
return deepEqual(actualNotificationInfo, notificationInfo)

View file

@ -11,8 +11,7 @@ import { instance, matchers, object, when } from "testdouble"
import { UserController } from "../../../src/common/api/main/UserController.js"
import { createTestEntity } from "../TestUtils.js"
import { EntityUpdateData } from "../../../src/common/api/common/utils/EntityUpdateUtils.js"
-import { MailboxDetail, MailboxModel } from "../../../src/common/mailFunctionality/MailboxModel.js"
+import { MailboxModel } from "../../../src/common/mailFunctionality/MailboxModel.js"
-import { InboxRuleHandler } from "../../../src/mail-app/mail/model/InboxRuleHandler.js"
import { getElementId, getListId } from "../../../src/common/api/common/utils/EntityUtils.js"
import { MailModel } from "../../../src/mail-app/mail/model/MailModel.js"
import { EventController } from "../../../src/common/api/main/EventController.js"
@ -27,9 +26,7 @@ o.spec("MailModelTest", function () {
inboxFolder.folderType = MailSetKind.INBOX
const anotherFolder = createTestEntity(MailFolderTypeRef, { _id: ["folderListId", "archiveId"] })
anotherFolder.folderType = MailSetKind.ARCHIVE
-let mailboxDetails: Partial<MailboxDetail>[]
let logins: LoginController
-let inboxRuleHandler: InboxRuleHandler
let mailFacade: MailFacade
const restClient: EntityRestClientMock = new EntityRestClientMock()
@ -44,7 +41,6 @@ o.spec("MailModelTest", function () {
when(userController.isUpdateForLoggedInUserInstance(matchers.anything(), matchers.anything())).thenReturn(false)
when(logins.getUserController()).thenReturn(userController)
-inboxRuleHandler = object()
model = new MailModel(
downcast({}),
mailboxModel,
@ -55,8 +51,6 @@ o.spec("MailModelTest", function () {
null,
() => null,
)
-// not pretty, but works
-// model.mailboxDetails(mailboxDetails as MailboxDetail[])
})
o("doesn't send notification for another folder", async function () {
const mailSetEntry = createTestEntity(MailSetEntryTypeRef, { _id: [anotherFolder.entries, "mailSetEntryId"] })
@ -102,6 +96,9 @@ o.spec("MailModelTest", function () {
operation,
instanceListId,
instanceId,
+instance: null,
+patches: null,
+isPrefetched: false,
}
}
})

View file

@ -49,6 +49,7 @@ import { MailboxDetail, MailboxModel } from "../../../src/common/mailFunctionali
import { SendMailModel, TOO_MANY_VISIBLE_RECIPIENTS } from "../../../src/common/mailFunctionality/SendMailModel.js"
import { RecipientField } from "../../../src/common/mailFunctionality/SharedMailUtils.js"
import { getContactDisplayName } from "../../../src/common/contactsFunctionality/ContactUtils.js"
+import { EntityUpdateData } from "../../../src/common/api/common/utils/EntityUpdateUtils"
const { anything, argThat } = matchers
@ -88,6 +89,10 @@ const BODY_TEXT_1 = "lorem ipsum dolor yaddah yaddah"
const SUBJECT_LINE_1 = "Did you get that thing I sent ya"
const STRONG_PASSWORD = "@()IE!)(@FME)0-123jfDSA32SDACmmnvnvddEW"
const WEAK_PASSWORD = "123"
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+instance: null,
+patches: null,
+}
o.spec("SendMailModel", function () {
o.before(function () {
@ -575,36 +580,48 @@ o.spec("SendMailModel", function () {
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: UserTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: CustomerTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: NotificationMailTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: ChallengeTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
await model.handleEntityEvent({
typeRef: MailTypeRef,
operation: OperationType.CREATE,
instanceListId: "",
instanceId: "",
+...noPatchesAndInstance,
+isPrefetched: false,
})
verify(entity.load(anything(), anything(), anything()), { times: 0 })
})
@ -635,6 +652,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.UPDATE,
instanceListId,
instanceId,
+...noPatchesAndInstance,
+isPrefetched: false,
})
o(model.allRecipients().length).equals(2)
const updatedRecipient = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))
@ -668,6 +687,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.UPDATE,
instanceListId,
instanceId,
+...noPatchesAndInstance,
+isPrefetched: false,
})
o(model.allRecipients().length).equals(1)
const updatedContact = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))
@ -681,6 +702,8 @@ o.spec("SendMailModel", function () {
operation: OperationType.DELETE,
instanceListId,
instanceId,
+...noPatchesAndInstance,
+isPrefetched: false,
})
o(model.allRecipients().length).equals(1)
const updatedContact = model.allRecipients().find((r) => r.contact && isSameId(r.contact._id, existingContact._id))

View file

@ -50,6 +50,10 @@ o.spec("ConversationListModel", () => {
mailGroup: createTestEntity(GroupTypeRef),
mailboxGroupRoot: createTestEntity(MailboxGroupRootTypeRef),
}
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+instance: null,
+patches: null,
+}
const mailSetEntriesListId = "entries"
const _ownerGroup = "me"
@ -368,6 +372,9 @@ o.spec("ConversationListModel", () => {
instanceListId: listIdPart(mailSetEntryId),
instanceId: elementIdPart(mailSetEntryId),
operation: OperationType.CREATE,
+instance: null,
+patches: null,
+isPrefetched: false,
}
when(entityClient.load(MailSetEntryTypeRef, mailSetEntryId)).thenResolve(
@ -410,6 +417,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
const oldMails = model.mails
@ -436,6 +445,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(labels[1]),
instanceId: getElementId(labels[1]),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
entityUpdateData.operation = OperationType.DELETE
@ -461,6 +472,8 @@ o.spec("ConversationListModel", () => {
instanceListId: listIdPart(someMail.mailSetEntryId),
instanceId: elementIdPart(someMail.mailSetEntryId),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
const oldItems = model.mails
@ -498,6 +511,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(newEntry),
instanceId: getElementId(newEntry),
operation: OperationType.CREATE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
when(entityClient.load(MailSetEntryTypeRef, newEntry._id)).thenResolve(newEntry)
@ -581,6 +596,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(0),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -606,6 +623,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(2),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -631,6 +650,8 @@ o.spec("ConversationListModel", () => {
instanceListId: mailSetEntriesListId,
instanceId: makeMailSetElementId(1),
operation: OperationType.DELETE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
o.check(model.mails).deepEquals(oldMails)
@ -667,6 +688,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
@ -694,6 +717,8 @@ o.spec("ConversationListModel", () => {
instanceListId: getListId(mail),
instanceId: getElementId(mail),
operation: OperationType.UPDATE,
+...noPatchesAndInstance,
+isPrefetched: false,
}
when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
entityUpdateData.operation = OperationType.DELETE


@@ -49,6 +49,10 @@ o.spec("MailListModel", () => {
 	mailGroup: createTestEntity(GroupTypeRef),
 	mailboxGroupRoot: createTestEntity(MailboxGroupRootTypeRef),
 }
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+	instance: null,
+	patches: null,
+}
 const mailSetEntriesListId = "entries"
 const _ownerGroup = "me"
@@ -351,6 +355,8 @@ o.spec("MailListModel", () => {
 	instanceListId: getListId(labels[1]),
 	instanceId: getElementId(labels[1]),
 	operation: OperationType.DELETE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 entityUpdateData.operation = OperationType.UPDATE
@@ -371,6 +377,8 @@ o.spec("MailListModel", () => {
 	instanceListId: getListId(labels[1]),
 	instanceId: getElementId(labels[1]),
 	operation: OperationType.DELETE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 entityUpdateData.operation = OperationType.DELETE
@@ -389,6 +397,8 @@ o.spec("MailListModel", () => {
 	instanceListId: listIdPart(someMail.mailSetEntryId),
 	instanceId: elementIdPart(someMail.mailSetEntryId),
 	operation: OperationType.DELETE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 const oldItems = model.items
@@ -422,6 +432,8 @@ o.spec("MailListModel", () => {
 	instanceListId: getListId(newEntry),
 	instanceId: getElementId(newEntry),
 	operation: OperationType.CREATE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 when(entityClient.load(MailSetEntryTypeRef, newEntry._id)).thenResolve(newEntry)
@@ -476,6 +488,8 @@ o.spec("MailListModel", () => {
 	instanceListId: getListId(mail),
 	instanceId: getElementId(mail),
 	operation: OperationType.UPDATE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
@@ -494,6 +508,8 @@ o.spec("MailListModel", () => {
 	instanceListId: getListId(mail),
 	instanceId: getElementId(mail),
 	operation: OperationType.UPDATE,
+	...noPatchesAndInstance,
+	isPrefetched: false,
 }
 when(entityClient.load(MailTypeRef, mail._id)).thenResolve(mail)
 entityUpdateData.operation = OperationType.DELETE


@@ -22,6 +22,7 @@ import { createTestEntity } from "../../TestUtils.js"
 import { MailboxDetail, MailboxModel } from "../../../../src/common/mailFunctionality/MailboxModel.js"
 import { MailModel } from "../../../../src/mail-app/mail/model/MailModel.js"
 import { ClientModelInfo } from "../../../../src/common/api/common/EntityFunctions"
+import { EntityUpdateData } from "../../../../src/common/api/common/utils/EntityUpdateUtils"
 o.spec("ConversationViewModel", function () {
 let conversation: ConversationEntry[]
@@ -41,6 +42,10 @@ o.spec("ConversationViewModel", function () {
 let canUseConversationView: boolean
 const listId = "listId"
+const noPatchesAndInstance: Pick<EntityUpdateData, "instance" | "patches"> = {
+	instance: null,
+	patches: null,
+}
 const viewModelFactory = async (): Promise<
 	(options: CreateMailViewerOptions, mailboxDetails: MailboxDetail, mailboxProperties: MailboxProperties) => MailViewerViewModel
@@ -253,6 +258,8 @@ o.spec("ConversationViewModel", function () {
 	operation: OperationType.CREATE,
 	instanceListId: listId,
 	instanceId: yetAnotherMail.conversationEntry[1],
+	...noPatchesAndInstance,
+	isPrefetched: false,
 },
 ],
 "mailGroupId",
@@ -290,6 +297,8 @@ o.spec("ConversationViewModel", function () {
 	operation: OperationType.UPDATE,
 	instanceListId: listId,
 	instanceId: anotherMail.conversationEntry[1],
+	...noPatchesAndInstance,
+	isPrefetched: false,
 },
 ],
 "mailGroupId",
@@ -316,6 +325,8 @@ o.spec("ConversationViewModel", function () {
 	operation: OperationType.CREATE,
 	instanceListId: listId,
 	instanceId: yetAnotherMail.conversationEntry[1],
+	...noPatchesAndInstance,
+	isPrefetched: false,
 },
 ],
 "mailGroupId",
@@ -340,6 +351,8 @@ o.spec("ConversationViewModel", function () {
 	operation: OperationType.CREATE,
 	instanceListId: listId,
 	instanceId: yetAnotherMail.conversationEntry[1],
+	isPrefetched: false,
+	...noPatchesAndInstance,
 },
 ],
 "mailGroupId",
@@ -379,6 +392,8 @@ o.spec("ConversationViewModel", function () {
 	operation: OperationType.UPDATE,
 	instanceListId: listId,
 	instanceId: trashDraftMail.conversationEntry[1],
+	...noPatchesAndInstance,
+	isPrefetched: false,
 },
 ],
 "mailGroupId",


@@ -1268,6 +1268,10 @@ pub struct EntityUpdate {
 	pub operation: i64,
 	#[serde(rename = "2556")]
 	pub typeId: Option<i64>,
+	#[serde(rename = "2617")]
+	pub instance: Option<String>,
+	#[serde(rename = "2618")]
+	pub patch: Option<PatchList>,
 }
 impl Entity for EntityUpdate {
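
These two new optional members carry the WebSocket enrichment: `instance` holds the serialized instance for CREATE events and `patch` the patch list for UPDATE events, so the client can fill or update its cache without an extra GET. A minimal TypeScript sketch of that dispatch, with hypothetical `CacheStorage` and `applyUpdate`/`load` stand-ins (the real EventBusClient/PatchMerger wiring differs):

// Sketch only: applying an enriched entity event to a client cache.
type Patch = { attributePath: string; value: string | null } // hypothetical shape

interface UpdateEvent {
	instanceListId: string
	instanceId: string
	operation: "CREATE" | "UPDATE" | "DELETE"
	instance: unknown | null // present for CREATE
	patches: Patch[] | null // present for UPDATE
}

interface CacheStorage {
	get(listId: string, id: string): Promise<unknown | null>
	put(listId: string, id: string, entity: unknown): Promise<void>
	delete(listId: string, id: string): Promise<void>
}

async function applyUpdate(
	event: UpdateEvent,
	cache: CacheStorage,
	load: (listId: string, id: string) => Promise<unknown>, // fallback GET
): Promise<void> {
	switch (event.operation) {
		case "CREATE":
			if (event.instance != null) {
				// The event already carries the instance: cache it directly.
				return cache.put(event.instanceListId, event.instanceId, event.instance)
			}
			break
		case "UPDATE": {
			const cached = await cache.get(event.instanceListId, event.instanceId)
			if (cached != null && event.patches != null) {
				// Merge the patches into the cached entry (PatchMerger-like step).
				const merged = { ...(cached as Record<string, unknown>) }
				for (const p of event.patches) merged[p.attributePath] = p.value
				return cache.put(event.instanceListId, event.instanceId, merged)
			}
			break
		}
		case "DELETE":
			return cache.delete(event.instanceListId, event.instanceId)
	}
	// Without enrichment we still need the additional GET request.
	const entity = await load(event.instanceListId, event.instanceId)
	await cache.put(event.instanceListId, event.instanceId, entity)
}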
@@ -5978,24 +5982,6 @@ impl Entity for Patch {
 	}
 }
-#[derive(uniffi::Record, Clone, Serialize, Deserialize)]
-#[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
-pub struct PatchList {
-	#[serde(rename = "2573")]
-	pub _format: i64,
-	#[serde(rename = "2574")]
-	pub patches: Vec<Patch>,
-}
-impl Entity for PatchList {
-	fn type_ref() -> TypeRef {
-		TypeRef {
-			app: AppName::Sys,
-			type_id: TypeId::from(2572),
-		}
-	}
-}
 #[derive(uniffi::Record, Clone, Serialize, Deserialize)]
 #[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
 pub struct IdentityKeyPair {
@@ -6147,3 +6133,21 @@ impl Entity for RolloutGetOut {
 		}
 	}
 }
+#[derive(uniffi::Record, Clone, Serialize, Deserialize)]
+#[cfg_attr(any(test, feature = "testing"), derive(PartialEq, Debug))]
+pub struct PatchList {
+	#[serde(rename = "2615")]
+	pub _id: Option<CustomId>,
+	#[serde(rename = "2616")]
+	pub patches: Vec<Patch>,
+}
+impl Entity for PatchList {
+	fn type_ref() -> TypeRef {
+		TypeRef {
+			app: AppName::Sys,
+			type_id: TypeId::from(2614),
+		}
+	}
+}
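
With the move, `PatchList` is no longer a standalone type with a `_format` field: it gets new type and field ids (2614/2615/2616 instead of 2572/2573/2574) and an optional aggregate `_id`, matching its new role as a value nested inside `EntityUpdate`. A sketch of the resulting decoded shape, limited to the fields visible in this diff (TypeScript names are illustrative; on the wire the fields are keyed by the numeric ids above):

// Illustrative decoded shape of an enriched EntityUpdate (other members elided).
type PatchListWire = {
	_id: string | null // aggregate id, replaces the old root-type _format
	patches: unknown[] // one entry per changed attribute
}

type EntityUpdateWire = {
	operation: number
	typeId: number | null
	instance: string | null // serialized instance, set for CREATE events
	patch: PatchListWire | null // patch list, set for UPDATE events
}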


@@ -106,7 +106,7 @@ use crate::entities::generated::sys::VersionData;
 use crate::entities::generated::sys::VersionReturn;
 pub struct AdminGroupKeyRotationService;
-crate::service_impl!(declare, AdminGroupKeyRotationService, "sys/admingroupkeyrotationservice", 131);
+crate::service_impl!(declare, AdminGroupKeyRotationService, "sys/admingroupkeyrotationservice", 132);
 crate::service_impl!(POST, AdminGroupKeyRotationService, AdminGroupKeyRotationPostIn, ());
 crate::service_impl!(GET, AdminGroupKeyRotationService, (), AdminGroupKeyRotationGetOut);
 crate::service_impl!(PUT, AdminGroupKeyRotationService, AdminGroupKeyRotationPutIn, ());
@@ -114,25 +114,25 @@ crate::service_impl!(PUT, AdminGroupKeyRotationService, AdminGroupKeyRotationPut
 pub struct AffiliatePartnerKpiService;
-crate::service_impl!(declare, AffiliatePartnerKpiService, "sys/affiliatepartnerkpiservice", 131);
+crate::service_impl!(declare, AffiliatePartnerKpiService, "sys/affiliatepartnerkpiservice", 132);
 crate::service_impl!(GET, AffiliatePartnerKpiService, (), AffiliatePartnerKpiServiceGetOut);
 pub struct AlarmService;
-crate::service_impl!(declare, AlarmService, "sys/alarmservice", 131);
+crate::service_impl!(declare, AlarmService, "sys/alarmservice", 132);
 crate::service_impl!(POST, AlarmService, AlarmServicePost, ());
 pub struct AppStoreSubscriptionService;
-crate::service_impl!(declare, AppStoreSubscriptionService, "sys/appstoresubscriptionservice", 131);
+crate::service_impl!(declare, AppStoreSubscriptionService, "sys/appstoresubscriptionservice", 132);
 crate::service_impl!(GET, AppStoreSubscriptionService, AppStoreSubscriptionGetIn, AppStoreSubscriptionGetOut);
 pub struct AutoLoginService;
-crate::service_impl!(declare, AutoLoginService, "sys/autologinservice", 131);
+crate::service_impl!(declare, AutoLoginService, "sys/autologinservice", 132);
 crate::service_impl!(POST, AutoLoginService, AutoLoginDataReturn, AutoLoginPostReturn);
 crate::service_impl!(GET, AutoLoginService, AutoLoginDataGet, AutoLoginDataReturn);
 crate::service_impl!(DELETE, AutoLoginService, AutoLoginDataDelete, ());
@@ -140,7 +140,7 @@ crate::service_impl!(DELETE, AutoLoginService, AutoLoginDataDelete, ());
 pub struct BrandingDomainService;
-crate::service_impl!(declare, BrandingDomainService, "sys/brandingdomainservice", 131);
+crate::service_impl!(declare, BrandingDomainService, "sys/brandingdomainservice", 132);
 crate::service_impl!(POST, BrandingDomainService, BrandingDomainData, ());
 crate::service_impl!(GET, BrandingDomainService, (), BrandingDomainGetReturn);
 crate::service_impl!(PUT, BrandingDomainService, BrandingDomainData, ());
@@ -149,37 +149,37 @@ crate::service_impl!(DELETE, BrandingDomainService, BrandingDomainDeleteData, ()
 pub struct ChangeKdfService;
-crate::service_impl!(declare, ChangeKdfService, "sys/changekdfservice", 131);
+crate::service_impl!(declare, ChangeKdfService, "sys/changekdfservice", 132);
 crate::service_impl!(POST, ChangeKdfService, ChangeKdfPostIn, ());
 pub struct ChangePasswordService;
-crate::service_impl!(declare, ChangePasswordService, "sys/changepasswordservice", 131);
+crate::service_impl!(declare, ChangePasswordService, "sys/changepasswordservice", 132);
 crate::service_impl!(POST, ChangePasswordService, ChangePasswordPostIn, ());
 pub struct CloseSessionService;
-crate::service_impl!(declare, CloseSessionService, "sys/closesessionservice", 131);
+crate::service_impl!(declare, CloseSessionService, "sys/closesessionservice", 132);
 crate::service_impl!(POST, CloseSessionService, CloseSessionServicePost, ());
 pub struct CreateCustomerServerProperties;
-crate::service_impl!(declare, CreateCustomerServerProperties, "sys/createcustomerserverproperties", 131);
+crate::service_impl!(declare, CreateCustomerServerProperties, "sys/createcustomerserverproperties", 132);
 crate::service_impl!(POST, CreateCustomerServerProperties, CreateCustomerServerPropertiesData, CreateCustomerServerPropertiesReturn);
 pub struct CustomDomainCheckService;
-crate::service_impl!(declare, CustomDomainCheckService, "sys/customdomaincheckservice", 131);
+crate::service_impl!(declare, CustomDomainCheckService, "sys/customdomaincheckservice", 132);
 crate::service_impl!(GET, CustomDomainCheckService, CustomDomainCheckGetIn, CustomDomainCheckGetOut);
 pub struct CustomDomainService;
-crate::service_impl!(declare, CustomDomainService, "sys/customdomainservice", 131);
+crate::service_impl!(declare, CustomDomainService, "sys/customdomainservice", 132);
 crate::service_impl!(POST, CustomDomainService, CustomDomainData, CustomDomainReturn);
 crate::service_impl!(PUT, CustomDomainService, CustomDomainData, ());
 crate::service_impl!(DELETE, CustomDomainService, CustomDomainData, ());
@@ -187,50 +187,50 @@ crate::service_impl!(DELETE, CustomDomainService, CustomDomainData, ());
 pub struct CustomerAccountTerminationService;
-crate::service_impl!(declare, CustomerAccountTerminationService, "sys/customeraccountterminationservice", 131);
+crate::service_impl!(declare, CustomerAccountTerminationService, "sys/customeraccountterminationservice", 132);
 crate::service_impl!(POST, CustomerAccountTerminationService, CustomerAccountTerminationPostIn, CustomerAccountTerminationPostOut);
 pub struct CustomerPublicKeyService;
-crate::service_impl!(declare, CustomerPublicKeyService, "sys/customerpublickeyservice", 131);
+crate::service_impl!(declare, CustomerPublicKeyService, "sys/customerpublickeyservice", 132);
 crate::service_impl!(GET, CustomerPublicKeyService, (), PublicKeyGetOut);
 pub struct CustomerService;
-crate::service_impl!(declare, CustomerService, "sys/customerservice", 131);
+crate::service_impl!(declare, CustomerService, "sys/customerservice", 132);
 crate::service_impl!(DELETE, CustomerService, DeleteCustomerData, ());
 pub struct DebitService;
-crate::service_impl!(declare, DebitService, "sys/debitservice", 131);
+crate::service_impl!(declare, DebitService, "sys/debitservice", 132);
 crate::service_impl!(PUT, DebitService, DebitServicePutData, ());
 pub struct DomainMailAddressAvailabilityService;
-crate::service_impl!(declare, DomainMailAddressAvailabilityService, "sys/domainmailaddressavailabilityservice", 131);
+crate::service_impl!(declare, DomainMailAddressAvailabilityService, "sys/domainmailaddressavailabilityservice", 132);
 crate::service_impl!(GET, DomainMailAddressAvailabilityService, DomainMailAddressAvailabilityData, DomainMailAddressAvailabilityReturn);
 pub struct ExternalPropertiesService;
-crate::service_impl!(declare, ExternalPropertiesService, "sys/externalpropertiesservice", 131);
+crate::service_impl!(declare, ExternalPropertiesService, "sys/externalpropertiesservice", 132);
 crate::service_impl!(GET, ExternalPropertiesService, (), ExternalPropertiesReturn);
 pub struct GiftCardRedeemService;
-crate::service_impl!(declare, GiftCardRedeemService, "sys/giftcardredeemservice", 131);
+crate::service_impl!(declare, GiftCardRedeemService, "sys/giftcardredeemservice", 132);
 crate::service_impl!(POST, GiftCardRedeemService, GiftCardRedeemData, ());
 crate::service_impl!(GET, GiftCardRedeemService, GiftCardRedeemData, GiftCardRedeemGetReturn);
 pub struct GiftCardService;
-crate::service_impl!(declare, GiftCardService, "sys/giftcardservice", 131);
+crate::service_impl!(declare, GiftCardService, "sys/giftcardservice", 132);
 crate::service_impl!(POST, GiftCardService, GiftCardCreateData, GiftCardCreateReturn);
 crate::service_impl!(GET, GiftCardService, (), GiftCardGetReturn);
 crate::service_impl!(DELETE, GiftCardService, GiftCardDeleteData, ());
@@ -238,38 +238,38 @@ crate::service_impl!(DELETE, GiftCardService, GiftCardDeleteData, ());
 pub struct GroupKeyRotationInfoService;
-crate::service_impl!(declare, GroupKeyRotationInfoService, "sys/groupkeyrotationinfoservice", 131);
+crate::service_impl!(declare, GroupKeyRotationInfoService, "sys/groupkeyrotationinfoservice", 132);
 crate::service_impl!(GET, GroupKeyRotationInfoService, (), GroupKeyRotationInfoGetOut);
 pub struct GroupKeyRotationService;
-crate::service_impl!(declare, GroupKeyRotationService, "sys/groupkeyrotationservice", 131);
+crate::service_impl!(declare, GroupKeyRotationService, "sys/groupkeyrotationservice", 132);
 crate::service_impl!(POST, GroupKeyRotationService, GroupKeyRotationPostIn, ());
 pub struct IdentityKeyService;
-crate::service_impl!(declare, IdentityKeyService, "sys/identitykeyservice", 131);
+crate::service_impl!(declare, IdentityKeyService, "sys/identitykeyservice", 132);
 crate::service_impl!(POST, IdentityKeyService, IdentityKeyPostIn, ());
 crate::service_impl!(GET, IdentityKeyService, IdentityKeyGetIn, IdentityKeyGetOut);
 pub struct InvoiceDataService;
-crate::service_impl!(declare, InvoiceDataService, "sys/invoicedataservice", 131);
+crate::service_impl!(declare, InvoiceDataService, "sys/invoicedataservice", 132);
 crate::service_impl!(GET, InvoiceDataService, InvoiceDataGetIn, InvoiceDataGetOut);
 pub struct LocationService;
-crate::service_impl!(declare, LocationService, "sys/locationservice", 131);
+crate::service_impl!(declare, LocationService, "sys/locationservice", 132);
 crate::service_impl!(GET, LocationService, (), LocationServiceGetReturn);
 pub struct MailAddressAliasService;
-crate::service_impl!(declare, MailAddressAliasService, "sys/mailaddressaliasservice", 131);
+crate::service_impl!(declare, MailAddressAliasService, "sys/mailaddressaliasservice", 132);
 crate::service_impl!(POST, MailAddressAliasService, MailAddressAliasServiceData, ());
 crate::service_impl!(GET, MailAddressAliasService, MailAddressAliasGetIn, MailAddressAliasServiceReturn);
 crate::service_impl!(DELETE, MailAddressAliasService, MailAddressAliasServiceDataDelete, ());
@@ -277,7 +277,7 @@ crate::service_impl!(DELETE, MailAddressAliasService, MailAddressAliasServiceDat
 pub struct MembershipService;
-crate::service_impl!(declare, MembershipService, "sys/membershipservice", 131);
+crate::service_impl!(declare, MembershipService, "sys/membershipservice", 132);
 crate::service_impl!(POST, MembershipService, MembershipAddData, ());
 crate::service_impl!(PUT, MembershipService, MembershipPutIn, ());
 crate::service_impl!(DELETE, MembershipService, MembershipRemoveData, ());
@@ -285,13 +285,13 @@ crate::service_impl!(DELETE, MembershipService, MembershipRemoveData, ());
 pub struct MultipleMailAddressAvailabilityService;
-crate::service_impl!(declare, MultipleMailAddressAvailabilityService, "sys/multiplemailaddressavailabilityservice", 131);
+crate::service_impl!(declare, MultipleMailAddressAvailabilityService, "sys/multiplemailaddressavailabilityservice", 132);
 crate::service_impl!(GET, MultipleMailAddressAvailabilityService, MultipleMailAddressAvailabilityData, MultipleMailAddressAvailabilityReturn);
 pub struct PaymentDataService;
-crate::service_impl!(declare, PaymentDataService, "sys/paymentdataservice", 131);
+crate::service_impl!(declare, PaymentDataService, "sys/paymentdataservice", 132);
 crate::service_impl!(POST, PaymentDataService, PaymentDataServicePostData, ());
 crate::service_impl!(GET, PaymentDataService, PaymentDataServiceGetData, PaymentDataServiceGetReturn);
 crate::service_impl!(PUT, PaymentDataService, PaymentDataServicePutData, PaymentDataServicePutReturn);
@@ -299,77 +299,77 @@ crate::service_impl!(PUT, PaymentDataService, PaymentDataServicePutData, Payment
 pub struct PlanService;
-crate::service_impl!(declare, PlanService, "sys/planservice", 131);
+crate::service_impl!(declare, PlanService, "sys/planservice", 132);
 crate::service_impl!(GET, PlanService, (), PlanServiceGetOut);
 pub struct PriceService;
-crate::service_impl!(declare, PriceService, "sys/priceservice", 131);
+crate::service_impl!(declare, PriceService, "sys/priceservice", 132);
 crate::service_impl!(GET, PriceService, PriceServiceData, PriceServiceReturn);
 pub struct PublicKeyService;
-crate::service_impl!(declare, PublicKeyService, "sys/publickeyservice", 131);
+crate::service_impl!(declare, PublicKeyService, "sys/publickeyservice", 132);
 crate::service_impl!(GET, PublicKeyService, PublicKeyGetIn, PublicKeyGetOut);
 crate::service_impl!(PUT, PublicKeyService, PublicKeyPutIn, ());
 pub struct ReferralCodeService;
-crate::service_impl!(declare, ReferralCodeService, "sys/referralcodeservice", 131);
+crate::service_impl!(declare, ReferralCodeService, "sys/referralcodeservice", 132);
 crate::service_impl!(POST, ReferralCodeService, ReferralCodePostIn, ReferralCodePostOut);
 crate::service_impl!(GET, ReferralCodeService, ReferralCodeGetIn, ());
 pub struct RegistrationCaptchaService;
-crate::service_impl!(declare, RegistrationCaptchaService, "sys/registrationcaptchaservice", 131);
+crate::service_impl!(declare, RegistrationCaptchaService, "sys/registrationcaptchaservice", 132);
 crate::service_impl!(POST, RegistrationCaptchaService, RegistrationCaptchaServiceData, ());
 crate::service_impl!(GET, RegistrationCaptchaService, RegistrationCaptchaServiceGetData, RegistrationCaptchaServiceReturn);
 pub struct RegistrationService;
-crate::service_impl!(declare, RegistrationService, "sys/registrationservice", 131);
+crate::service_impl!(declare, RegistrationService, "sys/registrationservice", 132);
 crate::service_impl!(POST, RegistrationService, RegistrationServiceData, RegistrationReturn);
 crate::service_impl!(GET, RegistrationService, (), RegistrationServiceData);
 pub struct ResetFactorsService;
-crate::service_impl!(declare, ResetFactorsService, "sys/resetfactorsservice", 131);
+crate::service_impl!(declare, ResetFactorsService, "sys/resetfactorsservice", 132);
 crate::service_impl!(DELETE, ResetFactorsService, ResetFactorsDeleteData, ());
 pub struct ResetPasswordService;
-crate::service_impl!(declare, ResetPasswordService, "sys/resetpasswordservice", 131);
+crate::service_impl!(declare, ResetPasswordService, "sys/resetpasswordservice", 132);
 crate::service_impl!(POST, ResetPasswordService, ResetPasswordPostIn, ());
 pub struct RolloutService;
-crate::service_impl!(declare, RolloutService, "sys/rolloutservice", 131);
+crate::service_impl!(declare, RolloutService, "sys/rolloutservice", 132);
 crate::service_impl!(GET, RolloutService, (), RolloutGetOut);
 pub struct SaltService;
-crate::service_impl!(declare, SaltService, "sys/saltservice", 131);
+crate::service_impl!(declare, SaltService, "sys/saltservice", 132);
 crate::service_impl!(GET, SaltService, SaltData, SaltReturn);
 pub struct SecondFactorAuthAllowedService;
-crate::service_impl!(declare, SecondFactorAuthAllowedService, "sys/secondfactorauthallowedservice", 131);
+crate::service_impl!(declare, SecondFactorAuthAllowedService, "sys/secondfactorauthallowedservice", 132);
 crate::service_impl!(GET, SecondFactorAuthAllowedService, (), SecondFactorAuthAllowedReturn);
 pub struct SecondFactorAuthService;
-crate::service_impl!(declare, SecondFactorAuthService, "sys/secondfactorauthservice", 131);
+crate::service_impl!(declare, SecondFactorAuthService, "sys/secondfactorauthservice", 132);
 crate::service_impl!(POST, SecondFactorAuthService, SecondFactorAuthData, ());
 crate::service_impl!(GET, SecondFactorAuthService, SecondFactorAuthGetData, SecondFactorAuthGetReturn);
 crate::service_impl!(DELETE, SecondFactorAuthService, SecondFactorAuthDeleteData, ());
@@ -377,77 +377,77 @@ crate::service_impl!(DELETE, SecondFactorAuthService, SecondFactorAuthDeleteData
 pub struct SessionService;
-crate::service_impl!(declare, SessionService, "sys/sessionservice", 131);
+crate::service_impl!(declare, SessionService, "sys/sessionservice", 132);
 crate::service_impl!(POST, SessionService, CreateSessionData, CreateSessionReturn);
 pub struct SignOrderProcessingAgreementService;
-crate::service_impl!(declare, SignOrderProcessingAgreementService, "sys/signorderprocessingagreementservice", 131);
+crate::service_impl!(declare, SignOrderProcessingAgreementService, "sys/signorderprocessingagreementservice", 132);
 crate::service_impl!(POST, SignOrderProcessingAgreementService, SignOrderProcessingAgreementData, ());
 pub struct SurveyService;
-crate::service_impl!(declare, SurveyService, "sys/surveyservice", 131);
+crate::service_impl!(declare, SurveyService, "sys/surveyservice", 132);
 crate::service_impl!(POST, SurveyService, SurveyDataPostIn, ());
 pub struct SwitchAccountTypeService;
-crate::service_impl!(declare, SwitchAccountTypeService, "sys/switchaccounttypeservice", 131);
+crate::service_impl!(declare, SwitchAccountTypeService, "sys/switchaccounttypeservice", 132);
 crate::service_impl!(POST, SwitchAccountTypeService, SwitchAccountTypePostIn, ());
 pub struct SystemKeysService;
-crate::service_impl!(declare, SystemKeysService, "sys/systemkeysservice", 131);
+crate::service_impl!(declare, SystemKeysService, "sys/systemkeysservice", 132);
 crate::service_impl!(GET, SystemKeysService, (), SystemKeysReturn);
 pub struct TakeOverDeletedAddressService;
-crate::service_impl!(declare, TakeOverDeletedAddressService, "sys/takeoverdeletedaddressservice", 131);
+crate::service_impl!(declare, TakeOverDeletedAddressService, "sys/takeoverdeletedaddressservice", 132);
 crate::service_impl!(POST, TakeOverDeletedAddressService, TakeOverDeletedAddressData, ());
 pub struct UpdatePermissionKeyService;
-crate::service_impl!(declare, UpdatePermissionKeyService, "sys/updatepermissionkeyservice", 131);
+crate::service_impl!(declare, UpdatePermissionKeyService, "sys/updatepermissionkeyservice", 132);
 crate::service_impl!(POST, UpdatePermissionKeyService, UpdatePermissionKeyData, ());
 pub struct UpdateSessionKeysService;
-crate::service_impl!(declare, UpdateSessionKeysService, "sys/updatesessionkeysservice", 131);
+crate::service_impl!(declare, UpdateSessionKeysService, "sys/updatesessionkeysservice", 132);
 crate::service_impl!(POST, UpdateSessionKeysService, UpdateSessionKeysPostIn, ());
 pub struct UpgradePriceService;
-crate::service_impl!(declare, UpgradePriceService, "sys/upgradepriceservice", 131);
+crate::service_impl!(declare, UpgradePriceService, "sys/upgradepriceservice", 132);
 crate::service_impl!(GET, UpgradePriceService, UpgradePriceServiceData, UpgradePriceServiceReturn);
 pub struct UserGroupKeyRotationService;
-crate::service_impl!(declare, UserGroupKeyRotationService, "sys/usergroupkeyrotationservice", 131);
+crate::service_impl!(declare, UserGroupKeyRotationService, "sys/usergroupkeyrotationservice", 132);
 crate::service_impl!(POST, UserGroupKeyRotationService, UserGroupKeyRotationPostIn, ());
 pub struct UserService;
-crate::service_impl!(declare, UserService, "sys/userservice", 131);
+crate::service_impl!(declare, UserService, "sys/userservice", 132);
 crate::service_impl!(DELETE, UserService, UserDataDelete, ());
 pub struct VerifierTokenService;
-crate::service_impl!(declare, VerifierTokenService, "sys/verifiertokenservice", 131);
+crate::service_impl!(declare, VerifierTokenService, "sys/verifiertokenservice", 132);
 crate::service_impl!(POST, VerifierTokenService, VerifierTokenServiceIn, VerifierTokenServiceOut);
 pub struct VersionService;
-crate::service_impl!(declare, VersionService, "sys/versionservice", 131);
+crate::service_impl!(declare, VersionService, "sys/versionservice", 132);
 crate::service_impl!(GET, VersionService, VersionData, VersionReturn);

File diff suppressed because it is too large.