2024-04-17 10:34:33 +02:00
import type { EntityRestInterface , OwnerEncSessionKeyProvider , OwnerKeyProvider } from "./EntityRestClient"
2022-12-27 15:37:40 +01:00
import { EntityRestClient , EntityRestClientSetupOptions } from "./EntityRestClient"
import { resolveTypeReference } from "../../common/EntityFunctions"
import { OperationType } from "../../common/TutanotaConstants"
2024-08-23 13:00:37 +02:00
import { assertNotNull , difference , getFirstOrThrow , getTypeId , groupBy , isEmpty , isSameTypeRef , lastThrow , TypeRef } from "@tutao/tutanota-utils"
2022-04-26 17:33:40 +02:00
import {
BucketPermissionTypeRef ,
EntityEventBatchTypeRef ,
2024-04-10 16:45:23 +02:00
EntityUpdate ,
KeyRotationTypeRef ,
2022-04-26 17:33:40 +02:00
PermissionTypeRef ,
RecoverCodeTypeRef ,
RejectedSenderTypeRef ,
SecondFactorTypeRef ,
2022-11-03 11:04:26 +01:00
SessionTypeRef ,
2024-04-10 16:45:23 +02:00
User ,
2024-06-26 17:55:00 +02:00
UserGroupKeyDistributionTypeRef ,
2024-05-21 16:48:24 +02:00
UserGroupRootTypeRef ,
2022-12-27 15:37:40 +01:00
UserTypeRef ,
2022-04-26 17:33:40 +02:00
} from "../../entities/sys/TypeRefs.js"
2024-07-05 11:32:43 +02:00
import { ValueType } from "../../common/EntityConstants.js"
2022-12-27 15:37:40 +01:00
import { NotAuthorizedError , NotFoundError } from "../../common/error/RestError"
2024-08-23 13:00:37 +02:00
import { CalendarEventUidIndexTypeRef , Mail , MailDetailsBlobTypeRef , MailSetEntryTypeRef , MailTypeRef } from "../../entities/tutanota/TypeRefs.js"
import { CUSTOM_MAX_ID , CUSTOM_MIN_ID , firstBiggerThanSecond , GENERATED_MAX_ID , GENERATED_MIN_ID , getElementId , isSameId } from "../../common/utils/EntityUtils"
2022-12-27 15:37:40 +01:00
import { ProgrammingError } from "../../common/error/ProgrammingError"
import { assertWorkerOrNode } from "../../common/Env"
import type { ListElementEntity , SomeEntity , TypeModel } from "../../common/EntityTypes"
2023-03-22 15:20:28 +01:00
import { ElementEntity } from "../../common/EntityTypes"
2023-02-10 12:01:42 +01:00
import { QueuedBatch } from "../EventQueue.js"
2022-12-27 15:37:40 +01:00
import { ENTITY_EVENT_BATCH_EXPIRE_MS } from "../EventBusClient"
import { CustomCacheHandlerMap } from "./CustomCacheHandler.js"
2024-01-08 17:14:09 +01:00
import { containsEventOfType , EntityUpdateData , getEventOfType } from "../../common/utils/EntityUpdateUtils.js"
2024-08-23 13:00:37 +02:00
import { isCustomIdType } from "../offline/OfflineStorage.js"
2022-01-07 15:58:30 +01:00
2017-08-15 13:54:22 +02:00
assertWorkerOrNode ( )
2022-02-28 12:13:28 +01:00
/ * *
*
* The minimum size of a range request when extending an existing range
* Because we extend by making ( potentially ) many range requests until we reach the startId
* We want to avoid that the requests are too small
* /
export const EXTEND_RANGE_MIN_CHUNK_SIZE = 40
2022-05-20 14:04:39 +02:00
const IGNORED_TYPES = [
EntityEventBatchTypeRef ,
PermissionTypeRef ,
BucketPermissionTypeRef ,
SessionTypeRef ,
SecondFactorTypeRef ,
RecoverCodeTypeRef ,
RejectedSenderTypeRef ,
2023-07-20 11:11:05 +02:00
// when doing automatic calendar updates, we will miss uid index entity updates if we're using the cache.
// this is mainly caused by some calendaring apps sending the same update multiple times in the same mail.
// the earliest place where we could deduplicate would be in entityEventsReceived on the calendarModel.
CalendarEventUidIndexTypeRef ,
2024-04-10 16:45:23 +02:00
KeyRotationTypeRef ,
2024-05-21 16:48:24 +02:00
UserGroupRootTypeRef ,
2024-06-26 17:55:00 +02:00
UserGroupKeyDistributionTypeRef ,
2022-05-20 14:04:39 +02:00
] as const
2022-02-28 12:13:28 +01:00
2022-05-30 11:01:00 +02:00
export interface EntityRestCache extends EntityRestInterface {
2022-01-12 14:43:01 +01:00
/ * *
* Clear out the contents of the cache .
* /
2022-12-27 15:37:40 +01:00
purgeStorage ( ) : Promise < void >
2022-01-12 14:43:01 +01:00
/ * *
* Get the batch id of the most recently processed batch for the given group .
* /
2022-12-27 15:37:40 +01:00
getLastEntityEventBatchForGroup ( groupId : Id ) : Promise < Id | null >
2022-01-12 14:43:01 +01:00
2022-03-01 16:19:18 +01:00
/ * *
* Saved tha batch id of the most recently processed batch manually .
*
* Is needed when the cache is new but we want to make sure that the next time we will download from this moment , even if we don ' t receive any events .
* /
2022-12-27 15:37:40 +01:00
setLastEntityEventBatchForGroup ( groupId : Id , batchId : Id ) : Promise < void >
2022-03-01 16:19:18 +01:00
2022-01-12 14:43:01 +01:00
/ * *
2022-02-28 17:23:22 +01:00
* Persist the last time client downloaded event batches . This is not the last * processed * item , merely when things were * downloaded * . We use it to
* detect out - of - sync .
2022-01-12 14:43:01 +01:00
* /
2022-12-27 15:37:40 +01:00
recordSyncTime ( ) : Promise < void >
2022-01-12 14:43:01 +01:00
/ * *
2022-02-28 17:23:22 +01:00
* Fetch the time since last time we downloaded event batches .
2022-01-12 14:43:01 +01:00
* /
2022-12-27 15:37:40 +01:00
timeSinceLastSyncMs ( ) : Promise < number | null >
2022-04-05 12:31:07 +02:00
/ * *
* Detect if out of sync based on stored "lastUpdateTime" and the current server time
* /
2022-12-27 15:37:40 +01:00
isOutOfSync ( ) : Promise < boolean >
2022-01-12 14:43:01 +01:00
}
2022-12-27 15:37:40 +01:00
export type Range = { lower : Id ; upper : Id }
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
export type LastUpdateTime = { type : "recorded" ; time : number } | { type : "never" } | { type : "uninitialized" }
2022-09-07 17:09:25 +02:00
2022-04-26 17:33:40 +02:00
/ * *
2022-05-12 17:06:57 +02:00
* Part of the cache storage only with subset of CacheStorage functionality
*
2022-04-26 17:33:40 +02:00
* Separate from the rest of the cache as a narrow interface to not expose the whole storage for cases where we want to only get the cached part of the list to
2022-05-12 17:11:00 +02:00
* display it even if we can ' t load the full page from the server or need some metadata .
2023-03-20 16:47:38 +01:00
*
* also exposes functions to repair an outdated cache in case we can ' t access the server without getting a new version of a cached entity
* ( mainly password changes )
2022-04-26 17:33:40 +02:00
* /
2022-05-12 17:06:57 +02:00
export interface ExposedCacheStorage {
2022-04-26 17:33:40 +02:00
/ * *
* Load range of entities . Does not include { @param start } .
* If { @param reverse } is false then returns entities newer than { @param start } in ascending order sorted by
* elementId .
* If { @param reverse } is true then returns entities older than { @param start } in descending order sorted by
* elementId .
* /
2022-12-27 15:37:40 +01:00
provideFromRange < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , start : Id , count : number , reverse : boolean ) : Promise < T [ ] >
2022-05-12 17:11:00 +02:00
2024-08-07 08:38:58 +02:00
/ * *
* Load a set of list element entities by id . Missing elements are not returned , no error is thrown .
* /
provideMultiple < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , elementIds : Id [ ] ) : Promise < Array < T > >
2022-05-24 18:31:01 +02:00
/ * *
* retrieve all list elements that are in the cache
* @param typeRef
* @param listId
* /
getWholeList < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id ) : Promise < Array < T > >
2022-09-07 17:09:25 +02:00
getLastUpdateTime ( ) : Promise < LastUpdateTime >
2022-11-28 17:38:17 +01:00
clearExcludedData ( ) : Promise < void >
2023-03-20 16:47:38 +01:00
/ * *
2023-03-22 15:20:28 +01:00
* remove an ElementEntity from the cache by typeRef and Id .
* the exposed interface is intentionally more narrow than the internal cacheStorage because
* we must maintain the integrity of our list ranges .
2023-03-20 16:47:38 +01:00
* * /
2023-03-22 15:20:28 +01:00
deleteIfExists < T extends ElementEntity > ( typeRef : TypeRef < T > , listId : null , id : Id ) : Promise < void >
2022-04-26 17:33:40 +02:00
}
2022-05-12 17:06:57 +02:00
export interface CacheStorage extends ExposedCacheStorage {
2022-01-12 14:43:01 +01:00
/ * *
* Get a given entity from the cache , expects that you have already checked for existence
* /
2022-12-27 15:37:40 +01:00
get < T extends SomeEntity > ( typeRef : TypeRef < T > , listId : Id | null , id : Id ) : Promise < T | null >
2022-01-12 14:43:01 +01:00
2022-07-04 14:55:17 +02:00
/ * *
* get a map with cache handlers for the customId types this storage implementation supports
* customId types that don 't have a custom handler don' t get served from the cache
* /
2022-12-27 15:37:40 +01:00
getCustomCacheHandlerMap ( entityRestClient : EntityRestClient ) : CustomCacheHandlerMap
2022-07-04 14:55:17 +02:00
2022-12-27 15:37:40 +01:00
isElementIdInCacheRange < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , id : Id ) : Promise < boolean >
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
put ( originalEntity : SomeEntity ) : Promise < void >
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
getRangeForList < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id ) : Promise < Range | null >
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
setUpperRangeForList < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , id : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
setLowerRangeForList < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , id : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
/ * *
* Creates a new list cache if there is none . Resets everything but elements .
* @param typeRef
* @param listId
* @param lower
* @param upper
* /
2022-12-27 15:37:40 +01:00
setNewRangeForList < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , lower : Id , upper : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
2022-12-27 15:37:40 +01:00
getIdsInRange < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id ) : Promise < Array < Id > >
2022-01-12 14:43:01 +01:00
2022-11-03 11:04:26 +01:00
/ * *
* Persist the last processed batch for a given group id .
* /
2022-12-27 15:37:40 +01:00
putLastBatchIdForGroup ( groupId : Id , batchId : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
2022-11-03 11:04:26 +01:00
/ * *
* Retrieve the least processed batch id for a given group .
* /
2022-12-27 15:37:40 +01:00
getLastBatchIdForGroup ( groupId : Id ) : Promise < Id | null >
2022-01-12 14:43:01 +01:00
2023-03-22 15:20:28 +01:00
deleteIfExists < T extends SomeEntity > ( typeRef : TypeRef < T > , listId : Id | null , id : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
purgeStorage ( ) : Promise < void >
putLastUpdateTime ( value : number ) : Promise < void >
2022-10-21 15:53:39 +02:00
getUserId ( ) : Id
deleteAllOwnedBy ( owner : Id ) : Promise < void >
2022-11-30 17:15:08 +01:00
/ * *
* We want to lock the access to the "ranges" db when updating / reading the
* offline available mail list ranges for each mail list ( referenced using the listId )
* @param listId the mail list that we want to lock
* /
lockRangesDbAccess ( listId : Id ) : Promise < void >
/ * *
* This is the counterpart to the function "lockRangesDbAccess(listId)"
* @param listId the mail list that we want to unlock
* /
unlockRangesDbAccess ( listId : Id ) : Promise < void >
2022-01-12 14:43:01 +01:00
}
2017-08-15 13:54:22 +02:00
/ * *
* This implementation provides a caching mechanism to the rest chain .
* It forwards requests to the entity rest client .
* The cache works as follows :
* If a read from the target fails , the request fails .
* If a read from the target is successful , the cache is written and the element returned .
* For LETs the cache stores one range per list id . if a range is requested starting in the stored range or at the range ends the missing elements are loaded from the server .
* Only ranges with elements with generated ids are stored in the cache . Custom id elements are only stored as single element currently . If needed this has to be extended for ranges .
* Range requests starting outside the stored range are only allowed if the direction is away from the stored range . In this case we load from the range end to avoid gaps in the stored range .
* Requests for creating or updating elements are always forwarded and not directly stored in the cache .
* On EventBusClient notifications updated elements are stored in the cache if the element already exists in the cache .
* On EventBusClient notifications new elements are only stored in the cache if they are LETs and in the stored range .
* On EventBusClient notifications deleted elements are removed from the cache .
*
* Range handling :
* | < | > c d e f g h i j k < | > |
2022-05-20 14:04:39 +02:00
* MIN_ID lowerRangeId ids in range upperRangeId MAX_ID
2017-08-15 13:54:22 +02:00
* lowerRangeId may be anything from MIN_ID to c , upperRangeId may be anything from k to MAX_ID
* /
2022-05-30 11:01:00 +02:00
export class DefaultEntityRestCache implements EntityRestCache {
2023-08-07 13:20:01 +02:00
constructor ( private readonly entityRestClient : EntityRestClient , private readonly storage : CacheStorage ) { }
2022-04-05 12:31:07 +02:00
2023-08-08 14:52:14 +02:00
/**
 * Load a single entity, serving it from the cache when possible.
 * Versioned reads and ignored types bypass cache writes.
 */
async load<T extends SomeEntity>(
	typeRef: TypeRef<T>,
	id: PropertyType<T, "_id">,
	queryParameters?: Dict,
	extraHeaders?: Dict,
	ownerKeyProvider?: OwnerKeyProvider,
): Promise<T> {
	const { listId, elementId } = expandId(id)

	// if a specific version is requested we have to load again and do not want to store it in the cache
	if (queryParameters?.version != null) {
		return await this.entityRestClient.load(typeRef, id, queryParameters, extraHeaders, ownerKeyProvider)
	}

	const cachedEntity = await this.storage.get(typeRef, listId, elementId)
	if (cachedEntity != null) {
		return cachedEntity
	}

	// cache miss: fetch from the server and remember the result (unless the type is never cached)
	const freshEntity = await this.entityRestClient.load(typeRef, id, queryParameters, extraHeaders, ownerKeyProvider)
	if (!isIgnoredType(typeRef)) {
		await this.storage.put(freshEntity)
	}
	return freshEntity
}
/** Load several entities by id; ignored types bypass the cache entirely. */
loadMultiple<T extends SomeEntity>(
	typeRef: TypeRef<T>,
	listId: Id | null,
	elementIds: Array<Id>,
	ownerEncSessionKeyProvider?: OwnerEncSessionKeyProvider,
): Promise<Array<T>> {
	if (isIgnoredType(typeRef)) {
		return this.entityRestClient.loadMultiple(typeRef, listId, elementIds, ownerEncSessionKeyProvider)
	}
	return this._loadMultiple(typeRef, listId, elementIds, ownerEncSessionKeyProvider)
}
/** Create a new entity on the server. Writes are always forwarded and not stored in the cache directly. */
setup<T extends SomeEntity>(listId: Id | null, instance: T, extraHeaders?: Dict, options?: EntityRestClientSetupOptions): Promise<Id> {
	return this.entityRestClient.setup(listId, instance, extraHeaders, options)
}
/** Create several entities on the server. Forwarded directly; the cache is not written. */
setupMultiple<T extends SomeEntity>(listId: Id | null, instances: Array<T>): Promise<Array<Id>> {
	return this.entityRestClient.setupMultiple(listId, instances)
}
/** Update an entity on the server. Forwarded directly; the cache is not written. */
update<T extends SomeEntity>(instance: T): Promise<void> {
	return this.entityRestClient.update(instance)
}
/** Delete an entity on the server. Forwarded directly to the rest client. */
erase<T extends SomeEntity>(instance: T): Promise<void> {
	return this.entityRestClient.erase(instance)
}
/** Batch id of the most recently processed entity event batch for the group, if any. */
getLastEntityEventBatchForGroup(groupId: Id): Promise<Id | null> {
	return this.storage.getLastBatchIdForGroup(groupId)
}
/** Manually record the most recently processed entity event batch for the group. */
setLastEntityEventBatchForGroup(groupId: Id, batchId: Id): Promise<void> {
	return this.storage.putLastBatchIdForGroup(groupId, batchId)
}
/** Drop everything from the user's offline database. */
purgeStorage(): Promise<void> {
	console.log("Purging the user's offline database")
	return this.storage.purgeStorage()
}
/** True when the last sync is older than the server's event batch retention window. */
async isOutOfSync(): Promise<boolean> {
	const elapsed = await this.timeSinceLastSyncMs()
	return elapsed != null && elapsed > ENTITY_EVENT_BATCH_EXPIRE_MS
}
/** Persist the current server time as the moment event batches were last downloaded. */
async recordSyncTime(): Promise<void> {
	await this.storage.putLastUpdateTime(this.getServerTimestampMs())
}
/**
 * Milliseconds since the last recorded sync, or null when we never synced.
 * @throws ProgrammingError when the offline storage was not initialized.
 */
async timeSinceLastSyncMs(): Promise<number | null> {
	const lastUpdate = await this.storage.getLastUpdateTime()
	let recordedAt: number
	switch (lastUpdate.type) {
		case "recorded":
			recordedAt = lastUpdate.time
			break
		case "never":
			return null
		case "uninitialized":
			throw new ProgrammingError("Offline storage is not initialized")
	}
	return this.getServerTimestampMs() - recordedAt
}
/** Server-adjusted "now" in milliseconds, via the underlying rest client. */
private getServerTimestampMs(): number {
	return this.entityRestClient.getRestClient().getServerTimestampMs()
}
/**
 * Delete a cached entity. Sometimes this is necessary to do to ensure you always load the new version.
 */
deleteFromCacheIfExists<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Id): Promise<void> {
	return this.storage.deleteIfExists(typeRef, listId, elementId)
}
private async _loadMultiple < T extends SomeEntity > (
typeRef : TypeRef < T > ,
listId : Id | null ,
ids : Array < Id > ,
2023-10-19 17:59:51 +02:00
ownerEncSessionKeyProvider? : OwnerEncSessionKeyProvider ,
2023-08-08 14:52:14 +02:00
) : Promise < Array < T > > {
2022-01-07 15:58:30 +01:00
const entitiesInCache : T [ ] = [ ]
const idsToLoad : Id [ ] = [ ]
for ( let id of ids ) {
2024-04-25 11:05:18 +02:00
const cachedEntity = await this . storage . get ( typeRef , listId , id )
if ( cachedEntity != null ) {
Remove LegacyMailWrapper (legacy mail bodies) and cleanup TutanotaModel
Prior to starting implementing static MailIds and MailSets, we
want to clean up the current TutanotaModel. Therefore, this commit
removes a lot of legacy metamodel definitions that are not used any
longer, including removing the LegacyMailWrapper (legacy mail bodies).
Additionally, this commit inter alia includes:
* removing types no longer needed after migrating to MailDetails, e.g.
the "body", "toRecipients", "ccRecipients", "bccRecipients",
"replyTos", "sentDate" and "headers" references / values from MAIL_TYPE
* removing "mails" reference form MAIL_BOX_TYPE
* removing "subFolders" reference from MAIL_FOLDER
* removing the legacy types MAIL_BODY_TYPE and MAIL_HEADERS
* removing Value.OLD_OWNER_GROUP_NAME, and Value.OLD_AREA_ID_NAME from
FILE_TYPE and CONTACT_TYPE
Closes #7255
Co-authored-by: sug <sug@tutao.de>
2024-07-23 10:05:35 +02:00
entitiesInCache . push ( cachedEntity )
2022-01-07 15:58:30 +01:00
} else {
idsToLoad . push ( id )
}
}
const entitiesFromServer : T [ ] = [ ]
if ( idsToLoad . length > 0 ) {
2023-10-19 17:59:51 +02:00
const entities = await this . entityRestClient . loadMultiple ( typeRef , listId , idsToLoad , ownerEncSessionKeyProvider )
2022-01-07 15:58:30 +01:00
for ( let entity of entities ) {
2022-01-12 14:43:01 +01:00
await this . storage . put ( entity )
2022-01-07 15:58:30 +01:00
entitiesFromServer . push ( entity )
}
}
return entitiesFromServer . concat ( entitiesInCache )
}
2022-02-25 17:33:51 +01:00
async loadRange < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , start : Id , count : number , reverse : boolean ) : Promise < T [ ] > {
2022-07-04 14:55:17 +02:00
if ( this . storage . getCustomCacheHandlerMap ( this . entityRestClient ) . has ( typeRef ) ) {
return await this . storage . getCustomCacheHandlerMap ( this . entityRestClient ) . get ( typeRef ) ! . loadRange ( this . storage , listId , start , count , reverse )
2022-05-24 18:35:24 +02:00
}
2022-02-25 17:33:51 +01:00
const typeModel = await resolveTypeReference ( typeRef )
2022-05-20 14:04:39 +02:00
if ( ! isCachedType ( typeModel , typeRef ) ) {
2022-02-25 17:33:51 +01:00
return this . entityRestClient . loadRange ( typeRef , listId , start , count , reverse )
}
2021-12-28 13:53:11 +01:00
2022-11-30 17:15:08 +01:00
// We lock access to the "ranges" db here in order to prevent race conditions when accessing the ranges database.
await this . storage . lockRangesDbAccess ( listId )
2021-12-28 13:53:11 +01:00
2022-11-30 17:15:08 +01:00
try {
const range = await this . storage . getRangeForList ( typeRef , listId )
2022-12-27 15:37:40 +01:00
if ( range == null ) {
2022-11-30 17:15:08 +01:00
await this . populateNewListWithRange ( typeRef , listId , start , count , reverse )
2024-08-23 13:00:37 +02:00
} else if ( isStartIdWithinRange ( range , start , typeModel ) ) {
2022-11-30 17:15:08 +01:00
await this . extendFromWithinRange ( typeRef , listId , start , count , reverse )
2024-08-23 13:00:37 +02:00
} else if ( isRangeRequestAwayFromExistingRange ( range , reverse , start , typeModel ) ) {
2022-11-30 17:15:08 +01:00
await this . extendAwayFromRange ( typeRef , listId , start , count , reverse )
} else {
await this . extendTowardsRange ( typeRef , listId , start , count , reverse )
}
2021-12-23 14:03:23 +01:00
Remove LegacyMailWrapper (legacy mail bodies) and cleanup TutanotaModel
Prior to starting implementing static MailIds and MailSets, we
want to clean up the current TutanotaModel. Therefore, this commit
removes a lot of legacy metamodel definitions that are not used any
longer, including removing the LegacyMailWrapper (legacy mail bodies).
Additionally, this commit inter alia includes:
* removing types no longer needed after migrating to MailDetails, e.g.
the "body", "toRecipients", "ccRecipients", "bccRecipients",
"replyTos", "sentDate" and "headers" references / values from MAIL_TYPE
* removing "mails" reference form MAIL_BOX_TYPE
* removing "subFolders" reference from MAIL_FOLDER
* removing the legacy types MAIL_BODY_TYPE and MAIL_HEADERS
* removing Value.OLD_OWNER_GROUP_NAME, and Value.OLD_AREA_ID_NAME from
FILE_TYPE and CONTACT_TYPE
Closes #7255
Co-authored-by: sug <sug@tutao.de>
2024-07-23 10:05:35 +02:00
return this . storage . provideFromRange ( typeRef , listId , start , count , reverse )
2022-11-30 17:15:08 +01:00
} finally {
// We unlock access to the "ranges" db here. We lock it in order to prevent race conditions when accessing the "ranges" database.
await this . storage . unlockRangesDbAccess ( listId )
}
2022-02-25 17:33:51 +01:00
}
2022-01-12 14:43:01 +01:00
2022-02-25 17:33:51 +01:00
/**
 * Creates a new list range, reading everything from the server that it can
 * range:          (none)
 * request:        *--------->
 * range becomes:  |---------|
 * @private
 */
private async populateNewListWithRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, start: Id, count: number, reverse: boolean) {
	// Create a new range and load everything
	const loaded = await this.entityRestClient.loadRange(typeRef, listId, start, count, reverse)
	// Initialize an empty range for this list; the bounds get corrected below
	await this.storage.setNewRangeForList(typeRef, listId, start, start)
	// The range bounds will be updated in here
	await this.updateRangeInStorage(typeRef, listId, count, reverse, loaded)
}
/**
 * Returns part of a request from the cache, and the remainder is loaded from the server
 * range:          |---------|
 * request:           *-------------->
 * range becomes:  |--------------------|
 */
private async extendFromWithinRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, start: Id, count: number, reverse: boolean) {
	const { newStart, newCount } = await this.recalculateRangeRequest(typeRef, listId, start, count, reverse)
	if (newCount > 0) {
		// Some entities can already come from the cache, so only the remaining ones are fetched from the server
		const loaded = await this.entityRestClient.loadRange(typeRef, listId, newStart, newCount, reverse)
		await this.updateRangeInStorage(typeRef, listId, newCount, reverse, loaded)
	}
}
/**
 * Start was outside the range, and we are loading away from the range.
 * Keeps loading elements from the end of the range in the direction of the startId.
 * Returns once all available elements have been loaded or the requested number is in cache
 * range:          |---------|
 * request:                     *------->
 * range becomes:  |--------------------|
 */
private async extendAwayFromRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, start: Id, count: number, reverse: boolean) {
	// Start is outside the range, and we are loading away from the range, so we grow until we are able to provide enough
	// entities starting at startId
	while (true) {
		const range = assertNotNull(await this.storage.getRangeForList(typeRef, listId))
		// Which end of the range to start loading from
		const loadStartId = reverse ? range.lower : range.upper

		const requestCount = Math.max(count, EXTEND_RANGE_MIN_CHUNK_SIZE)
		// Load some entities
		const loaded = await this.entityRestClient.loadRange(typeRef, listId, loadStartId, requestCount, reverse)

		await this.updateRangeInStorage(typeRef, listId, requestCount, reverse, loaded)
		// If we exhausted the entities from the server
		if (loaded.length < requestCount) {
			break
		}

		// Try to get enough entities from cache
		const fromCache = await this.storage.provideFromRange(typeRef, listId, start, count, reverse)
		// If cache is now capable of providing the whole request
		if (fromCache.length === count) {
			break
		}
	}
}
/**
 * Loads all elements from the startId in the direction of the range.
 * Once complete, returns as many elements as it can from the original request
 * range:             |---------|
 * request:  <------*
 * range becomes:  |--------------------|
 * or
 * range:          |---------|
 * request:                     <-------------------*
 * range becomes:  |--------------------|
 */
private async extendTowardsRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, start: Id, count: number, reverse: boolean) {
	while (true) {
		const range = assertNotNull(await this.storage.getRangeForList(typeRef, listId))

		// We grow the range towards `start`, i.e. opposite to the requested direction
		const loadStartId = reverse ? range.upper : range.lower

		const requestCount = Math.max(count, EXTEND_RANGE_MIN_CHUNK_SIZE)
		const loaded = await this.entityRestClient.loadRange(typeRef, listId, loadStartId, requestCount, !reverse)

		await this.updateRangeInStorage(typeRef, listId, requestCount, !reverse, loaded)

		// The call to `updateRangeInStorage` will have set the range bounds to GENERATED_MIN_ID/GENERATED_MAX_ID
		// in the case that we have exhausted all elements from the server, so if that happens, we will also end up breaking here
		if (await this.storage.isElementIdInCacheRange(typeRef, listId, start)) {
			break
		}
	}

	// `start` is now inside the range; serve the original request from within it
	await this.extendFromWithinRange(typeRef, listId, start, count, reverse)
}
/ * *
* Given the parameters and result of a range request ,
* Inserts the result into storage , and updates the range bounds
* based on number of entities requested and the actual amount that were received
* /
private async updateRangeInStorage < T extends ListElementEntity > (
typeRef : TypeRef < T > ,
listId : Id ,
countRequested : number ,
wasReverseRequest : boolean ,
receivedEntities : T [ ] ,
) {
2024-08-23 13:00:37 +02:00
const isCustomId = isCustomIdType ( await resolveTypeReference ( typeRef ) )
2022-02-25 17:33:51 +01:00
let elementsToAdd = receivedEntities
if ( wasReverseRequest ) {
2022-01-12 14:43:01 +01:00
// Ensure that elements are cached in ascending (not reverse) order
2022-02-25 17:33:51 +01:00
elementsToAdd = receivedEntities . reverse ( )
if ( receivedEntities . length < countRequested ) {
2024-08-23 13:00:37 +02:00
console . log ( "finished loading, setting min id" )
await this . storage . setLowerRangeForList ( typeRef , listId , isCustomId ? CUSTOM_MIN_ID : GENERATED_MIN_ID )
2022-01-12 14:43:01 +01:00
} else {
// After reversing the list the first element in the list is the lower range limit
2022-12-15 13:18:25 +01:00
await this . storage . setLowerRangeForList ( typeRef , listId , getElementId ( getFirstOrThrow ( receivedEntities ) ) )
2022-01-07 15:58:30 +01:00
}
} else {
2022-01-12 14:43:01 +01:00
// Last element in the list is the upper range limit
2022-02-25 17:33:51 +01:00
if ( receivedEntities . length < countRequested ) {
2022-01-12 14:43:01 +01:00
// all elements have been loaded, so the upper range must be set to MAX_ID
2024-08-23 13:00:37 +02:00
console . log ( "finished loading, setting max id" )
await this . storage . setUpperRangeForList ( typeRef , listId , isCustomId ? CUSTOM_MAX_ID : GENERATED_MAX_ID )
2022-01-07 15:58:30 +01:00
} else {
2022-02-25 17:33:51 +01:00
await this . storage . setUpperRangeForList ( typeRef , listId , getElementId ( lastThrow ( receivedEntities ) ) )
2022-01-07 15:58:30 +01:00
}
}
2022-12-27 15:37:40 +01:00
await Promise . all ( elementsToAdd . map ( ( element ) = > this . storage . put ( element ) ) )
2022-01-07 15:58:30 +01:00
}
/ * *
* Calculates the new start value for the getElementRange request and the number of elements to read in
* order to read no duplicate values .
* @return returns the new start and count value .
* /
2022-12-27 15:37:40 +01:00
private async recalculateRangeRequest < T extends ListElementEntity > (
typeRef : TypeRef < T > ,
listId : Id ,
start : Id ,
count : number ,
reverse : boolean ,
) : Promise < { newStart : string ; newCount : number } > {
2022-01-12 14:43:01 +01:00
let allRangeList = await this . storage . getIdsInRange ( typeRef , listId )
2022-01-07 15:58:30 +01:00
let elementsToRead = count
let startElementId = start
2022-01-12 14:43:01 +01:00
const range = await this . storage . getRangeForList ( typeRef , listId )
if ( range == null ) {
2022-12-27 15:37:40 +01:00
return { newStart : start , newCount : count }
2022-01-12 14:43:01 +01:00
}
2022-12-27 15:37:40 +01:00
const { lower , upper } = range
2022-01-07 15:58:30 +01:00
let indexOfStart = allRangeList . indexOf ( start )
2024-08-23 13:00:37 +02:00
const typeModel = await resolveTypeReference ( typeRef )
const isCustomId = isCustomIdType ( typeModel )
if (
( ! reverse && ( isCustomId ? upper == CUSTOM_MAX_ID : upper === GENERATED_MAX_ID ) ) ||
( reverse && ( isCustomId ? lower == CUSTOM_MIN_ID : lower === GENERATED_MIN_ID ) )
) {
2022-01-07 15:58:30 +01:00
// we have already loaded the complete range in the desired direction, so we do not have to load from server
elementsToRead = 0
2022-12-27 15:37:40 +01:00
} else if ( allRangeList . length === 0 ) {
// Element range is empty, so read all elements
2022-01-07 15:58:30 +01:00
elementsToRead = count
2022-12-27 15:37:40 +01:00
} else if ( indexOfStart !== - 1 ) {
// Start element is located in allRange read only elements that are not in allRange.
2022-01-07 15:58:30 +01:00
if ( reverse ) {
elementsToRead = count - indexOfStart
startElementId = allRangeList [ 0 ] // use the lowest id in allRange as start element
} else {
elementsToRead = count - ( allRangeList . length - 1 - indexOfStart )
startElementId = allRangeList [ allRangeList . length - 1 ] // use the highest id in allRange as start element
}
2024-08-23 13:00:37 +02:00
} else if ( lower === start || ( firstBiggerThanSecond ( start , lower , typeModel ) && firstBiggerThanSecond ( allRangeList [ 0 ] , start , typeModel ) ) ) {
2022-12-27 15:37:40 +01:00
// Start element is not in allRange but has been used has start element for a range request, eg. EntityRestInterface.GENERATED_MIN_ID, or start is between lower range id and lowest element in range
if ( ! reverse ) {
// if not reverse read only elements that are not in allRange
2022-01-07 15:58:30 +01:00
startElementId = allRangeList [ allRangeList . length - 1 ] // use the highest id in allRange as start element
elementsToRead = count - allRangeList . length
2022-01-12 14:43:01 +01:00
}
// if reverse read all elements
2024-08-23 13:00:37 +02:00
} else if (
upper === start ||
( firstBiggerThanSecond ( start , allRangeList [ allRangeList . length - 1 ] , typeModel ) && firstBiggerThanSecond ( upper , start , typeModel ) )
) {
2022-12-27 15:37:40 +01:00
// Start element is not in allRange but has been used has start element for a range request, eg. EntityRestInterface.GENERATED_MAX_ID, or start is between upper range id and highest element in range
if ( reverse ) {
// if not reverse read only elements that are not in allRange
2022-01-07 15:58:30 +01:00
startElementId = allRangeList [ 0 ] // use the highest id in allRange as start element
elementsToRead = count - allRangeList . length
}
2022-01-12 14:43:01 +01:00
// if not reverse read all elements
2022-01-07 15:58:30 +01:00
}
2022-12-27 15:37:40 +01:00
return { newStart : startElementId , newCount : elementsToRead }
2022-01-07 15:58:30 +01:00
}
	/**
	 * Resolves when the entity is loaded from the server if necessary
	 * @pre The last call of this function must be resolved. This is needed to avoid that e.g. while
	 * loading a created instance from the server we receive an update of that instance and ignore it because the instance is not in the cache yet.
	 *
	 * @return Promise, which resolves to the array of valid events (if response is NotFound or NotAuthorized we filter it out)
	 */
	async entityEventsReceived(batch: QueuedBatch): Promise<Array<EntityUpdate>> {
		// remember that we were in sync with the server at this point in time
		await this.recordSyncTime()
		// we handle post multiple create operations separately to optimize the number of requests with getMultiple
		const createUpdatesForLETs: EntityUpdate[] = []
		const regularUpdates: EntityUpdate[] = [] // all updates not resulting from post multiple requests
		const updatesArray = batch.events
		for (const update of updatesArray) {
			// monitor application is ignored
			if (update.application === "monitor") continue
			// mails are ignored because move operations are handled as a special event (and no post multiple is possible)
			if (
				update.operation === OperationType.CREATE &&
				getUpdateInstanceId(update).instanceListId != null &&
				!isSameTypeRef(new TypeRef(update.application, update.type), MailTypeRef)
			) {
				createUpdatesForLETs.push(update)
			} else {
				regularUpdates.push(update)
			}
		}
		const createUpdatesForLETsPerList = groupBy(createUpdatesForLETs, (update) => update.instanceListId)
		const postMultipleEventUpdates: EntityUpdate[][] = []
		// we first handle potential post multiple updates in get multiple requests
		for (let [instanceListId, updates] of createUpdatesForLETsPerList) {
			// all updates in this group share the same list, so one type ref describes them all
			const firstUpdate = updates[0]
			const typeRef = new TypeRef<ListElementEntity>(firstUpdate.application, firstUpdate.type)
			const ids = updates.map((update) => update.instanceId)

			// We only want to load the instances that are in cache range
			const customHandlers = this.storage.getCustomCacheHandlerMap(this.entityRestClient)
			const idsInCacheRange = customHandlers.has(typeRef)
				? await customHandlers.get(typeRef)!.getElementIdsInCacheRange(this.storage, instanceListId, ids)
				: await this.getElementIdsInCacheRange(typeRef, instanceListId, ids)
			if (idsInCacheRange.length === 0) {
				// nothing needs to be pre-cached, pass the updates through unchanged
				postMultipleEventUpdates.push(updates)
			} else {
				const updatesNotInCacheRange =
					idsInCacheRange.length === updates.length ? [] : updates.filter((update) => !idsInCacheRange.includes(update.instanceId))
				try {
					// loadMultiple is only called to cache the elements and check which ones return errors
					const returnedInstances = await this._loadMultiple(typeRef, instanceListId, idsInCacheRange)
					//We do not want to pass updates that caused an error
					if (returnedInstances.length !== idsInCacheRange.length) {
						const returnedIds = returnedInstances.map((instance) => getElementId(instance))
						postMultipleEventUpdates.push(updates.filter((update) => returnedIds.includes(update.instanceId)).concat(updatesNotInCacheRange))
					} else {
						postMultipleEventUpdates.push(updates)
					}
				} catch (e) {
					if (e instanceof NotAuthorizedError) {
						// return updates that are not in cache Range if NotAuthorizedError (for those updates that are in cache range)
						postMultipleEventUpdates.push(updatesNotInCacheRange)
					} else {
						throw e
					}
				}
			}
		}

		// now process the remaining (non-post-multiple) updates one by one
		const otherEventUpdates: EntityUpdate[] = []
		for (let update of regularUpdates) {
			const { operation, type, application } = update
			const { instanceListId, instanceId } = getUpdateInstanceId(update)
			const typeRef = new TypeRef<SomeEntity>(application, type)

			switch (operation) {
				case OperationType.UPDATE: {
					const handledUpdate = await this.processUpdateEvent(typeRef, update)
					if (handledUpdate) {
						otherEventUpdates.push(handledUpdate)
					}
					continue
				}
				case OperationType.DELETE: {
					if (
						isSameTypeRef(MailTypeRef, typeRef) &&
						containsEventOfType(updatesArray as Readonly<EntityUpdateData[]>, OperationType.CREATE, instanceId)
					) {
						// move for mail is handled in create event.
					} else if (isSameTypeRef(MailTypeRef, typeRef)) {
						// delete mailDetails if they are available (as we don't send an event for this type)
						const mail = await this.storage.get(MailTypeRef, instanceListId, instanceId)
						await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
						if (mail?.mailDetails != null) {
							await this.storage.deleteIfExists(MailDetailsBlobTypeRef, mail.mailDetails[0], mail.mailDetails[1])
						}
					} else {
						await this.storage.deleteIfExists(typeRef, instanceListId, instanceId)
					}
					otherEventUpdates.push(update)
					continue
				}
				case OperationType.CREATE: {
					const handledUpdate = await this.processCreateEvent(typeRef, update, updatesArray)
					if (handledUpdate) {
						otherEventUpdates.push(handledUpdate)
					}
					continue
				}
				default:
					throw new ProgrammingError("Unknown operation type: " + operation)
			}
		}
		// the whole batch has been written successfully
		await this.storage.putLastBatchIdForGroup(batch.groupId, batch.batchId)
		// merge the results
		return otherEventUpdates.concat(postMultipleEventUpdates.flat())
	}
2023-03-23 14:09:01 +01:00
/** Returns {null} when the update should be skipped. */
2022-12-27 15:37:40 +01:00
private async processCreateEvent ( typeRef : TypeRef < any > , update : EntityUpdate , batch : ReadonlyArray < EntityUpdate > ) : Promise < EntityUpdate | null > {
// do not return undefined to avoid implicit returns
const { instanceId , instanceListId } = getUpdateInstanceId ( update )
2022-01-07 15:58:30 +01:00
// We put new instances into cache only when it's a new instance in the cached range which is only for the list instances.
2022-01-12 14:43:01 +01:00
if ( instanceListId != null ) {
2022-01-07 15:58:30 +01:00
const deleteEvent = getEventOfType ( batch , OperationType . DELETE , instanceId )
2024-08-23 13:00:37 +02:00
const mail = deleteEvent && isSameTypeRef ( MailTypeRef , typeRef ) ? await this . storage . get ( MailTypeRef , deleteEvent . instanceListId , instanceId ) : null
// avoid downloading new mail element for non-mailSet user.
// can be removed once all mailbox have been migrated to mailSet (once lastNonOutdatedClientVersion is >= v242)
if ( deleteEvent != null && mail != null && isEmpty ( mail . sets ) ) {
2022-01-07 15:58:30 +01:00
// It is a move event for cached mail
2022-01-12 14:43:01 +01:00
await this . storage . deleteIfExists ( typeRef , deleteEvent . instanceListId , instanceId )
2024-08-23 13:00:37 +02:00
await this . updateListIdOfMailAndUpdateCache ( mail , instanceListId , instanceId )
2022-01-07 15:58:30 +01:00
return update
2022-01-12 14:43:01 +01:00
} else if ( await this . storage . isElementIdInCacheRange ( typeRef , instanceListId , instanceId ) ) {
2022-01-07 15:58:30 +01:00
// No need to try to download something that's not there anymore
2022-06-21 16:23:50 +02:00
// We do not consult custom handlers here because they are only needed for list elements.
2024-08-23 13:00:37 +02:00
console . log ( "downloading create event for" , getTypeId ( typeRef ) , instanceListId , instanceId )
2022-12-27 15:37:40 +01:00
return this . entityRestClient
. load ( typeRef , [ instanceListId , instanceId ] )
. then ( ( entity ) = > this . storage . put ( entity ) )
. then ( ( ) = > update )
2023-03-23 14:09:01 +01:00
. catch ( ( e ) = > {
if ( isExpectedErrorForSynchronization ( e ) ) {
return null
} else {
throw e
}
} )
2022-01-07 15:58:30 +01:00
} else {
return update
}
} else {
return update
}
}
2023-07-28 16:59:11 +02:00
/ * *
* Updates the given mail with the new list id and add it to the cache .
* /
private async updateListIdOfMailAndUpdateCache ( mail : Mail , newListId : Id , elementId : Id ) {
// In case of a move operation we have to replace the list id always, as the mail is stored in another folder.
mail . _id = [ newListId , elementId ]
if ( mail . bucketKey != null ) {
// With the simplified permission system (MailDetails) we also have to update the bucketEncSessionKey for the mail,
// which also references the mail list id. We need this for some cases when the move operation was executed
// before the UpdateSessionKeyService has been executed, e.g. when using inbox rules.
// The UpdateSessionKeyService would remove the bucketKey from the mail and there is no need to synchronize it anymore.
const mailSessionKey = mail . bucketKey . bucketEncSessionKeys . find ( ( bucketEncSessionKey ) = > isSameId ( bucketEncSessionKey . instanceId , elementId ) )
if ( mailSessionKey ) {
mailSessionKey . instanceList = newListId
}
}
await this . storage . put ( mail )
}
2023-03-23 14:09:01 +01:00
/** Returns {null} when the update should be skipped. */
2022-01-12 14:43:01 +01:00
private async processUpdateEvent ( typeRef : TypeRef < SomeEntity > , update : EntityUpdate ) : Promise < EntityUpdate | null > {
2022-12-27 15:37:40 +01:00
const { instanceId , instanceListId } = getUpdateInstanceId ( update )
const cached = await this . storage . get ( typeRef , instanceListId , instanceId )
2022-10-21 15:53:39 +02:00
// No need to try to download something that's not there anymore
if ( cached != null ) {
try {
2023-03-22 15:37:54 +01:00
// in case this is an update for the user instance: if the password changed we'll be logged out at this point
// if we don't catch the expected NotAuthenticated Error that results from trying to load anything with
// the old user.
// Letting the NotAuthenticatedError propagate to the main thread instead of trying to handle it ourselves
// or throwing out the update drops us onto the login page and into the session recovery flow if the user
// clicks their saved credentials again, but lets them still use offline login if they try to use the
// outdated credentials while not connected to the internet.
2022-10-21 15:53:39 +02:00
const newEntity = await this . entityRestClient . load ( typeRef , collapseId ( instanceListId , instanceId ) )
if ( isSameTypeRef ( typeRef , UserTypeRef ) ) {
await this . handleUpdatedUser ( cached , newEntity )
}
await this . storage . put ( newEntity )
return update
} catch ( e ) {
2023-03-22 16:42:08 +01:00
// If the entity is not there anymore we should evict it from the cache and not keep the outdated/nonexisting instance around.
// Even for list elements this should be safe as the instance is not there anymore and is definitely not in this version
2023-03-23 14:09:01 +01:00
if ( isExpectedErrorForSynchronization ( e ) ) {
2023-03-22 16:42:08 +01:00
console . log ( ` Instance not found when processing update for ${ JSON . stringify ( update ) } , deleting from the cache. ` )
await this . storage . deleteIfExists ( typeRef , instanceListId , instanceId )
2023-03-23 14:09:01 +01:00
return null
} else {
throw e
2023-03-22 16:42:08 +01:00
}
2022-10-21 15:53:39 +02:00
}
2022-01-07 15:58:30 +01:00
}
return update
}
2022-10-21 15:53:39 +02:00
private async handleUpdatedUser ( cached : SomeEntity , newEntity : SomeEntity ) {
// When we are removed from a group we just get an update for our user
// with no membership on it. We need to clean up all the entities that
// belong to that group since we shouldn't be able to access them anymore
// and we won't get any update or another chance to clean them up.
const oldUser = cached as User
if ( oldUser . _id !== this . storage . getUserId ( ) ) {
return
}
const newUser = newEntity as User
const removedShips = difference ( oldUser . memberships , newUser . memberships , ( l , r ) = > l . _id === r . _id )
for ( const ship of removedShips ) {
console . log ( "Lost membership on " , ship . _id , ship . groupType )
await this . storage . deleteAllOwnedBy ( ship . group )
}
}
2022-01-07 15:58:30 +01:00
/ * *
*
2022-01-12 14:43:01 +01:00
* @returns { Array < Id > } the ids that are in cache range and therefore should be cached
2022-01-07 15:58:30 +01:00
* /
2022-01-12 14:43:01 +01:00
private async getElementIdsInCacheRange < T extends ListElementEntity > ( typeRef : TypeRef < T > , listId : Id , ids : Id [ ] ) : Promise < Id [ ] > {
const ret : Id [ ] = [ ]
for ( let i = 0 ; i < ids . length ; i ++ ) {
if ( await this . storage . isElementIdInCacheRange ( typeRef , listId , ids [ i ] ) ) {
ret . push ( ids [ i ] )
2022-01-07 15:58:30 +01:00
}
}
2022-01-12 14:43:01 +01:00
return ret
2022-01-07 15:58:30 +01:00
}
2021-12-07 15:30:53 +01:00
}
2022-01-07 15:58:30 +01:00
2023-03-23 14:09:01 +01:00
/ * *
* Returns whether the error is expected for the cases where our local state might not be up - to - date with the server yet . E . g . we might be processing an update
* for the instance that was already deleted . Normally this would be optimized away but it might still happen due to timing .
* /
function isExpectedErrorForSynchronization ( e : Error ) : boolean {
return e instanceof NotFoundError || e instanceof NotAuthorizedError
}
2022-12-27 15:37:40 +01:00
export function expandId ( id : Id | IdTuple ) : { listId : Id | null ; elementId : Id } {
2022-01-07 15:58:30 +01:00
if ( typeof id === "string" ) {
return {
listId : null ,
2022-12-27 15:37:40 +01:00
elementId : id ,
2022-01-07 15:58:30 +01:00
}
} else {
const [ listId , elementId ] = id
return {
2022-12-27 15:37:40 +01:00
listId ,
elementId ,
2022-01-07 15:58:30 +01:00
}
}
2021-12-07 15:30:53 +01:00
}
2022-01-07 15:58:30 +01:00
2021-12-23 14:03:23 +01:00
export function collapseId ( listId : Id | null , elementId : Id ) : Id | IdTuple {
2022-01-07 15:58:30 +01:00
if ( listId != null ) {
return [ listId , elementId ]
} else {
return elementId
}
2022-01-12 14:43:01 +01:00
}
2022-12-27 15:37:40 +01:00
export function getUpdateInstanceId ( update : EntityUpdate ) : { instanceListId : Id | null ; instanceId : Id } {
2022-01-12 14:43:01 +01:00
let instanceListId
if ( update . instanceListId === "" ) {
instanceListId = null
} else {
instanceListId = update . instanceListId
}
2022-12-27 15:37:40 +01:00
return { instanceListId , instanceId : update.instanceId }
2022-02-28 12:13:28 +01:00
}
/ * *
2024-04-25 11:05:18 +02:00
* Check if a range request begins inside an existing range
2022-02-28 12:13:28 +01:00
* /
2024-08-23 13:00:37 +02:00
function isStartIdWithinRange ( range : Range , startId : Id , typeModel : TypeModel ) : boolean {
return ! firstBiggerThanSecond ( startId , range . upper , typeModel ) && ! firstBiggerThanSecond ( range . lower , startId , typeModel )
2022-02-28 12:13:28 +01:00
}
/ * *
* Check if a range request is going away from an existing range
* Assumes that the range request doesn ' t start inside the range
* /
2024-08-23 13:00:37 +02:00
function isRangeRequestAwayFromExistingRange ( range : Range , reverse : boolean , start : string , typeModel : TypeModel ) {
return reverse ? firstBiggerThanSecond ( range . lower , start , typeModel ) : firstBiggerThanSecond ( start , range . upper , typeModel )
2022-02-28 12:13:28 +01:00
}
2022-05-20 14:04:39 +02:00
/ * *
* some types are completely ignored by the cache and always served from a request .
* Note :
* isCachedType ( ref ) -- - > ! isIgnoredType ( ref ) but
* isIgnoredType ( ref ) - / - > ! i s C a c h e d T y p e ( r e f ) b e c a u s e o f o p t e d - i n C u s t o m I d t y p e s .
* /
function isIgnoredType ( typeRef : TypeRef < unknown > ) : boolean {
2022-12-27 15:37:40 +01:00
return typeRef . app === "monitor" || IGNORED_TYPES . some ( ( ref ) = > isSameTypeRef ( typeRef , ref ) )
2022-05-20 14:04:39 +02:00
}
/ * *
* customId types are normally not cached , but some are opted in .
* Note :
* isCachedType ( ref ) -- - > ! isIgnoredType ( ref ) but
* isIgnoredType ( ref ) - / - > ! i s C a c h e d T y p e ( r e f )
* /
function isCachedType ( typeModel : TypeModel , typeRef : TypeRef < unknown > ) : boolean {
2024-08-23 13:00:37 +02:00
return ( ! isIgnoredType ( typeRef ) && isGeneratedIdType ( typeModel ) ) || isSameTypeRef ( typeRef , MailSetEntryTypeRef )
}
function isGeneratedIdType ( typeModel : TypeModel ) : boolean {
return typeModel . values . _id . type === ValueType . GeneratedId
2022-12-27 15:37:40 +01:00
}