2023-01-17 12:21:29 +01:00
|
|
|
import { ElementEntity, ListElementEntity, SomeEntity, TypeModel } from "../../common/EntityTypes.js"
|
2022-04-20 10:39:52 +02:00
|
|
|
import {
|
2024-08-26 15:07:37 +02:00
|
|
|
constructMailSetEntryId,
|
2024-08-23 13:00:37 +02:00
|
|
|
CUSTOM_MIN_ID,
|
2024-08-27 16:51:40 +02:00
|
|
|
DEFAULT_MAILSET_ENTRY_CUSTOM_CUTOFF_TIMESTAMP,
|
2022-04-20 10:39:52 +02:00
|
|
|
elementIdPart,
|
|
|
|
|
firstBiggerThanSecond,
|
|
|
|
|
GENERATED_MAX_ID,
|
|
|
|
|
GENERATED_MIN_ID,
|
|
|
|
|
getElementId,
|
|
|
|
|
listIdPart,
|
2022-12-27 15:37:40 +01:00
|
|
|
timestampToGeneratedId,
|
2022-04-20 10:39:52 +02:00
|
|
|
} from "../../common/utils/EntityUtils.js"
|
2022-12-27 15:37:40 +01:00
|
|
|
import { CacheStorage, expandId, ExposedCacheStorage, LastUpdateTime } from "../rest/DefaultEntityRestCache.js"
|
2022-01-12 14:43:01 +01:00
|
|
|
import * as cborg from "cborg"
|
2022-12-27 15:37:40 +01:00
|
|
|
import { EncodeOptions, Token, Type } from "cborg"
|
2023-08-16 10:47:09 +02:00
|
|
|
import {
|
|
|
|
|
assert,
|
|
|
|
|
assertNotNull,
|
2024-08-23 13:00:37 +02:00
|
|
|
base64ExtToBase64,
|
|
|
|
|
base64ToBase64Ext,
|
|
|
|
|
base64ToBase64Url,
|
|
|
|
|
base64UrlToBase64,
|
2023-08-16 10:47:09 +02:00
|
|
|
DAY_IN_MILLIS,
|
|
|
|
|
getTypeId,
|
|
|
|
|
groupByAndMap,
|
|
|
|
|
groupByAndMapUniquely,
|
|
|
|
|
mapNullable,
|
|
|
|
|
splitInChunks,
|
|
|
|
|
TypeRef,
|
|
|
|
|
} from "@tutao/tutanota-utils"
|
2022-12-27 15:37:40 +01:00
|
|
|
import { isDesktop, isOfflineStorageAvailable, isTest } from "../../common/Env.js"
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
import { modelInfos, resolveTypeReference } from "../../common/EntityFunctions.js"
|
2023-02-07 10:18:22 +01:00
|
|
|
import { AccountType, OFFLINE_STORAGE_DEFAULT_TIME_RANGE_DAYS } from "../../common/TutanotaConstants.js"
|
2022-12-27 15:37:40 +01:00
|
|
|
import { DateProvider } from "../../common/DateProvider.js"
|
2024-01-08 17:14:09 +01:00
|
|
|
import { TokenOrNestedTokens } from "cborg/interface"
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
import {
|
|
|
|
|
CalendarEventTypeRef,
|
|
|
|
|
FileTypeRef,
|
2024-08-07 08:38:58 +02:00
|
|
|
MailBoxTypeRef,
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
MailDetailsBlobTypeRef,
|
|
|
|
|
MailDetailsDraftTypeRef,
|
|
|
|
|
MailFolderTypeRef,
|
2024-08-26 15:07:37 +02:00
|
|
|
MailSetEntryTypeRef,
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
MailTypeRef,
|
|
|
|
|
} from "../../entities/tutanota/TypeRefs.js"
|
2022-12-27 15:37:40 +01:00
|
|
|
import { UserTypeRef } from "../../entities/sys/TypeRefs.js"
|
|
|
|
|
import { OfflineStorageMigrator } from "./OfflineStorageMigrator.js"
|
|
|
|
|
import { CustomCacheHandlerMap, CustomCalendarEventCacheHandler } from "../rest/CustomCacheHandler.js"
|
|
|
|
|
import { EntityRestClient } from "../rest/EntityRestClient.js"
|
|
|
|
|
import { InterWindowEventFacadeSendDispatcher } from "../../../native/common/generatedipc/InterWindowEventFacadeSendDispatcher.js"
|
|
|
|
|
import { SqlCipherFacade } from "../../../native/common/generatedipc/SqlCipherFacade.js"
|
2024-03-11 16:25:43 +01:00
|
|
|
import { FormattedQuery, SqlValue, TaggedSqlValue, untagSqlObject } from "./SqlValue.js"
|
2023-01-12 14:54:42 +01:00
|
|
|
import { FolderSystem } from "../../common/mail/FolderSystem.js"
|
2024-08-23 13:00:37 +02:00
|
|
|
import { Type as TypeId, ValueType } from "../../common/EntityConstants.js"
|
2023-01-04 10:54:28 +01:00
|
|
|
import { OutOfSyncError } from "../../common/error/OutOfSyncError.js"
|
2024-03-11 16:25:43 +01:00
|
|
|
import { sql, SqlFragment } from "./Sql.js"
|
2024-07-30 12:07:58 +02:00
|
|
|
import { isDraft, isSpamOrTrashFolder } from "../../common/CommonMailUtils.js"
|
2022-01-12 14:43:01 +01:00
|
|
|
|
2023-08-16 10:47:09 +02:00
|
|
|
/**
 * Maximum number of bound parameters ("?" variables) a single SQLite statement may use.
 * this is the value of SQLITE_MAX_VARIABLE_NUMBER in sqlite3.c
 * it may change if the sqlite version is updated.
 * */
const MAX_SAFE_SQL_VARS = 32766
|
|
|
|
|
|
2022-01-12 14:43:01 +01:00
|
|
|
function dateEncoder(data: Date, typ: string, options: EncodeOptions): TokenOrNestedTokens | null {
|
2022-10-13 13:29:00 +02:00
|
|
|
const time = data.getTime()
|
2022-01-12 14:43:01 +01:00
|
|
|
return [
|
|
|
|
|
// https://datatracker.ietf.org/doc/rfc8943/
|
|
|
|
|
new Token(Type.tag, 100),
|
2022-12-27 15:37:40 +01:00
|
|
|
new Token(time < 0 ? Type.negint : Type.uint, time),
|
2022-01-12 14:43:01 +01:00
|
|
|
]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
function dateDecoder(bytes: number): Date {
|
|
|
|
|
return new Date(bytes)
|
|
|
|
|
}
|
|
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
export const customTypeEncoders: { [typeName: string]: typeof dateEncoder } = Object.freeze({
|
|
|
|
|
Date: dateEncoder,
|
2022-01-12 14:43:01 +01:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
type TypeDecoder = (_: any) => any
|
|
|
|
|
export const customTypeDecoders: Array<TypeDecoder> = (() => {
|
|
|
|
|
const tags: Array<TypeDecoder> = []
|
|
|
|
|
tags[100] = dateDecoder
|
|
|
|
|
return tags
|
|
|
|
|
})()
|
|
|
|
|
|
2022-10-21 15:53:39 +02:00
|
|
|
/**
 * For each of these keys we track the current version in the database.
 * The keys are different model versions (because we need to migrate the data with certain model version changes) and "offline" key which is used to track
 * migrations that are needed for other reasons e.g. if DB structure changes or if we need to invalidate some tables.
 */
export type VersionMetadataBaseKey = keyof typeof modelInfos | "offline"

// Mapped type producing one numeric "<key>-version" metadata entry per base key above.
type VersionMetadataEntries = {
	// Yes this is cursed, give me a break
	[P in VersionMetadataBaseKey as `${P}-version`]: number
}

/** Shape of everything stored in the "metadata" table (see TableDefinitions). */
export interface OfflineDbMeta extends VersionMetadataEntries {
	// wall-clock ms of the last completed sync, see putLastUpdateTime()
	lastUpdateTime: number
	// configured offline retention window in days
	timeRangeDays: number
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/**
 * Map of table name -> SQL column/constraint definition for every table of the offline db.
 * Used both for CREATE TABLE and for iterating all tables (e.g. purgeStorage()).
 */
const TableDefinitions = Object.freeze({
	// plus ownerGroup added in a migration
	list_entities:
		"type TEXT NOT NULL, listId TEXT NOT NULL, elementId TEXT NOT NULL, ownerGroup TEXT, entity BLOB NOT NULL, PRIMARY KEY (type, listId, elementId)",
	// plus ownerGroup added in a migration
	element_entities: "type TEXT NOT NULL, elementId TEXT NOT NULL, ownerGroup TEXT, entity BLOB NOT NULL, PRIMARY KEY (type, elementId)",
	// cached range (lower/upper element id) per list, see setNewRangeForList()
	ranges: "type TEXT NOT NULL, listId TEXT NOT NULL, lower TEXT NOT NULL, upper TEXT NOT NULL, PRIMARY KEY (type, listId)",
	// newest processed entity event batch per group, see putLastBatchIdForGroup()
	lastUpdateBatchIdPerGroupId: "groupId TEXT NOT NULL, batchId TEXT NOT NULL, PRIMARY KEY (groupId)",
	// cbor-encoded key/value pairs described by OfflineDbMeta
	metadata: "key TEXT NOT NULL, value BLOB, PRIMARY KEY (key)",
	blob_element_entities:
		"type TEXT NOT NULL, listId TEXT NOT NULL, elementId TEXT NOT NULL, ownerGroup TEXT, entity BLOB NOT NULL, PRIMARY KEY (type, listId, elementId)",
} as const)
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
type Range = { lower: Id; upper: Id }
|
2022-08-11 16:38:53 +02:00
|
|
|
|
2022-10-21 15:53:39 +02:00
|
|
|
export interface OfflineStorageInitArgs {
|
2022-12-27 15:37:40 +01:00
|
|
|
userId: Id
|
|
|
|
|
databaseKey: Uint8Array
|
|
|
|
|
timeRangeDays: number | null
|
2022-10-21 15:53:39 +02:00
|
|
|
forceNewDatabase: boolean
|
|
|
|
|
}
|
|
|
|
|
|
2022-05-12 17:06:57 +02:00
|
|
|
export class OfflineStorage implements CacheStorage, ExposedCacheStorage {
|
2022-07-04 14:55:17 +02:00
|
|
|
// lazily built in getCustomCacheHandlerMap()
private customCacheHandler: CustomCacheHandlerMap | null = null
// set in init(), cleared in deinit()
private userId: Id | null = null
// retention window from the init args; null when no explicit limit was configured
private timeRangeDays: number | null = null
|
2022-02-10 16:32:47 +01:00
|
|
|
|
2022-01-12 14:43:01 +01:00
|
|
|
constructor(
	private readonly sqlCipherFacade: SqlCipherFacade,
	private readonly interWindowEventSender: InterWindowEventFacadeSendDispatcher,
	private readonly dateProvider: DateProvider,
	private readonly migrator: OfflineStorageMigrator,
) {
	// offline storage requires sqlcipher, which is only available on desktop/mobile (or in tests)
	assert(isOfflineStorageAvailable() || isTest(), "Offline storage is not available.")
}
|
|
|
|
|
|
2022-05-17 17:40:44 +02:00
|
|
|
/**
 * Open (creating and migrating as necessary) the per-user offline database.
 * @return {boolean} whether the database was newly created or not
 */
async init({ userId, databaseKey, timeRangeDays, forceNewDatabase }: OfflineStorageInitArgs): Promise<boolean> {
	this.userId = userId
	this.timeRangeDays = timeRangeDays
	if (forceNewDatabase) {
		if (isDesktop()) {
			// other windows may hold cached data derived from this db; tell them it is now invalid
			await this.interWindowEventSender.localUserDataInvalidated(userId)
		}
		await this.sqlCipherFacade.deleteDb(userId)
	}
	// We open database here, and it is closed in the native side when the window is closed or the page is reloaded
	await this.sqlCipherFacade.openDb(userId, databaseKey)
	await this.createTables()

	try {
		await this.migrator.migrate(this, this.sqlCipherFacade)
	} catch (e) {
		if (e instanceof OutOfSyncError) {
			// data is too old to be migrated: throw the file away and migrate a fresh one
			console.warn("Offline db is out of sync!", e)
			await this.recreateDbFile(userId, databaseKey)
			await this.migrator.migrate(this, this.sqlCipherFacade)
		} else {
			throw e
		}
	}
	// if nothing is written here, it means it's a new database
	return (await this.getLastUpdateTime()).type === "never"
}
|
2022-11-28 17:38:17 +01:00
|
|
|
|
2023-01-04 10:54:28 +01:00
|
|
|
/**
 * Throw away the current database file and open a fresh, empty one with the same key.
 * Used when the stored data cannot be brought up to date (see init()).
 */
private async recreateDbFile(userId: string, databaseKey: Uint8Array): Promise<void> {
	console.log(`recreating DB file for userId ${userId}`)
	// must close before deleting the underlying file
	await this.sqlCipherFacade.closeDb()
	await this.sqlCipherFacade.deleteDb(userId)
	await this.sqlCipherFacade.openDb(userId, databaseKey)
	await this.createTables()
}
|
|
|
|
|
|
2022-08-15 14:22:44 +02:00
|
|
|
/**
 * currently, we close DBs from the native side (mainly on things like reload and on android's onDestroy)
 */
async deinit() {
	// forget the user so getUserId() fails fast after deinit
	this.userId = null
	await this.sqlCipherFacade.closeDb()
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async deleteIfExists(typeRef: TypeRef<SomeEntity>, listId: Id | null, elementId: Id): Promise<void> {
|
|
|
|
|
const type = getTypeId(typeRef)
|
2023-01-17 12:21:29 +01:00
|
|
|
let typeModel: TypeModel
|
2024-07-01 14:54:13 +02:00
|
|
|
typeModel = await resolveTypeReference(typeRef)
|
2024-08-23 13:00:37 +02:00
|
|
|
elementId = ensureBase64Ext(typeModel, elementId)
|
2023-08-15 16:43:50 +02:00
|
|
|
let formattedQuery
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
switch (typeModel.type) {
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.Element:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM element_entities WHERE type = ${type} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.ListElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM list_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.BlobElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM blob_element_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
|
|
|
|
default:
|
|
|
|
|
throw new Error("must be a persistent type")
|
2022-02-10 16:32:47 +01:00
|
|
|
}
|
2023-08-15 16:43:50 +02:00
|
|
|
await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2023-02-02 17:21:34 +01:00
|
|
|
async deleteAllOfType(typeRef: TypeRef<SomeEntity>): Promise<void> {
|
|
|
|
|
const type = getTypeId(typeRef)
|
|
|
|
|
let typeModel: TypeModel
|
2024-07-01 14:54:13 +02:00
|
|
|
typeModel = await resolveTypeReference(typeRef)
|
2023-08-15 16:43:50 +02:00
|
|
|
let formattedQuery
|
2023-02-02 17:21:34 +01:00
|
|
|
switch (typeModel.type) {
|
|
|
|
|
case TypeId.Element:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM element_entities WHERE type = ${type}`
|
2023-02-02 17:21:34 +01:00
|
|
|
break
|
|
|
|
|
case TypeId.ListElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM list_entities WHERE type = ${type}`
|
|
|
|
|
await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params)
|
2023-03-29 14:46:04 +02:00
|
|
|
await this.deleteAllRangesForType(type)
|
|
|
|
|
return
|
2023-02-02 17:21:34 +01:00
|
|
|
case TypeId.BlobElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`DELETE FROM blob_element_entities WHERE type = ${type}`
|
2023-02-02 17:21:34 +01:00
|
|
|
break
|
|
|
|
|
default:
|
|
|
|
|
throw new Error("must be a persistent type")
|
|
|
|
|
}
|
2023-08-15 16:43:50 +02:00
|
|
|
await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params)
|
2023-02-02 17:21:34 +01:00
|
|
|
}
|
|
|
|
|
|
2023-03-29 14:46:04 +02:00
|
|
|
private async deleteAllRangesForType(type: string): Promise<void> {
|
|
|
|
|
const { query, params } = sql`DELETE FROM ranges WHERE type = ${type}`
|
|
|
|
|
await this.sqlCipherFacade.run(query, params)
|
|
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async get<T extends SomeEntity>(typeRef: TypeRef<T>, listId: Id | null, elementId: Id): Promise<T | null> {
|
|
|
|
|
const type = getTypeId(typeRef)
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
const typeModel = await resolveTypeReference(typeRef)
|
2024-08-23 13:00:37 +02:00
|
|
|
elementId = ensureBase64Ext(typeModel, elementId)
|
2023-08-15 16:43:50 +02:00
|
|
|
let formattedQuery
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
switch (typeModel.type) {
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.Element:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`SELECT entity from element_entities WHERE type = ${type} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.ListElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`SELECT entity from list_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
2023-01-12 16:48:28 +01:00
|
|
|
case TypeId.BlobElement:
|
2023-08-15 16:43:50 +02:00
|
|
|
formattedQuery = sql`SELECT entity from blob_element_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}`
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
break
|
|
|
|
|
default:
|
|
|
|
|
throw new Error("must be a persistent type")
|
2022-08-11 16:38:53 +02:00
|
|
|
}
|
2023-08-15 16:43:50 +02:00
|
|
|
const result = await this.sqlCipherFacade.get(formattedQuery.query, formattedQuery.params)
|
2022-12-27 15:37:40 +01:00
|
|
|
return result?.entity ? this.deserialize(typeRef, result.entity.value as Uint8Array) : null
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2024-08-07 08:38:58 +02:00
|
|
|
async provideMultiple<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, elementIds: Id[]): Promise<Array<T>> {
|
|
|
|
|
if (elementIds.length === 0) return []
|
2024-08-23 13:00:37 +02:00
|
|
|
const typeModel = await resolveTypeReference(typeRef)
|
|
|
|
|
elementIds = elementIds.map((el) => ensureBase64Ext(typeModel, el))
|
|
|
|
|
|
2024-08-07 08:38:58 +02:00
|
|
|
const type = getTypeId(typeRef)
|
|
|
|
|
const serializedList: ReadonlyArray<Record<string, TaggedSqlValue>> = await this.allChunked(
|
|
|
|
|
MAX_SAFE_SQL_VARS - 2,
|
|
|
|
|
elementIds,
|
|
|
|
|
(c) => sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND elementId IN ${paramList(c)}`,
|
|
|
|
|
)
|
|
|
|
|
return this.deserializeList(
|
|
|
|
|
typeRef,
|
|
|
|
|
serializedList.map((r) => r.entity.value as Uint8Array),
|
|
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
|
2022-01-12 14:43:01 +01:00
|
|
|
/**
 * Return the element ids of all cached entities of the list that fall inside the stored range.
 * @throws Error when no range has been stored for this type/list combination
 */
async getIdsInRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id): Promise<Array<Id>> {
	const type = getTypeId(typeRef)
	const range = await this.getRange(typeRef, listId)
	if (range == null) {
		throw new Error(`no range exists for ${type} and list ${listId}`)
	}
	// selects lower <= elementId <= upper: equal-to-lower OR bigger-than-lower, AND not bigger-than-upper
	const { query, params } = sql`SELECT elementId FROM list_entities
WHERE type = ${type}
AND listId = ${listId}
AND (elementId = ${range.lower}
OR ${firstIdBigger("elementId", range.lower)})
AND NOT(${firstIdBigger("elementId", range.upper)})`
	const rows = await this.sqlCipherFacade.all(query, params)
	return rows.map((row) => row.elementId.value as string)
}
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
/** don't use this internally in this class, use OfflineStorage::getRange instead. OfflineStorage is
|
|
|
|
|
* using converted custom IDs internally which is undone when using this to access the range.
|
|
|
|
|
*/
|
2022-08-11 16:38:53 +02:00
|
|
|
async getRangeForList<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id): Promise<Range | null> {
|
2024-08-23 13:00:37 +02:00
|
|
|
let range = await this.getRange(typeRef, listId)
|
|
|
|
|
const typeModel = await resolveTypeReference(typeRef)
|
|
|
|
|
if (range == null) return range
|
|
|
|
|
return {
|
|
|
|
|
lower: customIdToBase64Url(typeModel, range.lower),
|
|
|
|
|
upper: customIdToBase64Url(typeModel, range.upper),
|
|
|
|
|
}
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
async isElementIdInCacheRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, elementId: Id): Promise<boolean> {
|
|
|
|
|
const typeModel = await resolveTypeReference(typeRef)
|
|
|
|
|
elementId = ensureBase64Ext(typeModel, elementId)
|
|
|
|
|
|
|
|
|
|
const range = await this.getRange(typeRef, listId)
|
|
|
|
|
return range != null && !firstBiggerThanSecond(elementId, range.upper) && !firstBiggerThanSecond(range.lower, elementId)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * Load up to {@param count} cached entities of the list adjacent to {@param start} (exclusive).
 * @param reverse when true, returns ids smaller than start (biggest first); otherwise ids bigger than start (smallest first)
 */
async provideFromRange<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, start: Id, count: number, reverse: boolean): Promise<T[]> {
	const typeModel = await resolveTypeReference(typeRef)
	// custom IDs are stored in base64Ext internally, convert start before comparing
	start = ensureBase64Ext(typeModel, start)
	const type = getTypeId(typeRef)
	let formattedQuery
	if (reverse) {
		// start > elementId: take ids below start. Ordering by LENGTH first presumably mirrors
		// the id comparison used by firstIdBigger (shorter ids compare smaller) — TODO confirm.
		formattedQuery = sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND ${firstIdBigger(
			start,
			"elementId",
		)} ORDER BY LENGTH(elementId) DESC, elementId DESC LIMIT ${count}`
	} else {
		// elementId > start: take ids above start, ascending
		formattedQuery = sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND ${firstIdBigger(
			"elementId",
			start,
		)} ORDER BY LENGTH(elementId) ASC, elementId ASC LIMIT ${count}`
	}
	const { query, params } = formattedQuery
	const serializedList: ReadonlyArray<Record<string, TaggedSqlValue>> = await this.sqlCipherFacade.all(query, params)
	return this.deserializeList(
		typeRef,
		serializedList.map((r) => r.entity.value as Uint8Array),
	)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Insert or replace a single entity in the table matching its type kind (element/list/blob).
 * @throws Error when the type is not a persistent (cacheable) type
 */
async put(originalEntity: SomeEntity): Promise<void> {
	const serializedEntity = this.serialize(originalEntity)
	let { listId, elementId } = expandId(originalEntity._id)
	const type = getTypeId(originalEntity._type)
	const ownerGroup = originalEntity._ownerGroup
	const typeModel = await resolveTypeReference(originalEntity._type)
	// custom IDs are stored in their base64Ext form internally
	elementId = ensureBase64Ext(typeModel, elementId)
	let formattedQuery: FormattedQuery
	switch (typeModel.type) {
		case TypeId.Element:
			formattedQuery = sql`INSERT OR REPLACE INTO element_entities (type, elementId, ownerGroup, entity) VALUES (${type}, ${elementId}, ${ownerGroup}, ${serializedEntity})`
			break
		case TypeId.ListElement:
			formattedQuery = sql`INSERT OR REPLACE INTO list_entities (type, listId, elementId, ownerGroup, entity) VALUES (${type}, ${listId}, ${elementId}, ${ownerGroup}, ${serializedEntity})`
			break
		case TypeId.BlobElement:
			formattedQuery = sql`INSERT OR REPLACE INTO blob_element_entities (type, listId, elementId, ownerGroup, entity) VALUES (${type}, ${listId}, ${elementId}, ${ownerGroup}, ${serializedEntity})`
			break
		default:
			throw new Error("must be a persistent type")
	}
	await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params)
}
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
async setLowerRangeForList<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, lowerId: Id): Promise<void> {
|
|
|
|
|
lowerId = ensureBase64Ext(await resolveTypeReference(typeRef), lowerId)
|
2022-08-11 16:38:53 +02:00
|
|
|
const type = getTypeId(typeRef)
|
2024-08-23 13:00:37 +02:00
|
|
|
const { query, params } = sql`UPDATE ranges SET lower = ${lowerId} WHERE type = ${type} AND listId = ${listId}`
|
2022-08-11 16:38:53 +02:00
|
|
|
await this.sqlCipherFacade.run(query, params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
async setUpperRangeForList<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, upperId: Id): Promise<void> {
|
|
|
|
|
upperId = ensureBase64Ext(await resolveTypeReference(typeRef), upperId)
|
2022-08-11 16:38:53 +02:00
|
|
|
const type = getTypeId(typeRef)
|
2024-08-23 13:00:37 +02:00
|
|
|
const { query, params } = sql`UPDATE ranges SET upper = ${upperId} WHERE type = ${type} AND listId = ${listId}`
|
2022-08-11 16:38:53 +02:00
|
|
|
await this.sqlCipherFacade.run(query, params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async setNewRangeForList<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, lower: Id, upper: Id): Promise<void> {
|
2024-08-23 13:00:37 +02:00
|
|
|
const typeModel = await resolveTypeReference(typeRef)
|
|
|
|
|
lower = ensureBase64Ext(typeModel, lower)
|
|
|
|
|
upper = ensureBase64Ext(typeModel, upper)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const type = getTypeId(typeRef)
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`INSERT OR REPLACE INTO ranges VALUES (${type}, ${listId}, ${lower}, ${upper})`
|
2022-08-11 16:38:53 +02:00
|
|
|
return this.sqlCipherFacade.run(query, params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async getLastBatchIdForGroup(groupId: Id): Promise<Id | null> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT batchId from lastUpdateBatchIdPerGroupId WHERE groupId = ${groupId}`
|
|
|
|
|
const row = (await this.sqlCipherFacade.get(query, params)) as { batchId: TaggedSqlValue } | null
|
2022-08-11 16:38:53 +02:00
|
|
|
return (row?.batchId?.value ?? null) as Id | null
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async putLastBatchIdForGroup(groupId: Id, batchId: Id): Promise<void> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`INSERT OR REPLACE INTO lastUpdateBatchIdPerGroupId VALUES (${groupId}, ${batchId})`
|
2022-08-11 16:38:53 +02:00
|
|
|
await this.sqlCipherFacade.run(query, params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-09-07 17:09:25 +02:00
|
|
|
/** Time of the last recorded sync, or "never" for a database that was never synced. */
async getLastUpdateTime(): Promise<LastUpdateTime> {
	const time = await this.getMetadata("lastUpdateTime")
	// NOTE(review): a falsy stored value (e.g. 0) is reported as "never" — presumably fine since
	// putLastUpdateTime() stores wall-clock ms, but confirm 0 can never legitimately be written.
	return time ? { type: "recorded", time } : { type: "never" }
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/** Record the wall-clock time (ms since epoch) of the last completed sync. */
async putLastUpdateTime(ms: number): Promise<void> {
	await this.putMetadata("lastUpdateTime", ms)
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async purgeStorage(): Promise<void> {
|
|
|
|
|
for (let name of Object.keys(TableDefinitions)) {
|
|
|
|
|
await this.sqlCipherFacade.run(`DELETE FROM ${name}`, [])
|
|
|
|
|
}
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async deleteRange(typeRef: TypeRef<unknown>, listId: string): Promise<void> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`DELETE FROM ranges WHERE type = ${getTypeId(typeRef)} AND listId = ${listId}`
|
2022-08-11 16:38:53 +02:00
|
|
|
await this.sqlCipherFacade.run(query, params)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async getListElementsOfType<T extends ListElementEntity>(typeRef: TypeRef<T>): Promise<Array<T>> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT entity from list_entities WHERE type = ${getTypeId(typeRef)}`
|
|
|
|
|
const items = (await this.sqlCipherFacade.all(query, params)) ?? []
|
|
|
|
|
return this.deserializeList(
|
|
|
|
|
typeRef,
|
|
|
|
|
items.map((row) => row.entity.value as Uint8Array),
|
|
|
|
|
)
|
2022-02-10 16:32:47 +01:00
|
|
|
}
|
|
|
|
|
|
2022-06-16 17:23:48 +02:00
|
|
|
async getElementsOfType<T extends ElementEntity>(typeRef: TypeRef<T>): Promise<Array<T>> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT entity from element_entities WHERE type = ${getTypeId(typeRef)}`
|
|
|
|
|
const items = (await this.sqlCipherFacade.all(query, params)) ?? []
|
|
|
|
|
return this.deserializeList(
|
|
|
|
|
typeRef,
|
|
|
|
|
items.map((row) => row.entity.value as Uint8Array),
|
|
|
|
|
)
|
2022-06-16 17:23:48 +02:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async getWholeList<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id): Promise<Array<T>> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT entity FROM list_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId}`
|
|
|
|
|
const items = (await this.sqlCipherFacade.all(query, params)) ?? []
|
|
|
|
|
return this.deserializeList(
|
|
|
|
|
typeRef,
|
|
|
|
|
items.map((row) => row.entity.value as Uint8Array),
|
|
|
|
|
)
|
2022-02-10 16:32:47 +01:00
|
|
|
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
async dumpMetadata(): Promise<Partial<OfflineDbMeta>> {
|
|
|
|
|
const query = "SELECT * from metadata"
|
2022-12-27 15:37:40 +01:00
|
|
|
const stored = (await this.sqlCipherFacade.all(query, [])).map((row) => [row.key.value as string, row.value.value as Uint8Array] as const)
|
2022-08-11 16:38:53 +02:00
|
|
|
return Object.fromEntries(stored.map(([key, value]) => [key, cborg.decode(value)])) as OfflineDbMeta
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
|
|
|
|
|
2022-10-21 15:53:39 +02:00
|
|
|
/** Persist the version for a model (or "offline") under its "<model>-version" metadata key. */
async setStoredModelVersion(model: VersionMetadataBaseKey, version: number) {
	await this.putMetadata(`${model}-version`, version)
}
|
|
|
|
|
|
|
|
|
|
/** Lazily build the custom cache handler map once and reuse it on subsequent calls. */
getCustomCacheHandlerMap(entityRestClient: EntityRestClient): CustomCacheHandlerMap {
	this.customCacheHandler ??= new CustomCacheHandlerMap({ ref: CalendarEventTypeRef, handler: new CustomCalendarEventCacheHandler(entityRestClient) })
	return this.customCacheHandler
}
|
|
|
|
|
|
2022-10-21 15:53:39 +02:00
|
|
|
/** Id of the user this database belongs to; throws if init() has not been called yet. */
getUserId(): Id {
	return assertNotNull(this.userId, "No user id, not initialized?")
}
|
|
|
|
|
|
|
|
|
|
/**
 * Remove all cached data belonging to the given owner group: element entities,
 * list entities (plus their ranges), blob element entities and the group's batch id marker.
 * Used e.g. when a group is lost and its data must no longer be served from the cache.
 */
async deleteAllOwnedBy(owner: Id): Promise<void> {
	{
		// element entities can be deleted directly by ownerGroup
		const { query, params } = sql`DELETE FROM element_entities WHERE ownerGroup = ${owner}`
		await this.sqlCipherFacade.run(query, params)
	}
	{
		// first, check which list Ids contain entities owned by the lost group
		const { query, params } = sql`SELECT listId, type FROM list_entities WHERE ownerGroup = ${owner}`
		const rangeRows = await this.sqlCipherFacade.all(query, params)
		const rows = rangeRows.map((row) => untagSqlObject(row) as { listId: string; type: string })
		const listIdsByType: Map<string, Set<Id>> = groupByAndMapUniquely(
			rows,
			(row) => row.type,
			(row) => row.listId,
		)
		// delete the ranges for those listIds
		for (const [type, listIds] of listIdsByType.entries()) {
			// this particular query uses one other SQL var for the type.
			const safeChunkSize = MAX_SAFE_SQL_VARS - 1
			const listIdArr = Array.from(listIds)
			await this.runChunked(safeChunkSize, listIdArr, (c) => sql`DELETE FROM ranges WHERE type = ${type} AND listId IN ${paramList(c)}`)
			await this.runChunked(safeChunkSize, listIdArr, (c) => sql`DELETE FROM list_entities WHERE type = ${type} AND listId IN ${paramList(c)}`)
		}
	}
	{
		// blob element entities can also be deleted directly by ownerGroup
		const { query, params } = sql`DELETE FROM blob_element_entities WHERE ownerGroup = ${owner}`
		await this.sqlCipherFacade.run(query, params)
	}
	{
		// finally drop the event batch marker for the group
		const { query, params } = sql`DELETE FROM lastUpdateBatchIdPerGroupId WHERE groupId = ${owner}`
		await this.sqlCipherFacade.run(query, params)
	}
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
private async putMetadata<K extends keyof OfflineDbMeta>(key: K, value: OfflineDbMeta[K]): Promise<void> {
|
2023-02-07 10:18:22 +01:00
|
|
|
let encodedValue
|
|
|
|
|
try {
|
|
|
|
|
encodedValue = cborg.encode(value)
|
|
|
|
|
} catch (e) {
|
|
|
|
|
console.log("[OfflineStorage] failed to encode metadata for key", key, "with value", value)
|
|
|
|
|
throw e
|
|
|
|
|
}
|
|
|
|
|
const { query, params } = sql`INSERT OR REPLACE INTO metadata VALUES (${key}, ${encodedValue})`
|
2022-08-11 16:38:53 +02:00
|
|
|
await this.sqlCipherFacade.run(query, params)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private async getMetadata<K extends keyof OfflineDbMeta>(key: K): Promise<OfflineDbMeta[K] | null> {
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT value from metadata WHERE key = ${key}`
|
2022-08-11 16:38:53 +02:00
|
|
|
const encoded = await this.sqlCipherFacade.get(query, params)
|
|
|
|
|
return encoded && cborg.decode(encoded.value.value as Uint8Array)
|
2022-01-12 14:43:01 +01:00
|
|
|
}
|
2022-02-10 16:32:47 +01:00
|
|
|
|
2022-05-06 14:39:31 +02:00
|
|
|
/**
 * Clear out unneeded data from the offline database (i.e. trash and spam lists, old data).
 * This will be called after login (CachePostLoginActions.ts) to ensure fast login time.
 * @param timeRangeDays: the maximum age of days that mails should be to be kept in the database. if null, will use a default value
 * @param userId id of the current user. default, last stored userId
 */
async clearExcludedData(timeRangeDays: number | null = this.timeRangeDays, userId: Id = this.getUserId()): Promise<void> {
	const user = await this.get(UserTypeRef, null, userId)

	// Free users always have default time range regardless of what is stored
	const isFreeUser = user?.accountType === AccountType.FREE
	const timeRange = isFreeUser || timeRangeDays == null ? OFFLINE_STORAGE_DEFAULT_TIME_RANGE_DAYS : timeRangeDays
	const now = this.dateProvider.now()
	const daysSinceDayAfterEpoch = now / DAY_IN_MILLIS - 1
	// cap the configured range so the resulting cutoff can never be before the epoch
	const timeRangeMillisSafe = Math.min(daysSinceDayAfterEpoch, timeRange) * DAY_IN_MILLIS
	// from May 15th 2109 onward, exceeding daysSinceDayAfterEpoch in the time range setting will
	// lead to an overflow in our 42 bit timestamp in the id.
	const cutoffTimestamp = now - timeRangeMillisSafe

	const mailBoxes = await this.getElementsOfType(MailBoxTypeRef)
	const cutoffId = timestampToGeneratedId(cutoffTimestamp)
	for (const mailBox of mailBoxes) {
		// a mailbox with a currentMailBag has been migrated to the mailset (mail bag) model
		const isMailsetMigrated = mailBox.currentMailBag != null
		// NOTE(review): non-null assertion assumes every cached mailbox references a folder list — confirm against server model
		const folders = await this.getWholeList(MailFolderTypeRef, mailBox.folders!.folders)
		if (isMailsetMigrated) {
			// deleting mailsetentries first to make sure that once we start deleting mail
			// we don't have any entries that reference that mail
			const folderSystem = new FolderSystem(folders)
			for (const mailSet of folders) {
				if (isSpamOrTrashFolder(folderSystem, mailSet)) {
					await this.deleteMailSetEntries(mailSet.entries, DEFAULT_MAILSET_ENTRY_CUSTOM_CUTOFF_TIMESTAMP)
				} else {
					await this.deleteMailSetEntries(mailSet.entries, cutoffTimestamp)
				}
			}

			// the mails themselves live in the current + archived mail bags and are cleaned the legacy way
			const mailListIds = [mailBox.currentMailBag!, ...mailBox.archivedMailBags].map((mailbag) => mailbag.mails)
			for (const mailListId of mailListIds) {
				await this.deleteMailListLegacy(mailListId, cutoffId)
			}
		} else {
			const folderSystem = new FolderSystem(folders)
			for (const folder of folders) {
				if (isSpamOrTrashFolder(folderSystem, folder)) {
					// spam/trash is dropped entirely: every id is smaller than GENERATED_MAX_ID
					await this.deleteMailListLegacy(folder.mails, GENERATED_MAX_ID)
				} else {
					await this.deleteMailListLegacy(folder.mails, cutoffId)
				}
			}
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/**
 * Create every table of the offline db schema that does not exist yet;
 * tables that already exist are left untouched.
 */
private async createTables() {
	for (const [name, definition] of Object.entries(TableDefinitions)) {
		await this.sqlCipherFacade.run(`CREATE TABLE IF NOT EXISTS ${name} (${definition})`, [])
	}
}
|
|
|
|
|
|
2024-08-23 13:00:37 +02:00
|
|
|
private async getRange(typeRef: TypeRef<ElementEntity | ListElementEntity>, listId: Id): Promise<Range | null> {
|
|
|
|
|
const type = getTypeId(typeRef)
|
|
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
const { query, params } = sql`SELECT upper, lower FROM ranges WHERE type = ${type} AND listId = ${listId}`
|
|
|
|
|
const row = (await this.sqlCipherFacade.get(query, params)) ?? null
|
2024-08-23 13:00:37 +02:00
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
return mapNullable(row, untagSqlObject) as Range | null
|
2022-04-20 10:39:52 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * This method deletes mails from {@param listId} that are older than {@param cutoffId} as well as associated data.
 *
 * it's considered legacy because once we start importing mail into mail bags, maintaining mail list ranges doesn't make
 * sense anymore - mail order in a list is arbitrary at that point.
 *
 * For each mail we delete the mail, its body, headers, all referenced mail set entries and all referenced attachments.
 *
 * When we delete the Files, we also delete the whole range for the user's File list. We need to delete the whole
 * range because we only have one file list per mailbox, so if we delete something from the middle of it, the range
 * will no longer be valid. (this is future proofing, because as of now there is not going to be a Range set for the
 * File list anyway, since we currently do not do range requests for Files.)
 *
 * We do not delete ConversationEntries because:
 * 1. They are in the same list for the whole conversation so we can't adjust the range
 * 2. We might need them in the future for showing the whole thread
 */
private async deleteMailListLegacy(listId: Id, cutoffId: Id): Promise<void> {
	// We lock access to the "ranges" db here in order to prevent race conditions when accessing the "ranges" database.
	await this.lockRangesDbAccess(listId)
	try {
		// This must be done before deleting mails to know what the new range has to be
		await this.updateRangeForListAndDeleteObsoleteData(MailTypeRef, listId, cutoffId)
	} finally {
		// We unlock access to the "ranges" db here. We lock it in order to prevent race conditions when accessing the "ranges" database.
		await this.unlockRangesDbAccess(listId)
	}

	// collect everything to delete first, then delete grouped by list for chunked queries
	const mailsToDelete: IdTuple[] = []
	const attachmentsToDelete: IdTuple[] = []
	const mailDetailsBlobToDelete: IdTuple[] = []
	const mailDetailsDraftToDelete: IdTuple[] = []

	const mails = await this.getWholeList(MailTypeRef, listId)
	for (let mail of mails) {
		// only mails strictly older than the cutoff are removed
		if (firstBiggerThanSecond(cutoffId, getElementId(mail))) {
			mailsToDelete.push(mail._id)
			for (const id of mail.attachments) {
				attachmentsToDelete.push(id)
			}

			// drafts keep their details in mailDetailsDraft, sent/received mails in mailDetails
			if (isDraft(mail)) {
				const mailDetailsId = assertNotNull(mail.mailDetailsDraft)
				mailDetailsDraftToDelete.push(mailDetailsId)
			} else {
				// mailDetailsBlob
				const mailDetailsId = assertNotNull(mail.mailDetails)
				mailDetailsBlobToDelete.push(mailDetailsId)
			}
		}
	}
	for (let [listId, elementIds] of groupByAndMap(mailDetailsBlobToDelete, listIdPart, elementIdPart).entries()) {
		await this.deleteIn(MailDetailsBlobTypeRef, listId, elementIds)
	}
	for (let [listId, elementIds] of groupByAndMap(mailDetailsDraftToDelete, listIdPart, elementIdPart).entries()) {
		await this.deleteIn(MailDetailsDraftTypeRef, listId, elementIds)
	}
	for (let [listId, elementIds] of groupByAndMap(attachmentsToDelete, listIdPart, elementIdPart).entries()) {
		await this.deleteIn(FileTypeRef, listId, elementIds)
		// see doc comment: the single File list range would become invalid after partial deletion
		await this.deleteRange(FileTypeRef, listId)
	}

	// finally remove the mails themselves
	await this.deleteIn(MailTypeRef, listId, mailsToDelete.map(elementIdPart))
}
|
|
|
|
|
|
2024-08-26 15:07:37 +02:00
|
|
|
/**
 * delete all mail set entries of a mail set that reference some mail with a receivedDate older than
 * cutoffTimestamp. this doesn't clean up mails or their associated data because we could be breaking the
 * offline list range invariant by deleting data from the middle of a mail range. cleaning up mails is done
 * the legacy way currently even for mailset users.
 */
private async deleteMailSetEntries(entriesListId: Id, cutoffTimestamp: number) {
	// MailSetEntry ids are custom ids built from (receivedDate, mailId); using GENERATED_MAX_ID as the
	// mail-id part makes the cutoff id sort after every entry of that timestamp.
	const cutoffId = constructMailSetEntryId(new Date(cutoffTimestamp), GENERATED_MAX_ID)
	// lock the "ranges" db to prevent race conditions while the stored range is adjusted
	await this.lockRangesDbAccess(entriesListId)
	try {
		await this.updateRangeForListAndDeleteObsoleteData(MailSetEntryTypeRef, entriesListId, cutoffId)
	} finally {
		// We unlock access to the "ranges" db here. We lock it in order to prevent race conditions when accessing the "ranges" database.
		await this.unlockRangesDbAccess(entriesListId)
	}

	// collect all entries older than the cutoff, then delete them in one (chunked) pass
	const mailSetEntriesToDelete: IdTuple[] = []
	const mailSetEntries = await this.getWholeList(MailSetEntryTypeRef, entriesListId)
	for (let mailSetEntry of mailSetEntries) {
		if (firstBiggerThanSecond(cutoffId, getElementId(mailSetEntry))) {
			mailSetEntriesToDelete.push(mailSetEntry._id)
		}
	}
	await this.deleteIn(MailSetEntryTypeRef, entriesListId, mailSetEntriesToDelete.map(elementIdPart))
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/**
 * Delete the given cached entities from whichever table matches their type model
 * (element, list element or blob element entities).
 *
 * Queries are chunked so the number of bound SQL variables stays below MAX_SAFE_SQL_VARS;
 * the subtracted constant accounts for the variables used outside the id list
 * (type, plus listId where applicable).
 * @param typeRef type of the entities to delete
 * @param listId owning list id; only used for list/blob element types
 * @param elementIds ids to delete; no-op when empty
 * @throws Error for type models that are not persisted in one of the three tables
 */
private async deleteIn(typeRef: TypeRef<unknown>, listId: Id | null, elementIds: Id[]): Promise<void> {
	if (elementIds.length === 0) return
	const typeModel = await resolveTypeReference(typeRef)
	switch (typeModel.type) {
		case TypeId.Element:
			return await this.runChunked(
				MAX_SAFE_SQL_VARS - 1,
				elementIds,
				(c) => sql`DELETE FROM element_entities WHERE type = ${getTypeId(typeRef)} AND elementId IN ${paramList(c)}`,
			)
		case TypeId.ListElement:
			return await this.runChunked(
				MAX_SAFE_SQL_VARS - 2,
				elementIds,
				(c) => sql`DELETE FROM list_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId} AND elementId IN ${paramList(c)}`,
			)
		case TypeId.BlobElement:
			return await this.runChunked(
				MAX_SAFE_SQL_VARS - 2,
				elementIds,
				(c) => sql`DELETE FROM blob_element_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId} AND elementId IN ${paramList(c)}`,
			)
		default:
			throw new Error("must be a persistent type")
	}
}
|
|
|
|
|
|
2022-11-30 17:15:08 +01:00
|
|
|
/**
|
|
|
|
|
* We want to lock the access to the "ranges" db when updating / reading the
|
2024-08-26 15:07:37 +02:00
|
|
|
* offline available mail list / mailset ranges for each mail list (referenced using the listId).
|
|
|
|
|
* @param listId the mail list or mail set entry list that we want to lock
|
2022-11-30 17:15:08 +01:00
|
|
|
*/
|
|
|
|
|
async lockRangesDbAccess(listId: Id) {
|
|
|
|
|
await this.sqlCipherFacade.lockRangesDbAccess(listId)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* This is the counterpart to the function "lockRangesDbAccess(listId)".
|
|
|
|
|
* @param listId the mail list that we want to unlock
|
|
|
|
|
*/
|
|
|
|
|
async unlockRangesDbAccess(listId: Id) {
|
|
|
|
|
await this.sqlCipherFacade.unlockRangesDbAccess(listId)
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-26 15:07:37 +02:00
|
|
|
/**
 * Shrink (or remove) the stored range of {@param listId} so it no longer covers ids
 * older than {@param rawCutoffId}. Does nothing when no range is stored.
 *
 * Custom ids are converted to base64ext first because that is how they are stored
 * (and sorted) in the offline db, while callers pass them as received from the server.
 */
private async updateRangeForListAndDeleteObsoleteData<T extends ListElementEntity>(typeRef: TypeRef<T>, listId: Id, rawCutoffId: Id): Promise<void> {
	const typeModel = await resolveTypeReference(typeRef)
	const isCustomId = isCustomIdType(typeModel)
	const convertedCutoffId = ensureBase64Ext(typeModel, rawCutoffId)

	const range = await this.getRange(typeRef, listId)
	if (range == null) {
		return
	}

	// If the range for a given list is complete from the beginning (starts at GENERATED_MIN_ID), then we only want to actually modify the
	// saved range if we would be removing elements from the list, in order to not lose the information that the range is complete in storage.
	// So we have to check how old the oldest element in said range is. If it is newer than cutoffId, then we will not modify the range,
	// otherwise we will just modify it normally
	const expectedMinId = isCustomId ? CUSTOM_MIN_ID : GENERATED_MIN_ID
	if (range.lower === expectedMinId) {
		// load only the single oldest element of the range
		const entities = await this.provideFromRange(typeRef, listId, expectedMinId, 1, false)
		const id = mapNullable(entities[0], getElementId)
		const rangeWontBeModified = id == null || firstBiggerThanSecond(id, convertedCutoffId) || id === convertedCutoffId
		if (rangeWontBeModified) {
			return
		}
	}

	if (firstBiggerThanSecond(convertedCutoffId, range.lower)) {
		// If the upper id of the range is below the cutoff, then the entire range will be deleted from the storage
		// so we just delete the range as well
		// Otherwise, we only want to modify the lower bound of the range up to the cutoff
		if (firstBiggerThanSecond(convertedCutoffId, range.upper)) {
			await this.deleteRange(typeRef, listId)
		} else {
			// note: setLowerRangeForList receives the raw (unconverted) cutoff id
			await this.setLowerRangeForList(typeRef, listId, rawCutoffId)
		}
	}
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/**
 * Encode an entity into the binary form stored in the offline db.
 * Logs which entity failed before rethrowing on encoding errors.
 */
private serialize(originalEntity: SomeEntity): Uint8Array {
	let encoded: Uint8Array
	try {
		encoded = cborg.encode(originalEntity, { typeEncoders: customTypeEncoders })
	} catch (e) {
		console.log("[OfflineStorage] failed to encode entity of type", originalEntity._type, "with id", originalEntity._id)
		throw e
	}
	return encoded
}
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/**
 * Decode a stored entity blob back into an entity instance.
 * The TypeRef is patched in afterwards: it cannot be round-tripped through cbor
 * automatically, and some places rely on TypeRef being a class and not a plain object,
 * so we don't store it at all and just re-attach it here.
 */
private deserialize<T extends SomeEntity>(typeRef: TypeRef<T>, loaded: Uint8Array): T {
	const entity = cborg.decode(loaded, { tags: customTypeDecoders })
	entity._type = typeRef
	return entity
}
|
2022-05-17 17:40:44 +02:00
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
private deserializeList<T extends SomeEntity>(typeRef: TypeRef<T>, loaded: Array<Uint8Array>): Array<T> {
|
2022-12-27 15:37:40 +01:00
|
|
|
return loaded.map((entity) => this.deserialize(typeRef, entity))
|
2022-05-17 17:40:44 +02:00
|
|
|
}
|
2023-08-16 10:47:09 +02:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* convenience method to run a potentially too large query over several chunks.
|
|
|
|
|
* chunkSize must be chosen such that the total number of SQL variables in the final query does not exceed MAX_SAFE_SQL_VARS
|
|
|
|
|
* */
|
|
|
|
|
private async runChunked(chunkSize: number, originalList: SqlValue[], formatter: (chunk: SqlValue[]) => FormattedQuery): Promise<void> {
|
|
|
|
|
for (const chunk of splitInChunks(chunkSize, originalList)) {
|
|
|
|
|
const formattedQuery = formatter(chunk)
|
|
|
|
|
await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params)
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-08-07 08:38:58 +02:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* convenience method to execute a potentially too large query over several chunks.
|
|
|
|
|
* chunkSize must be chosen such that the total number of SQL variables in the final query does not exceed MAX_SAFE_SQL_VARS
|
|
|
|
|
* */
|
|
|
|
|
private async allChunked(
|
|
|
|
|
chunkSize: number,
|
|
|
|
|
originalList: SqlValue[],
|
|
|
|
|
formatter: (chunk: SqlValue[]) => FormattedQuery,
|
|
|
|
|
): Promise<Array<Record<string, TaggedSqlValue>>> {
|
|
|
|
|
const result: Array<Record<string, TaggedSqlValue>> = []
|
|
|
|
|
for (const chunk of splitInChunks(chunkSize, originalList)) {
|
|
|
|
|
const formattedQuery = formatter(chunk)
|
|
|
|
|
result.push(...(await this.sqlCipherFacade.all(formattedQuery.query, formattedQuery.params)))
|
|
|
|
|
}
|
|
|
|
|
return result
|
|
|
|
|
}
|
2022-08-11 16:38:53 +02:00
|
|
|
}
|
2022-07-04 14:55:17 +02:00
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
/*
|
2023-08-16 10:47:09 +02:00
|
|
|
* used to automatically create the right amount of SQL variables for selecting ids from a dynamic list.
|
|
|
|
|
* must be used within sql`<query>` template string to inline the logic into the query.
|
|
|
|
|
*
|
|
|
|
|
* It is very important that params is kept to a size such that the total amount of SQL variables is
|
|
|
|
|
* less than MAX_SAFE_SQL_VARS.
|
2022-08-11 16:38:53 +02:00
|
|
|
*/
|
|
|
|
|
function paramList(params: SqlValue[]): SqlFragment {
|
2022-12-27 15:37:40 +01:00
|
|
|
const qs = params.map(() => "?").join(",")
|
2022-08-11 16:38:53 +02:00
|
|
|
return new SqlFragment(`(${qs})`, params)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2022-08-15 14:22:44 +02:00
|
|
|
* comparison to select ids that are bigger or smaller than a parameter id
|
2023-08-16 10:47:09 +02:00
|
|
|
* must be used within sql`<query>` template string to inline the logic into the query.
|
|
|
|
|
*
|
|
|
|
|
* will always insert 3 constants and 3 SQL variables into the query.
|
2022-08-11 16:38:53 +02:00
|
|
|
*/
|
|
|
|
|
function firstIdBigger(...args: [string, "elementId"] | ["elementId", string]): SqlFragment {
|
|
|
|
|
let [l, r]: [string, string] = args
|
|
|
|
|
let v
|
|
|
|
|
if (l === "elementId") {
|
|
|
|
|
v = r
|
|
|
|
|
r = "?"
|
|
|
|
|
} else {
|
|
|
|
|
v = l
|
|
|
|
|
l = "?"
|
|
|
|
|
}
|
2022-12-27 15:37:40 +01:00
|
|
|
return new SqlFragment(`(CASE WHEN length(${l}) > length(${r}) THEN 1 WHEN length(${l}) < length(${r}) THEN 0 ELSE ${l} > ${r} END)`, [v, v, v])
|
2022-08-11 16:38:53 +02:00
|
|
|
}
|
2024-08-23 13:00:37 +02:00
|
|
|
|
|
|
|
|
export function isCustomIdType(typeModel: TypeModel): boolean {
|
|
|
|
|
return typeModel.values._id.type === ValueType.CustomId
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* We store customIds as base64ext in the db to make them sortable, but we get them as base64url from the server.
|
|
|
|
|
*/
|
|
|
|
|
export function ensureBase64Ext(typeModel: TypeModel, elementId: Id): Id {
|
|
|
|
|
if (isCustomIdType(typeModel)) {
|
|
|
|
|
return base64ToBase64Ext(base64UrlToBase64(elementId))
|
|
|
|
|
}
|
|
|
|
|
return elementId
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
export function customIdToBase64Url(typeModel: TypeModel, elementId: Id): Id {
|
|
|
|
|
if (isCustomIdType(typeModel)) {
|
|
|
|
|
return base64ToBase64Url(base64ExtToBase64(elementId))
|
|
|
|
|
}
|
|
|
|
|
return elementId
|
|
|
|
|
}
|