import o from "@tutao/otest"
import { verify } from "@tutao/tutanota-test-utils"
import { customTypeEncoders, OfflineStorage, sql } from "../../../../../src/api/worker/offline/OfflineStorage.js"
import { instance, object, when } from "testdouble"
import * as cborg from "cborg"
import { GENERATED_MIN_ID, generatedIdToTimestamp, getElementId, timestampToGeneratedId } from "../../../../../src/api/common/utils/EntityUtils.js"
import { getDayShifted, getFirstOrThrow, getTypeId, lastThrow, mapNullable, promiseMap, TypeRef } from "@tutao/tutanota-utils"
import { DateProvider } from "../../../../../src/api/common/DateProvider.js"
import {
	createFile,
	createMail,
	createMailBody,
	createMailDetails,
	createMailDetailsBlob,
	createMailFolder,
	FileTypeRef,
	Mail,
	MailBody,
	MailBodyTypeRef,
	MailDetailsBlobTypeRef,
	MailDetailsTypeRef,
	MailFolderTypeRef,
	MailTypeRef,
} from "../../../../../src/api/entities/tutanota/TypeRefs.js"
import { OfflineStorageMigrator } from "../../../../../src/api/worker/offline/OfflineStorageMigrator.js"
import { InterWindowEventFacadeSendDispatcher } from "../../../../../src/native/common/generatedipc/InterWindowEventFacadeSendDispatcher.js"
import * as fs from "node:fs"
import { untagSqlObject } from "../../../../../src/api/worker/offline/SqlValue.js"
import { MailFolderType } from "../../../../../src/api/common/TutanotaConstants.js"
import { BlobElementEntity, ElementEntity, ListElementEntity, SomeEntity } from "../../../../../src/api/common/EntityTypes.js"
import { resolveTypeReference } from "../../../../../src/api/common/EntityFunctions.js"
import { Type as TypeId } from "../../../../../src/api/common/EntityConstants.js"
import { expandId } from "../../../../../src/api/worker/rest/DefaultEntityRestCache.js"
import { WorkerImpl } from "../../../../../src/api/worker/WorkerImpl.js"
import { createUser, UserTypeRef } from "../../../../../src/api/entities/sys/TypeRefs.js"
import { DesktopSqlCipher } from "../../../../../src/desktop/db/DesktopSqlCipher.js"
import { createTestEntity } from "../../../TestUtils.js"

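// Returns a generated id whose timestamp lies `ms` milliseconds after that of the given id.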
function incrementId(id: Id, ms: number) {
	const timestamp = generatedIdToTimestamp(id)
	return timestampToGeneratedId(timestamp + ms)
}

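// Hands out strictly increasing generated ids, spaced one minute apart by default.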
class IdGenerator {
	constructor(private currentId: Id) {}

	getNext(incrementByMs: number = 60000): Id {
		this.currentId = incrementId(this.currentId, incrementByMs)
		return this.currentId
	}
}

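// Encodes an entity with the same cbor type encoders that OfflineStorage uses, so rows inserted directly via SQL in these tests match what the storage reads back.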
function encode(thing) {
	return cborg.encode(thing, { typeEncoders: customTypeEncoders })
}

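// `buildOptions` is assumed to be a global injected by the test build; the exported key is used for the
// integration test database below and may be reused by other offline tests, since it is exported.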
const nativePath = buildOptions.sqliteNativePath
const database = "./testdatabase.sqlite"
export const offlineDatabaseTestKey = Uint8Array.from([3957386659, 354339016, 3786337319, 3366334248])

o.spec("OfflineStorage", function () {
|
2022-04-20 10:39:52 +02:00
|
|
|
const now = new Date("2022-01-01 00:00:00 UTC")
|
|
|
|
|
const timeRangeDays = 10
|
|
|
|
|
const userId = "userId"
|
2022-07-20 15:28:38 +02:00
|
|
|
const databaseKey = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7])
|
2022-04-06 16:55:16 +02:00
|
|
|
|
2022-04-20 10:39:52 +02:00
|
|
|
/** get an id based on a timestamp that is {@param days} days away from the time range cutoff */
|
2022-12-27 15:37:40 +01:00
|
|
|
const offsetId = (days) => timestampToGeneratedId(getDayShifted(now, 0 - timeRangeDays + days).getTime())
|
2022-04-20 10:39:52 +02:00
|
|
|
const cutoffId = offsetId(0)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
let dbFacade: DesktopSqlCipher
|
|
|
|
|
let dateProviderMock: DateProvider
|
|
|
|
|
let storage: OfflineStorage
|
|
|
|
|
let migratorMock: OfflineStorageMigrator
|
|
|
|
|
let interWindowEventSenderMock: InterWindowEventFacadeSendDispatcher
|
2022-11-28 17:38:17 +01:00
|
|
|
let worker: WorkerImpl
|
2022-08-11 16:38:53 +02:00
|
|
|
|
|
|
|
|
o.beforeEach(async function () {
|
|
|
|
|
dbFacade = new DesktopSqlCipher(nativePath, database, false)
|
|
|
|
|
|
|
|
|
|
dateProviderMock = object<DateProvider>()
|
|
|
|
|
migratorMock = instance(OfflineStorageMigrator)
|
|
|
|
|
interWindowEventSenderMock = instance(InterWindowEventFacadeSendDispatcher)
|
|
|
|
|
when(dateProviderMock.now()).thenReturn(now.getTime())
|
2022-12-12 11:29:42 +01:00
|
|
|
storage = new OfflineStorage(dbFacade, interWindowEventSenderMock, dateProviderMock, migratorMock)
|
2022-08-11 16:38:53 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o.afterEach(async function () {
|
|
|
|
|
await dbFacade.closeDb()
|
|
|
|
|
await fs.promises.unlink(database)
|
|
|
|
|
})
|
2022-04-20 10:39:52 +02:00
|
|
|
|
|
|
|
|
o.spec("Unit test", function () {
|
2022-08-11 16:38:53 +02:00
|
|
|
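		// Writes an entity straight into the underlying SQLite tables, bypassing OfflineStorage, so tests can set up pre-existing offline state.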
		async function insertEntity(entity: SomeEntity) {
			const typeModel = await resolveTypeReference(entity._type)
			const type = getTypeId(entity._type)
			let preparedQuery
			switch (typeModel.type) {
				case TypeId.Element.valueOf():
					preparedQuery = sql`insert into element_entities values (${type}, ${(entity as ElementEntity)._id}, ${entity._ownerGroup}, ${encode(
						entity,
					)})`
					break
				case TypeId.ListElement.valueOf():
					const [listId, elementId] = (entity as ListElementEntity)._id
					preparedQuery = sql`INSERT INTO list_entities VALUES (${type}, ${listId}, ${elementId}, ${entity._ownerGroup}, ${encode(entity)})`
					break
				case TypeId.BlobElement.valueOf():
					const [archiveId, blobElementId] = (entity as BlobElementEntity)._id
					preparedQuery = sql`INSERT INTO blob_element_entities VALUES (${type}, ${archiveId}, ${blobElementId}, ${entity._ownerGroup}, ${encode(
						entity,
					)})`
					break
				default:
					throw new Error("must be a persistent type")
			}
			await dbFacade.run(preparedQuery.query, preparedQuery.params)
		}

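		// Inserts a cached range row for the given list directly into the ranges table.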
		async function insertRange(type: TypeRef<unknown>, listId: string, lower: string, upper: string) {
			const { query, params } = sql`INSERT INTO ranges VALUES(${getTypeId(type)}, ${listId}, ${lower}, ${upper})`
			await dbFacade.run(query, params)
		}

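		// Reads the element ids of all stored entities of the given type directly from the database.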
		async function getAllIdsForType(typeRef: TypeRef<unknown>): Promise<Id[]> {
			const typeModel = await resolveTypeReference(typeRef)
			let preparedQuery
			switch (typeModel.type) {
				case TypeId.Element.valueOf():
					preparedQuery = sql`select * from element_entities where type = ${getTypeId(typeRef)}`
					break
				case TypeId.ListElement.valueOf():
					preparedQuery = sql`select * from list_entities where type = ${getTypeId(typeRef)}`
					break
				case TypeId.BlobElement.valueOf():
					preparedQuery = sql`select * from blob_element_entities where type = ${getTypeId(typeRef)}`
					break
				default:
					throw new Error("must be a persistent type")
			}
			return (await dbFacade.all(preparedQuery.query, preparedQuery.params)).map((r) => r.elementId.value as Id)
		}

o("migrations are run", async function () {
|
2022-12-27 15:37:40 +01:00
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
2022-10-21 15:53:39 +02:00
|
|
|
verify(migratorMock.migrate(storage, dbFacade))
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
o.spec("Offline storage round trip", function () {
|
2023-02-02 17:21:34 +01:00
|
|
|
o.spec("ElementType", function () {
|
|
|
|
|
o("deleteAllOfType", async function () {
|
|
|
|
|
const userId = "id1"
|
|
|
|
|
const storableUser = createUser({ _id: userId })
|
|
|
|
|
|
|
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
|
|
|
|
let user = await storage.get(UserTypeRef, null, userId)
|
|
|
|
|
o(user).equals(null)
|
|
|
|
|
|
|
|
|
|
await storage.put(storableUser)
|
|
|
|
|
|
|
|
|
|
user = await storage.get(UserTypeRef, null, userId)
|
|
|
|
|
o(user!._id).equals(storableUser._id)
|
|
|
|
|
|
|
|
|
|
await storage.deleteAllOfType(UserTypeRef)
|
|
|
|
|
|
|
|
|
|
user = await storage.get(UserTypeRef, null, userId)
|
|
|
|
|
o(user).equals(null)
|
|
|
|
|
})
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o.spec("ListElementType", function () {
|
|
|
|
|
o("deleteAllOfType", async function () {
|
|
|
|
|
const listId = "listId1"
|
|
|
|
|
const elementId = "id1"
|
|
|
|
|
const storableMail = createMail({ _id: [listId, elementId] })
|
|
|
|
|
|
|
|
|
|
await storage.init({ userId: elementId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
|
|
|
|
let mail = await storage.get(MailTypeRef, listId, elementId)
|
|
|
|
|
o(mail).equals(null)
|
|
|
|
|
|
|
|
|
|
await storage.put(storableMail)
|
2023-03-29 14:46:04 +02:00
|
|
|
await storage.setNewRangeForList(MailTypeRef, listId, elementId, elementId)
|
2023-02-02 17:21:34 +01:00
|
|
|
|
|
|
|
|
mail = await storage.get(MailTypeRef, listId, elementId)
|
|
|
|
|
o(mail!._id).deepEquals(storableMail._id)
|
2023-03-29 14:46:04 +02:00
|
|
|
const rangeBefore = await storage.getRangeForList(MailTypeRef, listId)
|
|
|
|
|
o(rangeBefore).deepEquals({ upper: elementId, lower: elementId })
|
2023-02-02 17:21:34 +01:00
|
|
|
await storage.deleteAllOfType(MailTypeRef)
|
|
|
|
|
|
|
|
|
|
mail = await storage.get(MailTypeRef, listId, elementId)
|
|
|
|
|
o(mail).equals(null)
|
2023-03-29 14:46:04 +02:00
|
|
|
const rangeAfter = await storage.getRangeForList(MailTypeRef, listId)
|
|
|
|
|
o(rangeAfter).equals(null)
|
2023-02-02 17:21:34 +01:00
|
|
|
})
|
|
|
|
|
})
|
|
|
|
|
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
o.spec("BlobElementType", function () {
|
|
|
|
|
o("put, get and delete", async function () {
|
|
|
|
|
const archiveId = "archiveId"
|
|
|
|
|
const blobElementId = "id1"
|
2023-11-09 17:04:42 +01:00
|
|
|
const storableMailDetails = createMailDetailsBlob({ _id: [archiveId, blobElementId], details: createTestEntity(MailDetailsTypeRef) })
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
|
|
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
|
|
|
|
let mailDetailsBlob = await storage.get(MailDetailsBlobTypeRef, archiveId, blobElementId)
|
|
|
|
|
o(mailDetailsBlob).equals(null)
|
|
|
|
|
|
|
|
|
|
await storage.put(storableMailDetails)
|
|
|
|
|
|
|
|
|
|
mailDetailsBlob = await storage.get(MailDetailsBlobTypeRef, archiveId, blobElementId)
|
|
|
|
|
mailDetailsBlob!.details._type = MailDetailsTypeRef // we do not set the proper typeRef class on nested aggregates, so we overwrite it here
|
|
|
|
|
o(mailDetailsBlob).deepEquals(storableMailDetails)
|
|
|
|
|
|
|
|
|
|
await storage.deleteIfExists(MailDetailsBlobTypeRef, archiveId, blobElementId)
|
|
|
|
|
|
|
|
|
|
mailDetailsBlob = await storage.get(MailDetailsBlobTypeRef, archiveId, blobElementId)
|
|
|
|
|
o(mailDetailsBlob).equals(null)
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("put, get and deleteAllOwnedBy", async function () {
|
|
|
|
|
const archiveId = "archiveId"
|
|
|
|
|
const blobElementId = "id1"
|
|
|
|
|
const _ownerGroup = "ownerGroup"
|
2023-11-09 17:04:42 +01:00
|
|
|
const storableMailDetails = createMailDetailsBlob({
|
|
|
|
|
_id: [archiveId, blobElementId],
|
|
|
|
|
_ownerGroup,
|
|
|
|
|
details: createTestEntity(MailDetailsTypeRef),
|
|
|
|
|
})
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
|
|
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
|
|
|
|
await storage.put(storableMailDetails)
|
|
|
|
|
|
|
|
|
|
await storage.deleteAllOwnedBy(_ownerGroup)
|
|
|
|
|
|
|
|
|
|
const mailDetailsBlob = await storage.get(MailDetailsBlobTypeRef, archiveId, blobElementId)
|
|
|
|
|
o(mailDetailsBlob).equals(null)
|
|
|
|
|
})
|
|
|
|
|
})
|
|
|
|
|
})
|
|
|
|
|
|
2022-04-20 10:39:52 +02:00
|
|
|
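		// clearExcludedData drops mails older than the configured time range as well as everything in trash and spam;
		// these tests prepare raw rows via the SQL helpers above and then check what survives.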
o.spec("Clearing excluded data", function () {
|
2023-01-12 14:54:42 +01:00
|
|
|
const spamFolderId = "spamFolder"
|
|
|
|
|
const trashFolderId = "trashFolder"
|
|
|
|
|
const spamListId = "spamList"
|
|
|
|
|
const trashListId = "trashList"
|
2022-04-20 10:39:52 +02:00
|
|
|
const listId = "listId"
|
2022-05-23 17:09:09 +02:00
|
|
|
const mailType = getTypeId(MailTypeRef)
|
2023-01-12 14:54:42 +01:00
|
|
|
const mailFolderType = getTypeId(MailFolderTypeRef)
|
|
|
|
|
const mailBodyType = getTypeId(MailBodyTypeRef)
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2023-01-12 14:54:42 +01:00
|
|
|
o.beforeEach(async function () {
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
2023-01-12 14:54:42 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", spamFolderId], mails: spamListId, folderType: MailFolderType.SPAM }))
|
|
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", trashFolderId], mails: trashListId, folderType: MailFolderType.TRASH }))
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
})
|
|
|
|
|
|
2023-01-12 14:54:42 +01:00
|
|
|
o("old ranges will be deleted", async function () {
|
2022-04-20 10:39:52 +02:00
|
|
|
const upper = offsetId(-1)
|
|
|
|
|
const lower = offsetId(-2)
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
const mailDetailsBlobId: IdTuple = ["mailDetailsList", "mailDetailsBlobId"]
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", "mailFolderId"], mails: listId }))
|
2023-11-09 17:04:42 +01:00
|
|
|
await insertEntity(createMailDetailsBlob({ _id: mailDetailsBlobId, details: createTestEntity(MailDetailsTypeRef) }))
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
await insertEntity(createMail({ _id: [listId, "anything"], mailDetails: mailDetailsBlobId }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertRange(MailTypeRef, listId, lower, upper)
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-11-30 17:15:08 +01:00
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const allRanges = await dbFacade.all("SELECT * FROM ranges", [])
|
|
|
|
|
o(allRanges).deepEquals([])
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
const allMails = await getAllIdsForType(MailTypeRef)
|
|
|
|
|
o(allMails).deepEquals([])
|
|
|
|
|
const allBlobDetails = await getAllIdsForType(MailDetailsBlobTypeRef)
|
|
|
|
|
o(allBlobDetails).deepEquals([])
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("modified ranges will be shrunk", async function () {
|
|
|
|
|
const upper = offsetId(2)
|
|
|
|
|
const lower = offsetId(-2)
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderListId", "mailFolderId"], mails: listId }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertRange(MailTypeRef, listId, lower, upper)
|
2022-11-30 17:15:08 +01:00
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const newRange = await dbFacade.get("select * from ranges", [])
|
2022-12-27 15:37:40 +01:00
|
|
|
o(mapNullable(newRange, untagSqlObject)).deepEquals({ type: mailType, listId, lower: cutoffId, upper })
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("unmodified ranges will not be deleted or shrunk", async function () {
|
|
|
|
|
const upper = offsetId(2)
|
|
|
|
|
const lower = offsetId(1)
|
|
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", "mailFolderId"], mails: listId }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertRange(MailTypeRef, listId, lower, upper)
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-11-30 17:15:08 +01:00
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const newRange = await dbFacade.get("select * from ranges", [])
|
2022-12-27 15:37:40 +01:00
|
|
|
o(mapNullable(newRange, untagSqlObject)).deepEquals({ type: mailType, listId, lower, upper })
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("complete ranges won't be lost if entities are all newer than cutoff", async function () {
|
|
|
|
|
const upper = offsetId(2)
|
|
|
|
|
const lower = GENERATED_MIN_ID
|
2022-12-27 15:37:40 +01:00
|
|
|
const mail = createMail({ _id: [listId, offsetId(1)] })
|
|
|
|
|
const mailFolder = createMailFolder({ _id: ["mailFolderList", "folderId"], mails: listId })
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertEntity(mailFolder)
|
|
|
|
|
await insertEntity(mail)
|
|
|
|
|
await insertRange(MailTypeRef, listId, lower, upper)
|
2022-11-30 17:15:08 +01:00
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
2022-08-11 16:38:53 +02:00
|
|
|
|
|
|
|
|
const newRange = await dbFacade.get("select * from ranges", [])
|
2022-12-27 15:37:40 +01:00
|
|
|
o(mapNullable(newRange, untagSqlObject)).deepEquals({ type: mailType, listId, lower, upper })
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const allFolderIds = await getAllIdsForType(MailFolderTypeRef)
|
2023-01-12 14:54:42 +01:00
|
|
|
o(allFolderIds).deepEquals(["folderId", spamFolderId, trashFolderId])
|
2022-08-11 16:38:53 +02:00
|
|
|
const allMailIds = await getAllIdsForType(MailTypeRef)
|
|
|
|
|
o(allMailIds).deepEquals([getElementId(mail)])
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
2023-01-12 17:26:41 +01:00
|
|
|
o("legacy trash and spam are cleared", async function () {
|
2023-01-12 14:54:42 +01:00
|
|
|
const spamMailBodyId = "spamMailBodyId"
|
|
|
|
|
const trashMailBodyId = "trashMailBodyId"
|
|
|
|
|
|
|
|
|
|
const spamMailId = offsetId(2)
|
|
|
|
|
const spamMail = createMail({ _id: [spamListId, spamMailId], body: spamMailBodyId })
|
|
|
|
|
const trashMailId = offsetId(2)
|
|
|
|
|
const trashMail = createMail({ _id: [trashListId, trashMailId], body: trashMailBodyId })
|
|
|
|
|
|
|
|
|
|
await insertEntity(spamMail)
|
|
|
|
|
await insertEntity(trashMail)
|
|
|
|
|
await insertEntity(createMailBody({ _id: spamMailBodyId }))
|
|
|
|
|
await insertEntity(createMailBody({ _id: trashMailBodyId }))
|
|
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
|
|
|
|
const allEntities = await dbFacade.all("select * from list_entities", [])
|
|
|
|
|
o(allEntities.map((r) => r.elementId.value)).deepEquals([spamFolderId, trashFolderId])
|
|
|
|
|
|
|
|
|
|
o(await getAllIdsForType(MailFolderTypeRef)).deepEquals([spamFolderId, trashFolderId])
|
|
|
|
|
o(await getAllIdsForType(MailTypeRef)).deepEquals([])
|
|
|
|
|
o(await getAllIdsForType(MailBodyTypeRef)).deepEquals([])
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("trash and spam descendants are cleared", async function () {
|
|
|
|
|
const trashSubfolderId = "trashSubfolderId"
|
|
|
|
|
const trashSubfolderListId = "trashSubfolderListId"
|
2022-04-20 10:39:52 +02:00
|
|
|
const spamMailBodyId = "spamMailBodyId"
|
|
|
|
|
const trashMailBodyId = "trashMailBodyId"
|
2023-01-12 14:54:42 +01:00
|
|
|
const trashSubfolderMailBodyId = "trashSubfolderMailBodyId"
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const spamMailId = offsetId(2)
|
2022-12-27 15:37:40 +01:00
|
|
|
const spamMail = createMail({ _id: [spamListId, spamMailId], body: spamMailBodyId })
|
2022-08-11 16:38:53 +02:00
|
|
|
const trashMailId = offsetId(2)
|
2022-12-27 15:37:40 +01:00
|
|
|
const trashMail = createMail({ _id: [trashListId, trashMailId], body: trashMailBodyId })
|
2023-01-12 14:54:42 +01:00
|
|
|
const trashSubfolderMailId = offsetId(2)
|
|
|
|
|
const trashSubfolderMail = createMail({ _id: [trashSubfolderListId, trashSubfolderMailId], body: trashSubfolderMailBodyId })
|
|
|
|
|
|
|
|
|
|
await insertEntity(
|
|
|
|
|
createMailFolder({
|
|
|
|
|
_id: ["mailFolderList", trashSubfolderId],
|
|
|
|
|
parentFolder: ["mailFolderList", trashFolderId],
|
|
|
|
|
mails: trashSubfolderListId,
|
|
|
|
|
folderType: MailFolderType.CUSTOM,
|
|
|
|
|
}),
|
|
|
|
|
)
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertEntity(spamMail)
|
|
|
|
|
await insertEntity(trashMail)
|
2023-01-12 14:54:42 +01:00
|
|
|
await insertEntity(trashSubfolderMail)
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailBody({ _id: spamMailBodyId }))
|
|
|
|
|
await insertEntity(createMailBody({ _id: trashMailBodyId }))
|
2023-01-12 14:54:42 +01:00
|
|
|
await insertEntity(createMailBody({ _id: trashSubfolderMailBodyId }))
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-11-30 17:15:08 +01:00
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
const allEntities = await dbFacade.all("select * from list_entities", [])
|
2023-01-12 14:54:42 +01:00
|
|
|
o(allEntities.map((r) => r.elementId.value)).deepEquals([spamFolderId, trashFolderId, trashSubfolderId])
|
2022-08-11 16:38:53 +02:00
|
|
|
|
2023-01-12 14:54:42 +01:00
|
|
|
o(await getAllIdsForType(MailFolderTypeRef)).deepEquals([spamFolderId, trashFolderId, trashSubfolderId])
|
2022-08-11 16:38:53 +02:00
|
|
|
o(await getAllIdsForType(MailTypeRef)).deepEquals([])
|
|
|
|
|
o(await getAllIdsForType(MailBodyTypeRef)).deepEquals([])
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
2023-01-12 17:26:41 +01:00
|
|
|
o("trash and spam are cleared", async function () {
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
const spamDetailsId: IdTuple = ["detailsListId", "spamDetailsId"]
|
|
|
|
|
const trashDetailsId: IdTuple = ["detailsListId", "trashDetailsId"]
|
|
|
|
|
|
|
|
|
|
const spamMailId = offsetId(2)
|
|
|
|
|
const trashMailId = offsetId(2)
|
|
|
|
|
const spamMail = createMail({ _id: [spamListId, spamMailId], mailDetails: spamDetailsId })
|
|
|
|
|
const trashMail = createMail({ _id: [trashListId, trashMailId], mailDetails: trashDetailsId })
|
|
|
|
|
|
|
|
|
|
await storage.init({ userId, databaseKey, timeRangeDays, forceNewDatabase: false })
|
|
|
|
|
|
|
|
|
|
await insertEntity(spamMail)
|
|
|
|
|
await insertEntity(trashMail)
|
2023-11-09 17:04:42 +01:00
|
|
|
await insertEntity(createMailDetailsBlob({ _id: spamDetailsId, details: createTestEntity(MailDetailsTypeRef) }))
|
|
|
|
|
await insertEntity(createMailDetailsBlob({ _id: trashDetailsId, details: createTestEntity(MailDetailsTypeRef) }))
|
add MailDetails feature, #4719
server issues: 1276, 1271, 1279, 1272, 1270, 1258, 1254, 1253, 1242, 1241
2022-11-03 19:03:54 +01:00
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
|
|
|
|
const allEntities = await dbFacade.all("select * from list_entities", [])
|
|
|
|
|
o(allEntities.map((r) => r.elementId.value)).deepEquals([spamFolderId, trashFolderId])
|
|
|
|
|
|
|
|
|
|
o(await getAllIdsForType(MailFolderTypeRef)).deepEquals([spamFolderId, trashFolderId])
|
|
|
|
|
o(await getAllIdsForType(MailTypeRef)).deepEquals([])
|
|
|
|
|
o(await getAllIdsForType(MailDetailsBlobTypeRef)).deepEquals([])
|
|
|
|
|
})
|
|
|
|
|
|
2022-04-20 10:39:52 +02:00
|
|
|
o("normal folder is partially cleared", async function () {
|
|
|
|
|
const inboxMailList = "inboxMailList"
|
|
|
|
|
const beforeMailBodyId = "beforeMailBodyId"
|
|
|
|
|
const afterMailBodyId = "afterMailBodyId"
|
|
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
const mailBefore = createMail({ _id: [inboxMailList, offsetId(-2)], body: beforeMailBodyId })
|
|
|
|
|
const mailAfter = createMail({ _id: [inboxMailList, offsetId(2)], body: afterMailBodyId })
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", "folderId"], mails: inboxMailList, folderType: MailFolderType.INBOX }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertEntity(mailBefore)
|
|
|
|
|
await insertEntity(mailAfter)
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailBody({ _id: beforeMailBodyId }))
|
|
|
|
|
await insertEntity(createMailBody({ _id: afterMailBodyId }))
|
2022-11-30 17:15:08 +01:00
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
2022-08-11 16:38:53 +02:00
|
|
|
|
|
|
|
|
const allMailIds = await getAllIdsForType(MailTypeRef)
|
|
|
|
|
o(allMailIds).deepEquals([getElementId(mailAfter)])
|
|
|
|
|
const allMailBodyIds = await getAllIdsForType(MailBodyTypeRef)
|
|
|
|
|
o(allMailBodyIds).deepEquals([afterMailBodyId])
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
2022-04-06 16:55:16 +02:00
|
|
|
|
2022-04-20 10:39:52 +02:00
|
|
|
o("normal folder is completely cleared", async function () {
|
|
|
|
|
const inboxMailList = "inboxMailList"
|
|
|
|
|
const mailBodyId1 = "mailBodyId1"
|
|
|
|
|
const mailBodyId2 = "afterMailBodyId"
|
2022-04-06 16:55:16 +02:00
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
const mail1 = createMail({ _id: [inboxMailList, offsetId(-2)], body: mailBodyId1 })
|
|
|
|
|
const mail2 = createMail({ _id: [inboxMailList, offsetId(-3)], body: mailBodyId2 })
|
2022-04-06 16:55:16 +02:00
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", "folderId"], mails: inboxMailList, folderType: MailFolderType.INBOX }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertEntity(mail1)
|
|
|
|
|
await insertEntity(mail2)
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailBody({ _id: mailBodyId1 }))
|
|
|
|
|
await insertEntity(createMailBody({ _id: mailBodyId2 }))
|
2022-11-30 17:15:08 +01:00
|
|
|
|
|
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
2022-08-11 16:38:53 +02:00
|
|
|
|
|
|
|
|
o(await getAllIdsForType(MailTypeRef)).deepEquals([])
|
|
|
|
|
o(await getAllIdsForType(MailBodyTypeRef)).deepEquals([])
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
|
|
o("when mail is deleted, attachment is also deleted", async function () {
|
|
|
|
|
const inboxMailList = "inboxMailList"
|
|
|
|
|
const beforeMailBodyId = "beforeMailBodyId"
|
|
|
|
|
const afterMailBodyId = "afterMailBodyId"
|
|
|
|
|
const fileListId = "fileListId"
|
|
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
const fileBefore = createFile({ _id: [fileListId, "fileBefore"] })
|
|
|
|
|
const fileAfter = createFile({ _id: [fileListId, "fileAfter"] })
|
|
|
|
|
const mailBefore = createMail({ _id: [inboxMailList, offsetId(-2)], body: beforeMailBodyId, attachments: [fileBefore._id] })
|
|
|
|
|
const mailAfter = createMail({ _id: [inboxMailList, offsetId(2)], body: afterMailBodyId, attachments: [fileAfter._id] })
|
2022-04-20 10:39:52 +02:00
|
|
|
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailFolder({ _id: ["mailFolderList", "folderId"], mails: inboxMailList, folderType: MailFolderType.INBOX }))
|
2022-08-11 16:38:53 +02:00
|
|
|
await insertEntity(mailBefore)
|
|
|
|
|
await insertEntity(mailAfter)
|
|
|
|
|
await insertEntity(fileBefore)
|
|
|
|
|
await insertEntity(fileAfter)
|
2022-12-27 15:37:40 +01:00
|
|
|
await insertEntity(createMailBody({ _id: beforeMailBodyId }))
|
|
|
|
|
await insertEntity(createMailBody({ _id: afterMailBodyId }))
|
2022-08-11 16:38:53 +02:00
|
|
|
|
2022-11-30 17:15:08 +01:00
|
|
|
// Here we clear the excluded data
|
|
|
|
|
await storage.clearExcludedData(timeRangeDays, userId)
|
|
|
|
|
|
2022-08-11 16:38:53 +02:00
|
|
|
o(await getAllIdsForType(MailTypeRef)).deepEquals([getElementId(mailAfter)])
|
|
|
|
|
o(await getAllIdsForType(FileTypeRef)).deepEquals([getElementId(fileAfter)])
|
|
|
|
|
})
|
2022-04-20 10:39:52 +02:00
|
|
|
})
|
2022-04-06 16:55:16 +02:00
|
|
|
})
|
|
|
|
|
|
2022-04-20 10:39:52 +02:00
|
|
|
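	// End-to-end check that init, put, range bookkeeping and clearExcludedData work together on a real database file.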
o.spec("Integration test", function () {
|
2022-12-27 15:37:40 +01:00
|
|
|
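		// Builds `numMails` mail/mailBody pairs in the given list, drawing ids from the given generator so they stay ordered.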
		function createMailList(numMails, listId, idGenerator, getSubject, getBody): { mails: Array<Mail>; mailBodies: Array<MailBody> } {
			const mails: Array<Mail> = []
			const mailBodies: Array<MailBody> = []
			for (let i = 0; i < numMails; ++i) {
				const mailId = idGenerator.getNext()
				const bodyId = idGenerator.getNext()
				mails.push(
					createMail({
						_id: [listId, mailId],
						subject: getSubject(i),
						body: bodyId,
					}),
				)
				mailBodies.push(
					createMailBody({
						_id: bodyId,
						text: getBody(i),
					}),
				)
			}
			return { mails, mailBodies }
		}

o("cleanup works as expected", async function () {
|
|
|
|
|
			// Ids are spread five days on either side of the time range cutoff (the time range itself is ten days)
			const oldIds = new IdGenerator(offsetId(-5))
			const newIds = new IdGenerator(offsetId(5))

			const inboxListId = oldIds.getNext()
			const inboxFolder = createMailFolder({
				_id: [userId, oldIds.getNext()],
				mails: inboxListId,
				folderType: MailFolderType.INBOX,
			})
			const { mails: oldInboxMails, mailBodies: oldInboxMailBodies } = createMailList(
				3,
				inboxListId,
				oldIds,
				(i) => `old subject ${i}`,
				(i) => `old body ${i}`,
			)

			const { mails: newInboxMails, mailBodies: newInboxMailBodies } = createMailList(
				3,
				inboxListId,
				newIds,
				(i) => `new subject ${i}`,
				(i) => `new body ${i}`,
			)

			const trashListId = oldIds.getNext()
			const trashFolder = createMailFolder({
				_id: [userId, oldIds.getNext()],
				mails: trashListId,
				folderType: MailFolderType.TRASH,
			})
			const { mails: trashMails, mailBodies: trashMailBodies } = createMailList(
				3,
				trashListId,
				newIds,
				(i) => `trash subject ${i}`,
				(i) => `trash body ${i}`,
			)

			const spamListId = oldIds.getNext()
			const spamFolder = createMailFolder({
				_id: [userId, oldIds.getNext()],
				mails: spamListId,
				folderType: MailFolderType.SPAM,
			})

			const everyEntity = [
				inboxFolder,
				trashFolder,
				spamFolder,
				...oldInboxMails,
				...oldInboxMailBodies,
				...newInboxMails,
				...newInboxMailBodies,
				...trashMails,
				...trashMailBodies,
			]

			await storage.init({ userId, databaseKey: offlineDatabaseTestKey, timeRangeDays, forceNewDatabase: false })

			for (let entity of everyEntity) {
				await storage.put(entity)
			}

			await storage.setNewRangeForList(MailTypeRef, inboxListId, getFirstOrThrow(oldInboxMails)._id[1], lastThrow(newInboxMails)._id[1])
			await storage.setNewRangeForList(MailTypeRef, trashListId, getFirstOrThrow(trashMails)._id[1], lastThrow(trashMails)._id[1])

			// Here we clear the excluded data
			await storage.clearExcludedData(timeRangeDays, userId)

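			// Asserts that the entity identified by _id/_type is stored as `expected` (null meaning it was removed).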
			const assertContents = async ({ _id, _type }, expected, msg) => {
				const { listId, elementId } = expandId(_id)
				return o(await storage.get(_type, listId, elementId)).deepEquals(expected)(msg)
			}

			await promiseMap(oldInboxMails, (mail) => assertContents(mail, null, `old mail ${mail._id} was deleted`))
			await promiseMap(oldInboxMailBodies, (body) => assertContents(body, null, `old mailBody ${body._id} was deleted`))

			await promiseMap(newInboxMails, (mail) => assertContents(mail, mail, `new mail ${mail._id} was not deleted`))
			await promiseMap(newInboxMailBodies, (body) => assertContents(body, body, `new mailBody ${body._id} was not deleted`))

			// All of trash should be cleared, even though the trash mails have new ids
			await promiseMap(trashMails, (mail) => assertContents(mail, null, `trash mail ${mail._id} was deleted`))
			await promiseMap(trashMailBodies, (body) => assertContents(body, null, `trash mailBody ${body._id} was deleted`))

			await assertContents(inboxFolder, inboxFolder, `inbox folder was not deleted`)
			await assertContents(trashFolder, trashFolder, `trash folder was not deleted`)

			o(await storage.getRangeForList(MailTypeRef, inboxListId)).deepEquals({
				lower: cutoffId,
				upper: lastThrow(newInboxMails)._id[1],
			})("lower range for inbox was set to cutoff")
			o(await storage.getRangeForList(MailTypeRef, trashListId)).equals(null)("range for trash was deleted")
		})
	})
})