allow replace on aggregations for patch operation

We now allow replacement of aggregate lists if all items in the lists
match (i.e. addedItems and removedItems are empty) but the order of the
aggregated entities does not. In that case we replace the entire list to
update the order of the aggregates.

Co-authored-by: das <das@tutao.de>
Co-authored-by: jomapp <17314077+jomapp@users.noreply.github.com>
This commit is contained in:
abp 2025-07-14 11:01:59 +02:00
parent f778789f94
commit 227d311a2c
No known key found for this signature in database
GPG key ID: 791D4EC38A7AA7C2
5 changed files with 104 additions and 24 deletions

View file

@ -3,7 +3,7 @@ import { ClientTypeReferenceResolver, PatchOperationType } from "../EntityFuncti
import { createPatch, createPatchList, Patch, PatchList } from "../../entities/sys/TypeRefs"
import { AssociationType, Cardinality, ValueType } from "../EntityConstants"
import { assertNotNull, deepEqual, Nullable } from "@tutao/tutanota-utils/dist/Utils"
import { arrayEquals, TypeRef } from "@tutao/tutanota-utils"
import { arrayEquals, arrayEqualsWithPredicate, isEmpty, TypeRef } from "@tutao/tutanota-utils"
import { AttributeModel } from "../AttributeModel"
import { ProgrammingError } from "../error/ProgrammingError"
import { IDENTITY_FIELDS, isSameId } from "./EntityUtils"
@ -211,6 +211,21 @@ export async function computePatches(
}),
)
}
const areItemsIdentical = originalAggregatedEntities.every((item) => modifiedAggregatedEntities.some((element) => deepEqual(element, item)))
if (
isEmpty(addedItems) &&
isEmpty(removedItems) &&
areItemsIdentical &&
!arrayEqualsWithPredicate(originalAggregatedEntities, modifiedAggregatedEntities, deepEqual)
) {
patches.push(
createPatch({
attributePath: attributeIdStr,
value: JSON.stringify(modifiedAggregatedUntypedEntities),
patchOperation: PatchOperationType.REPLACE,
}),
)
}
} else {
// non aggregation associations
const originalAssociationValue = (originalInstance[attributeId] ?? []) as Array<Id | IdTuple>

View file

@ -220,10 +220,8 @@ export class PatchMerger {
case PatchOperationType.REPLACE: {
if (isValue) {
instanceToChange[attributeId] = value as ParsedValue
} else if (!isAggregationAssociation) {
} else if (isAssociation) {
instanceToChange[attributeId] = value as ParsedAssociation
} else {
throw new PatchOperationError("attempted to replace aggregation " + typeModel.associations[attributeId].name + " on " + typeModel.name)
}
break
}

View file

@ -59,6 +59,7 @@ import "./api/worker/rest/CustomCacheHandlerTest.js"
import "./api/worker/rest/EntityRestCacheTest.js"
import "./api/worker/rest/EntityRestClientTest.js"
import "./api/worker/rest/EphemeralCacheStorageTest.js"
import "./api/worker/rest/PatchGeneratorTest.js"
import "./api/worker/rest/ServiceExecutorTest.js"
import "./api/worker/search/BulkMailLoaderTest.js"
import "./api/worker/search/ContactIndexerTest.js"

View file

@ -28,6 +28,7 @@ import {
Mail,
MailAddress,
MailAddressTypeRef,
MailBox,
MailboxGroupRoot,
MailboxGroupRootTypeRef,
MailBoxTypeRef,
@ -35,6 +36,7 @@ import {
MailDetailsBlobTypeRef,
MailDetailsTypeRef,
MailFolderRefTypeRef,
MailFolderTypeRef,
MailTypeRef,
OutOfOfficeNotificationRecipientListTypeRef,
RecipientsTypeRef,
@ -428,7 +430,7 @@ o.spec("PatchMergerTest", () => {
])
})
o.test("apply_replace_on_One_aggregation_throws", async () => {
o.test("apply_replace_on_One_aggregation_works", async () => {
const testMail = createSystemMail({
_id: ["listId", "elementId"],
_ownerEncSessionKey: encryptedSessionKey.key,
@ -453,27 +455,30 @@ o.spec("PatchMergerTest", () => {
}),
]
const e = await assertThrows(
PatchOperationError,
async () => await patchMerger.getPatchedInstanceParsed(MailTypeRef, "listId", "elementId", patches),
)
o(e.message.toString().includes("attempted to replace aggregation sender on Mail")).equals(true)
const testMailPatchedParsed = assertNotNull(await patchMerger.getPatchedInstanceParsed(MailTypeRef, "listId", "elementId", patches))
const testMailPatched = await instancePipeline.modelMapper.mapToInstance<Mail>(MailTypeRef, testMailPatchedParsed)
o(testMailPatched.sender.name).deepEquals("new name")
o(testMailPatched.sender.address).deepEquals("address@tutao.de")
})
o.test("apply_replace_on_ZeroOrOne_aggregation_throws", async () => {
o.test("apply_replace_on_ZeroOrOne_aggregation_works", async () => {
const mailbox = createTestEntity(MailBoxTypeRef, {
_id: "elementId",
_ownerEncSessionKey: encryptedSessionKey.key,
_ownerKeyVersion: encryptedSessionKey.encryptingKeyVersion.toString(),
_ownerGroup: ownerGroupId,
folders: createTestEntity(MailFolderRefTypeRef),
folders: null,
sentAttachments: "attachmentsId",
receivedAttachments: "attachmentsId",
importedAttachments: "attachmentsId",
mailImportStates: "importStatesId",
})
await storage.put(MailBoxTypeRef, await toStorableInstance(mailbox))
const mailBoxTypeModel = await typeModelResolver.resolveClientTypeReference(MailBoxTypeRef)
const mailFolderRefAttributeId = assertNotNull(AttributeModel.getAttributeId(mailBoxTypeModel, "folders"))
const mailFolderRefToAdd = createTestEntity(MailFolderRefTypeRef)
const mailFolderRefToAdd = createTestEntity(MailFolderRefTypeRef, { folders: "mailFolderId" })
const untypedMailFolderRef = await instancePipeline.mapAndEncrypt(MailFolderRefTypeRef, mailFolderRefToAdd, sk)
const patches: Array<Patch> = [
@ -483,15 +488,13 @@ o.spec("PatchMergerTest", () => {
patchOperation: PatchOperationType.REPLACE,
}),
]
const e = await assertThrows(
PatchOperationError,
async () => await patchMerger.getPatchedInstanceParsed(MailBoxTypeRef, null, "elementId", patches),
)
o(e.message.toString().includes("attempted to replace aggregation folders on MailBox")).equals(true)
o(mailbox.folders).equals(null)
const testMailBoxPatchedParsed = assertNotNull(await patchMerger.getPatchedInstanceParsed(MailBoxTypeRef, null, "elementId", patches))
const testMailBoxPatched = await instancePipeline.modelMapper.mapToInstance<MailBox>(MailBoxTypeRef, testMailBoxPatchedParsed)
o(testMailBoxPatched.folders?.folders).equals("mailFolderId")
})
o.test("apply_replace_on_Any_aggregation_throws", async () => {
o.test("apply_replace_on_Any_aggregation_works", async () => {
const mailDetailsBlob = createTestEntity(
MailDetailsBlobTypeRef,
{
@ -535,11 +538,16 @@ o.spec("PatchMergerTest", () => {
}),
]
const e = await assertThrows(
PatchOperationError,
async () => await patchMerger.getPatchedInstanceParsed(MailDetailsBlobTypeRef, "listId", "elementId", patches),
const mailDetailsBlobPatchedParsed = assertNotNull(
await patchMerger.getPatchedInstanceParsed(MailDetailsBlobTypeRef, "listId", "elementId", patches),
)
o(e.message.toString().includes("attempted to replace aggregation toRecipients on Recipients")).equals(true)
const mailDetailsBlobPatched = await instancePipeline.modelMapper.mapToInstance<MailDetailsBlob>(
MailDetailsBlobTypeRef,
mailDetailsBlobPatchedParsed,
)
const addedToRecipient = assertNotNull(mailDetailsBlobPatched.details.recipients.toRecipients.pop())
o(addedToRecipient.name).equals("new name")
o(addedToRecipient.address).equals("address@tutao.de")
})
})

View file

@ -332,6 +332,64 @@ o.spec("computePatches", function () {
])
})
o("computePatches works on aggregations and replace operation when aggregates are identical but have different order", async function () {
const testEntity = await createFilledTestEntity()
testEntity.testAssociation.push(
await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId",
testNumber: "1",
}),
)
testEntity.testAssociation.push(
await createTestEntityWithDummyResolver(TestAggregateRef, {
_id: "newAgId2",
testNumber: "2",
}),
)
testEntity._original = structuredClone(testEntity)
const elementToMove = testEntity.testAssociation[0]
testEntity.testAssociation.splice(0, 1)
testEntity.testAssociation.push(elementToMove)
let sk = aes256RandomKey()
const originalParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(TestTypeRef, assertNotNull(testEntity._original))
const currentParsedInstance = await dummyInstancePipeline.modelMapper.mapToClientModelParsedInstance(TestTypeRef, testEntity)
const currentEncryptedParsedInstance = await dummyInstancePipeline.cryptoMapper.encryptParsedInstance(
testTypeModel as ClientTypeModel,
currentParsedInstance,
sk,
)
const currentUntypedInstance = await dummyInstancePipeline.typeMapper.applyDbTypes(testTypeModel as ClientTypeModel, currentEncryptedParsedInstance)
const encryptedAssociationArray = AttributeModel.getAttribute(
currentEncryptedParsedInstance,
"testAssociation",
testTypeModel,
) as Array<ClientModelEncryptedParsedInstance>
const testAssociationFirstEncryptedInstance = encryptedAssociationArray[0]
const testAssociationSecondEncryptedInstance = encryptedAssociationArray[1]
const testAssociationOriginalEncryptedInstance = encryptedAssociationArray[2]
let objectDiff = await computePatches(
originalParsedInstance,
currentParsedInstance,
currentUntypedInstance,
testTypeModel,
dummyTypeReferenceResolver,
false,
)
o(objectDiff).deepEquals([
createPatch({
attributePath: "3",
value: JSON.stringify([
testAssociationFirstEncryptedInstance,
testAssociationSecondEncryptedInstance,
testAssociationOriginalEncryptedInstance,
]),
patchOperation: PatchOperationType.REPLACE,
}),
])
})
o("computePatches works on aggregations and removeitem operation", async function () {
const testEntity = await createFilledTestEntity()
testEntity.testAssociation.pop()