Only remove previous deltas from metadata under some conditions (#3419)
Only allow removing previous delta tokens, not previous paths. This ensures folder deletions will be picked up if all items are enumerated. Updates tests to match the new behavior, mostly adjusting expectations around the del list when doNotMerge is applied. --- #### Does this PR need a docs update or release note? - [ ] ✅ Yes, it's included - [ ] 🕐 Yes, but in a later PR - [x] ⛔ No #### Type of change - [x] 🌻 Feature - [ ] 🐛 Bugfix - [ ] 🗺️ Documentation - [ ] 🤖 Supportability/Tests - [ ] 💻 CI/Deployment - [ ] 🧹 Tech Debt/Cleanup #### Issue(s) * closes #2334 #### Test Plan - [ ] 💪 Manual - [x] ⚡ Unit test - [ ] 💚 E2E
This commit is contained in:
parent
a26676c942
commit
f8252b4cd3
@ -9,7 +9,6 @@ import (
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
|
||||
@ -193,42 +192,41 @@ func deserializeMetadata(
|
||||
continue
|
||||
}
|
||||
|
||||
// This is conservative, but report an error if any of the items for
|
||||
// any of the deserialized maps have duplicate drive IDs. This will
|
||||
// cause the entire backup to fail, but it's not clear if higher
|
||||
// layers would have caught this. Worst case if we don't handle this
|
||||
// we end up in a situation where we're sourcing items from the wrong
|
||||
// base in kopia wrapper.
|
||||
if errors.Is(err, errExistingMapping) {
|
||||
return nil, nil, clues.Wrap(err, "deserializing metadata file").WithClues(ictx)
|
||||
// This is conservative, but report an error if either any of the items
|
||||
// for any of the deserialized maps have duplicate drive IDs or there's
|
||||
// some other problem deserializing things. This will cause the entire
|
||||
// backup to fail, but it's not clear if higher layers would have caught
|
||||
// these cases. We can make the logic for deciding when to continue vs.
|
||||
// when to fail less strict in the future if needed.
|
||||
if err != nil {
|
||||
return nil, nil, clues.Stack(err).WithClues(ictx)
|
||||
}
|
||||
|
||||
err = clues.Stack(err).WithClues(ictx)
|
||||
|
||||
el.AddRecoverable(err)
|
||||
logger.CtxErr(ictx, err).Error("deserializing base backup metadata")
|
||||
}
|
||||
}
|
||||
|
||||
// Go through and remove partial results (i.e. path mapping but no delta URL
|
||||
// or vice-versa).
|
||||
for k, v := range prevDeltas {
|
||||
// Remove entries with an empty delta token as it's not useful.
|
||||
if len(v) == 0 {
|
||||
delete(prevDeltas, k)
|
||||
delete(prevFolders, k)
|
||||
// Go through and remove delta tokens if we didn't have any paths for them
|
||||
// or one or more paths are empty (incorrect somehow). This will ensure we
|
||||
// don't accidentally try to pull in delta results when we should have
|
||||
// enumerated everything instead.
|
||||
//
|
||||
// Loop over the set of previous deltas because it's alright to have paths
|
||||
// without a delta but not to have a delta without paths. This way ensures
|
||||
// we check at least all the path sets for the deltas we have.
|
||||
for drive := range prevDeltas {
|
||||
paths := prevFolders[drive]
|
||||
if len(paths) == 0 {
|
||||
delete(prevDeltas, drive)
|
||||
}
|
||||
|
||||
// Remove entries without a folders map as we can't tell kopia the
|
||||
// hierarchy changes.
|
||||
if _, ok := prevFolders[k]; !ok {
|
||||
delete(prevDeltas, k)
|
||||
}
|
||||
}
|
||||
|
||||
for k := range prevFolders {
|
||||
if _, ok := prevDeltas[k]; !ok {
|
||||
delete(prevFolders, k)
|
||||
// Drives have only a single delta token. If we find any folder that
|
||||
// seems like the path is bad we need to drop the entire token and start
|
||||
// fresh. Since we know the token will be gone we can also stop checking
|
||||
// for other possibly incorrect folder paths.
|
||||
for _, prevPath := range paths {
|
||||
if len(prevPath) == 0 {
|
||||
delete(prevDeltas, drive)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -912,8 +912,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
|
||||
},
|
||||
},
|
||||
expectedDeltas: map[string]string{},
|
||||
expectedPaths: map[string]map[string]string{},
|
||||
errCheck: assert.NoError,
|
||||
expectedPaths: map[string]map[string]string{
|
||||
driveID1: {
|
||||
folderID1: path1,
|
||||
},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
// An empty path map but valid delta results in metadata being returned
|
||||
@ -936,7 +940,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
|
||||
}
|
||||
},
|
||||
},
|
||||
expectedDeltas: map[string]string{driveID1: deltaURL1},
|
||||
expectedDeltas: map[string]string{},
|
||||
expectedPaths: map[string]map[string]string{driveID1: {}},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
@ -965,9 +969,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
|
||||
}
|
||||
},
|
||||
},
|
||||
expectedDeltas: map[string]string{},
|
||||
expectedPaths: map[string]map[string]string{},
|
||||
errCheck: assert.NoError,
|
||||
expectedDeltas: map[string]string{driveID1: ""},
|
||||
expectedPaths: map[string]map[string]string{
|
||||
driveID1: {
|
||||
folderID1: path1,
|
||||
},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "SuccessTwoDrivesTwoCollections",
|
||||
@ -1033,9 +1041,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
|
||||
}
|
||||
},
|
||||
},
|
||||
expectedDeltas: map[string]string{},
|
||||
expectedPaths: map[string]map[string]string{},
|
||||
errCheck: assert.Error,
|
||||
errCheck: assert.Error,
|
||||
},
|
||||
{
|
||||
// Unexpected files are logged and skipped. They don't cause an error to
|
||||
@ -1167,8 +1173,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
|
||||
deltas, paths, err := deserializeMetadata(ctx, cols, fault.New(true))
|
||||
test.errCheck(t, err)
|
||||
|
||||
assert.Equal(t, test.expectedDeltas, deltas)
|
||||
assert.Equal(t, test.expectedPaths, paths)
|
||||
assert.Equal(t, test.expectedDeltas, deltas, "deltas")
|
||||
assert.Equal(t, test.expectedPaths, paths, "paths")
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1281,9 +1287,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
prevFolderPaths map[string]map[string]string
|
||||
// Collection name -> set of item IDs. We can't check item data because
|
||||
// that's not mocked out. Metadata is checked separately.
|
||||
expectedCollections map[string]map[data.CollectionState][]string
|
||||
expectedDeltaURLs map[string]string
|
||||
expectedFolderPaths map[string]map[string]string
|
||||
expectedCollections map[string]map[data.CollectionState][]string
|
||||
expectedDeltaURLs map[string]string
|
||||
expectedFolderPaths map[string]map[string]string
|
||||
// Items that should be excluded from the base. Only populated if the delta
|
||||
// was valid and there was at least 1 previous folder path.
|
||||
expectedDelList *pmMock.PrefixMap
|
||||
expectedSkippedCount int
|
||||
// map full or previous path (prefers full) -> bool
|
||||
@ -1366,10 +1374,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{
|
||||
driveID1: {},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{},
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NewState: {}},
|
||||
folderPath1: {data.NewState: {"folder", "file"}},
|
||||
@ -1383,9 +1389,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
|
||||
@ -1403,10 +1411,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{
|
||||
driveID1: {},
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{},
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NewState: {}},
|
||||
folderPath1: {data.NewState: {"folder", "file"}},
|
||||
@ -1420,9 +1426,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
|
||||
@ -1442,7 +1450,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{
|
||||
driveID1: {},
|
||||
driveID1: {
|
||||
"root": rootFolderPath1,
|
||||
},
|
||||
},
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NotMovedState: {"file"}},
|
||||
@ -1484,11 +1494,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
rootFolderPath1: {data.NewState: {}},
|
||||
folderPath1: {data.NewState: {"folder", "file"}},
|
||||
},
|
||||
expectedDeltaURLs: map[string]string{},
|
||||
expectedFolderPaths: map[string]map[string]string{},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDeltaURLs: map[string]string{},
|
||||
expectedFolderPaths: map[string]map[string]string{
|
||||
driveID1: {
|
||||
"root": rootFolderPath1,
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_TwoItemPages_NoErrors",
|
||||
@ -1530,9 +1547,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file", "file2"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "TwoDrives_OneItemPageEach_NoErrors",
|
||||
@ -1587,10 +1606,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder2": folderPath2,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
rootFolderPath2: getDelList("file2"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
rootFolderPath2: true,
|
||||
folderPath2: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors",
|
||||
@ -1645,10 +1667,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath2,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
rootFolderPath2: getDelList("file2"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
rootFolderPath2: true,
|
||||
folderPath2: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OneDrive_OneItemPage_Errors",
|
||||
@ -1772,7 +1797,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
prevFolderPaths: map[string]map[string]string{
|
||||
driveID1: {},
|
||||
driveID1: {
|
||||
"root": rootFolderPath1,
|
||||
},
|
||||
},
|
||||
expectedCollections: map[string]map[data.CollectionState][]string{
|
||||
rootFolderPath1: {data.NotMovedState: {"file"}},
|
||||
@ -1929,9 +1956,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file", "file2"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
expectedSkippedCount: 2,
|
||||
},
|
||||
{
|
||||
@ -2110,9 +2139,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"root": rootFolderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One Drive Item Made And Deleted",
|
||||
@ -2153,9 +2183,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"folder": folderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
folderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One Drive Random Folder Delete",
|
||||
@ -2187,6 +2219,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "One Drive Random Item Delete",
|
||||
@ -2217,9 +2252,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
"root": rootFolderPath1,
|
||||
},
|
||||
},
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
|
||||
rootFolderPath1: getDelList("file"),
|
||||
}),
|
||||
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
|
||||
doNotMergeItems: map[string]bool{
|
||||
rootFolderPath1: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "TwoPriorDrives_OneTombstoned",
|
||||
@ -2352,7 +2388,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||
}
|
||||
|
||||
assert.Equal(t, test.expectedDeltaURLs, deltas, "delta urls")
|
||||
assert.Equal(t, test.expectedFolderPaths, paths, "folder paths")
|
||||
assert.Equal(t, test.expectedFolderPaths, paths, "folder paths")
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user