deltaURL stub refactor (#4865)

Cleans up the handling of deltaURL by giving it its own func instead of reusing the id() func. Also minimizes the delta stub constructor by using the suffix pattern already used by the id and folder constructors.

No logic changes — just a quick refactor.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-12-19 13:12:42 -07:00 committed by GitHub
parent b896405e92
commit 92b996a3de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 925 additions and 438 deletions

View File

@ -8,7 +8,6 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
@ -31,8 +30,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/custom" "github.com/alcionai/corso/src/pkg/services/m365/custom"
) )
var errGetTreeNotImplemented = clues.New("forced error: cannot run tree-based backup: incomplete implementation")
const ( const (
restrictedDirectory = "Site Pages" restrictedDirectory = "Site Pages"
@ -117,7 +114,7 @@ func deserializeAndValidateMetadata(
paths := prevs[drive] paths := prevs[drive]
if len(paths) == 0 { if len(paths) == 0 {
logger.Ctx(ictx).Info("dropping drive delta due to 0 prev paths") logger.Ctx(ictx).Info("dropping delta metadata: no matching drive entry in previous paths")
delete(deltas, drive) delete(deltas, drive)
} }
@ -127,7 +124,7 @@ func deserializeAndValidateMetadata(
// for other possibly incorrect folder paths. // for other possibly incorrect folder paths.
for _, prevPath := range paths { for _, prevPath := range paths {
if len(prevPath) == 0 { if len(prevPath) == 0 {
logger.Ctx(ictx).Info("dropping drive delta due to 0 len path") logger.Ctx(ictx).Info("dropping delta metadata: 0 previous paths")
delete(deltas, drive) delete(deltas, drive)
break break
@ -270,19 +267,12 @@ func DeserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) erro
return clues.Wrap(err, "deserializing file contents") return clues.Wrap(err, "deserializing file contents")
} }
var duplicate bool
for k := range tmp { for k := range tmp {
if _, ok := alreadyFound[k]; ok { if _, ok := alreadyFound[k]; ok {
duplicate = true return clues.Stack(errExistingMapping).With("duplicate_key", k)
break
} }
} }
if duplicate {
return clues.Stack(errExistingMapping)
}
maps.Copy(alreadyFound, tmp) maps.Copy(alreadyFound, tmp)
return nil return nil
@ -297,13 +287,10 @@ func (c *Collections) Get(
) ([]data.BackupCollection, bool, error) { ) ([]data.BackupCollection, bool, error) {
if c.ctrl.ToggleFeatures.UseDeltaTree { if c.ctrl.ToggleFeatures.UseDeltaTree {
colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, globalExcludeItemIDs, errs) colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, globalExcludeItemIDs, errs)
if err != nil && !errors.Is(err, errGetTreeNotImplemented) {
return nil, false, clues.Wrap(err, "processing backup using tree")
}
return colls, return colls,
canUsePrevBackup, canUsePrevBackup,
errGetTreeNotImplemented clues.Wrap(err, "processing backup using tree").OrNil()
} }
deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata( deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata(

View File

@ -871,7 +871,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
mbh.DriveItemEnumeration = driveEnumerator( mbh.DriveItemEnumeration = driveEnumerator(
drive.newEnumer().with( drive.newEnumer().with(
delta("notempty", nil).with( delta(nil, "notempty").with(
aPage(test.items...)))) aPage(test.items...))))
sel := selectors.NewOneDriveBackup([]string{user}) sel := selectors.NewOneDriveBackup([]string{user})
@ -962,7 +962,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -974,7 +974,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
}, },
}, },
expectedDeltas: map[string]string{ expectedDeltas: map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
}, },
expectedPaths: map[string]map[string]string{ expectedPaths: map[string]map[string]string{
d.id: { d.id: {
@ -991,7 +991,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
} }
}, },
}, },
@ -1034,7 +1034,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1087,7 +1087,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1101,7 +1101,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d2.id: id(deltaURL, 2)}), map[string]string{d2.id: deltaURL(2)}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1113,8 +1113,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
}, },
}, },
expectedDeltas: map[string]string{ expectedDeltas: map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
d2.id: id(deltaURL, 2), d2.id: deltaURL(2),
}, },
expectedPaths: map[string]map[string]string{ expectedPaths: map[string]map[string]string{
d.id: { d.id: {
@ -1138,7 +1138,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
} }
}, },
}, },
@ -1154,7 +1154,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1164,12 +1164,12 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
}), }),
graph.NewMetadataEntry( graph.NewMetadataEntry(
"foo", "foo",
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
} }
}, },
}, },
expectedDeltas: map[string]string{ expectedDeltas: map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
}, },
expectedPaths: map[string]map[string]string{ expectedPaths: map[string]map[string]string{
d.id: { d.id: {
@ -1186,7 +1186,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1220,7 +1220,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1234,7 +1234,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL, 2)}), map[string]string{d.id: deltaURL(2)}),
} }
}, },
}, },
@ -1250,7 +1250,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d.id: id(deltaURL)}), map[string]string{d.id: deltaURL()}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1263,7 +1263,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
}, },
}, },
expectedDeltas: map[string]string{ expectedDeltas: map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
}, },
expectedPaths: map[string]map[string]string{ expectedPaths: map[string]map[string]string{
d.id: { d.id: {
@ -1283,7 +1283,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{ map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
}), }),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
@ -1299,7 +1299,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
return []graph.MetadataCollectionEntry{ return []graph.MetadataCollectionEntry{
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.DeltaURLsFileName, bupMD.DeltaURLsFileName,
map[string]string{d2.id: id(deltaURL, 2)}), map[string]string{d2.id: deltaURL(2)}),
graph.NewMetadataEntry( graph.NewMetadataEntry(
bupMD.PreviousPathFileName, bupMD.PreviousPathFileName,
map[string]map[string]string{ map[string]map[string]string{
@ -1311,8 +1311,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
}, },
}, },
expectedDeltas: map[string]string{ expectedDeltas: map[string]string{
d.id: id(deltaURL), d.id: deltaURL(),
d2.id: id(deltaURL, 2), d2.id: deltaURL(2),
}, },
expectedPaths: map[string]map[string]string{ expectedPaths: map[string]map[string]string{
d.id: { d.id: {
@ -1398,29 +1398,6 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata_ReadFailure() {
require.False(t, canUsePreviousBackup) require.False(t, canUsePreviousBackup)
} }
func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
mbh := defaultOneDriveBH(user)
opts := control.DefaultOptions()
opts.ToggleFeatures.UseDeltaTree = true
mbh.DriveItemEnumeration = driveEnumerator(
drive().newEnumer().with(
delta(id(deltaURL), nil).with(
aPage(
delItem(fileID(), rootID, isFile)))))
c := collWithMBH(mbh)
c.ctrl = opts
_, _, err := c.Get(ctx, nil, nil, fault.New(true))
require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
}
func (suite *CollectionsUnitSuite) TestGet() { func (suite *CollectionsUnitSuite) TestGet() {
metadataPath, err := path.BuildMetadata( metadataPath, err := path.BuildMetadata(
tenant, tenant,
@ -1456,7 +1433,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors", name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
delItem(fileID(), rootID, isFile))))), delItem(fileID(), rootID, isFile))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -1468,7 +1445,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NotMovedState: {}}, d.strPath(t): {data.NotMovedState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): {rootID: d.strPath(t)}, id(drivePfx, 1): {rootID: d.strPath(t)},
@ -1481,7 +1458,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors", name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveFile(d.dir(), rootID))))), driveFile(d.dir(), rootID))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -1493,7 +1470,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NotMovedState: {fileID()}}, d.strPath(t): {data.NotMovedState: {fileID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): {rootID: d.strPath(t)}, id(drivePfx, 1): {rootID: d.strPath(t)},
@ -1506,7 +1483,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_NoErrors", name: "OneDrive_OneItemPage_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()))))), driveFile(d.dir(folderName()), folderID()))))),
@ -1518,7 +1495,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1536,7 +1513,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple", name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
@ -1549,7 +1526,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1567,7 +1544,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple", name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
@ -1584,7 +1561,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID()}}, d.strPath(t, folderName()): {data.NewState: {folderID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1600,7 +1577,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_TwoItemPages_NoErrors", name: "OneDrive_TwoItemPages_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -1617,7 +1594,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1635,7 +1612,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_TwoItemPages_WithReset", name: "OneDrive_TwoItemPages_WithReset",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
@ -1657,7 +1634,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1675,7 +1652,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_TwoItemPages_WithResetCombinedWithItems", name: "OneDrive_TwoItemPages_WithResetCombinedWithItems",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -1695,7 +1672,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1713,12 +1690,12 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "TwoDrives_OneItemPageEach_NoErrors", name: "TwoDrives_OneItemPageEach_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())))), driveFile(d.dir(folderName()), folderID())))),
d2.newEnumer().with( d2.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with(aPage( deltaWReset(nil, 2).with(aPage(
driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder), driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder),
driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))), driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -1734,8 +1711,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
d2.strPath(t, folderName()): {data.NewState: {folderID(2), fileID(2)}}, d2.strPath(t, folderName()): {data.NewState: {folderID(2), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
d2.id: id(deltaURL, 2), d2.id: deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1759,12 +1736,12 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors", name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())))), driveFile(d.dir(folderName()), folderID())))),
d2.newEnumer().with( d2.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
driveFolder(d2.dir(), rootID), driveFolder(d2.dir(), rootID),
driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(), isFile))))), driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(), isFile))))),
@ -1781,8 +1758,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
d2.strPath(t, folderName()): {data.NewState: {folderID(), fileID(2)}}, d2.strPath(t, folderName()): {data.NewState: {folderID(), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
d2.id: id(deltaURL, 2), d2.id: deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1806,7 +1783,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_Errors", name: "OneDrive_OneItemPage_Errors",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta("", assert.AnError))), delta(assert.AnError))),
canUsePreviousBackup: false, canUsePreviousBackup: false,
errCheck: assert.Error, errCheck: assert.Error,
previousPaths: map[string]map[string]string{ previousPaths: map[string]map[string]string{
@ -1821,7 +1798,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder", name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aReset(), aReset(),
aPage( aPage(
driveFolder(d.dir(), rootID, 2), driveFolder(d.dir(), rootID, 2),
@ -1840,7 +1817,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}}, d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1859,7 +1836,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder", name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aReset(), aReset(),
aPage( aPage(
driveFolder(d.dir(), rootID, 2), driveFolder(d.dir(), rootID, 2),
@ -1878,7 +1855,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}}, d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1897,7 +1874,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
driveFile(d.dir(folderName()), folderID(2))), driveFile(d.dir(folderName()), folderID(2))),
@ -1923,7 +1900,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1941,7 +1918,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation", name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -1964,7 +1941,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -1982,7 +1959,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath", name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aReset(), aReset(),
aPage( aPage(
driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
@ -2003,7 +1980,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2021,7 +1998,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aReset(), aReset(),
aPage( aPage(
driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder), driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
@ -2044,7 +2021,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2062,7 +2039,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "OneDrive Two Item Pages with Malware", name: "OneDrive Two Item Pages with Malware",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
@ -2081,7 +2058,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}}, d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2100,7 +2077,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Deleted Folder In New Results With Invalid Delta", name: "One Drive Deleted Folder In New Results With Invalid Delta",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
@ -2127,7 +2104,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName(2)): {data.DeletedState: {}}, d.strPath(t, folderName(2)): {data.DeletedState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL, 2), id(drivePfx, 1): deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2146,7 +2123,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Folder Delete After Invalid Delta", name: "One Drive Folder Delete After Invalid Delta",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPageWReset( aPageWReset(
delItem(folderID(), rootID, isFolder))))), delItem(folderID(), rootID, isFolder))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -2162,7 +2139,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.DeletedState: {}}, d.strPath(t, folderName()): {data.DeletedState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2179,7 +2156,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Item Delete After Invalid Delta", name: "One Drive Item Delete After Invalid Delta",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPageWReset( aPageWReset(
delItem(fileID(), rootID, isFile))))), delItem(fileID(), rootID, isFile))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -2193,7 +2170,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NewState: {}}, d.strPath(t): {data.NewState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2209,7 +2186,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Folder Made And Deleted", name: "One Drive Folder Made And Deleted",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -2225,7 +2202,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NewState: {}}, d.strPath(t): {data.NewState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL, 2), id(drivePfx, 1): deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2241,7 +2218,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Folder Created -> Deleted -> Created", name: "One Drive Folder Created -> Deleted -> Created",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -2261,7 +2238,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID(1), fileID(1)}}, d.strPath(t, folderName()): {data.NewState: {folderID(1), fileID(1)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL, 2), id(drivePfx, 1): deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2279,7 +2256,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Folder Deleted -> Created -> Deleted", name: "One Drive Folder Deleted -> Created -> Deleted",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
delItem(folderID(), rootID, isFolder), delItem(folderID(), rootID, isFolder),
delItem(fileID(), rootID, isFile)), delItem(fileID(), rootID, isFile)),
@ -2302,7 +2279,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.DeletedState: {}}, d.strPath(t, folderName()): {data.DeletedState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL, 2), id(drivePfx, 1): deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2316,7 +2293,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Folder Created -> Deleted -> Created with prev", name: "One Drive Folder Created -> Deleted -> Created with prev",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL, 2), nil).with( deltaWReset(nil, 2).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -2339,7 +2316,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.DeletedState: {}, data.NewState: {folderID(1), fileID(1)}}, d.strPath(t, folderName()): {data.DeletedState: {}, data.NewState: {folderID(1), fileID(1)}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL, 2), id(drivePfx, 1): deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2357,7 +2334,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Item Made And Deleted", name: "One Drive Item Made And Deleted",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())), driveFile(d.dir(folderName()), folderID())),
@ -2372,7 +2349,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t, folderName()): {data.NewState: {folderID()}}, d.strPath(t, folderName()): {data.NewState: {folderID()}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2390,7 +2367,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Random Folder Delete", name: "One Drive Random Folder Delete",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
deltaWReset(id(deltaURL), nil).with( deltaWReset(nil).with(
aPage( aPage(
delItem(folderID(), rootID, isFolder))))), delItem(folderID(), rootID, isFolder))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -2402,7 +2379,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NewState: {}}, d.strPath(t): {data.NewState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2418,7 +2395,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "One Drive Random Item Delete", name: "One Drive Random Item Delete",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
delItem(fileID(), rootID, isFile))))), delItem(fileID(), rootID, isFile))))),
canUsePreviousBackup: true, canUsePreviousBackup: true,
@ -2430,7 +2407,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NewState: {}}, d.strPath(t): {data.NewState: {}},
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2446,7 +2423,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "TwoPriorDrives_OneTombstoned", name: "TwoPriorDrives_OneTombstoned",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with(aPage()))), // root only delta(nil).with(aPage()))), // root only
canUsePreviousBackup: true, canUsePreviousBackup: true,
errCheck: assert.NoError, errCheck: assert.NoError,
previousPaths: map[string]map[string]string{ previousPaths: map[string]map[string]string{
@ -2457,7 +2434,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
d.strPath(t): {data.NotMovedState: {}}, d.strPath(t): {data.NotMovedState: {}},
d2.strPath(t): {data.DeletedState: {}}, d2.strPath(t): {data.DeletedState: {}},
}, },
expectedDeltaURLs: map[string]string{id(drivePfx, 1): id(deltaURL)}, expectedDeltaURLs: map[string]string{id(drivePfx, 1): deltaURL()},
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): {rootID: d.strPath(t)}, id(drivePfx, 1): {rootID: d.strPath(t)},
}, },
@ -2470,14 +2447,14 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "duplicate previous paths in metadata", name: "duplicate previous paths in metadata",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID()), driveFile(d.dir(folderName()), folderID()),
driveFolder(d.dir(), rootID, 2), driveFolder(d.dir(), rootID, 2),
driveFile(d.dir(folderName(2)), folderID(2), 2)))), driveFile(d.dir(folderName(2)), folderID(2), 2)))),
d2.newEnumer().with( d2.newEnumer().with(
delta(id(deltaURL, 2), nil).with( delta(nil, 2).with(
aPage( aPage(
driveFolder(d2.dir(), rootID), driveFolder(d2.dir(), rootID),
driveFile(d2.dir(folderName()), folderID()), driveFile(d2.dir(folderName()), folderID()),
@ -2519,8 +2496,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
d2.id: id(deltaURL, 2), d2.id: deltaURL(2),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2545,7 +2522,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "out of order item enumeration causes prev path collisions", name: "out of order item enumeration causes prev path collisions",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveItem(folderID(fanny, 2), folderName(fanny), d.dir(), rootID, isFolder), driveItem(folderID(fanny, 2), folderName(fanny), d.dir(), rootID, isFolder),
driveFile(d.dir(folderName(fanny)), folderID(fanny, 2), 2), driveFile(d.dir(folderName(fanny)), folderID(fanny, 2), 2),
@ -2571,7 +2548,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2589,7 +2566,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
name: "out of order item enumeration causes opposite prev path collisions", name: "out of order item enumeration causes opposite prev path collisions",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveFile(d.dir(), rootID, 1), driveFile(d.dir(), rootID, 1),
driveFolder(d.dir(), rootID, fanny), driveFolder(d.dir(), rootID, fanny),
@ -2625,7 +2602,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
}, },
}, },
expectedDeltaURLs: map[string]string{ expectedDeltaURLs: map[string]string{
id(drivePfx, 1): id(deltaURL), id(drivePfx, 1): deltaURL(),
}, },
expectedPreviousPaths: map[string]map[string]string{ expectedPreviousPaths: map[string]map[string]string{
id(drivePfx, 1): { id(drivePfx, 1): {
@ -2792,12 +2769,12 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
name: "Two drives with unique url cache instances", name: "Two drives with unique url cache instances",
enumerator: driveEnumerator( enumerator: driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
driveFolder(d.dir(), rootID), driveFolder(d.dir(), rootID),
driveFile(d.dir(folderName()), folderID())))), driveFile(d.dir(folderName()), folderID())))),
d2.newEnumer().with( d2.newEnumer().with(
delta(id(deltaURL, 2), nil).with( delta(nil, 2).with(
aPage( aPage(
driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder), driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder),
driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))), driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))),

View File

@ -161,7 +161,7 @@ func (c *Collections) getTree(
return nil, false, nil return nil, false, nil
} }
return collections, canUsePrevBackup, errGetTreeNotImplemented return collections, canUsePrevBackup, nil
} }
func (c *Collections) makeDriveCollections( func (c *Collections) makeDriveCollections(
@ -242,6 +242,8 @@ func (c *Collections) makeDriveCollections(
globalExcludeItemIDsByDrivePrefix.Add(p.String(), excludedItemIDs) globalExcludeItemIDsByDrivePrefix.Add(p.String(), excludedItemIDs)
} }
counter.Add(count.NewPrevPaths, int64(len(newPrevs)))
return collections, newPrevs, du, nil return collections, newPrevs, du, nil
} }

File diff suppressed because it is too large Load Diff

View File

@ -15,6 +15,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock" dataMock "github.com/alcionai/corso/src/internal/data/mock"
@ -168,6 +169,18 @@ func makeExcludeMap(files ...string) map[string]struct{} {
return delList return delList
} }
func defaultMetadataPath(t *testing.T) path.Path {
metadataPath, err := path.BuildMetadata(
tenant,
user,
path.OneDriveService,
path.FilesCategory,
false)
require.NoError(t, err, "making default metadata path", clues.ToCore(err))
return metadataPath
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// limiter // limiter
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -237,58 +250,35 @@ func aReset(items ...models.DriveItemable) nextPage {
// metadata // metadata
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func makePrevMetadataColls( func compareMetadata(
t *testing.T, t *testing.T,
mbh BackupHandler, mdColl data.Collection,
previousPaths map[string]map[string]string, expectDeltas map[string]string,
) []data.RestoreCollection { expectPrevPaths map[string]map[string]string,
pathPrefix, err := mbh.MetadataPathPrefix(tenant) ) {
require.NoError(t, err, clues.ToCore(err)) ctx, flush := tester.NewContext(t)
defer flush()
prevDeltas := map[string]string{} colls := []data.RestoreCollection{
for driveID := range previousPaths {
prevDeltas[driveID] = id(deltaURL, "prev")
}
mdColl, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, prevDeltas),
graph.NewMetadataEntry(bupMD.PreviousPathFileName, previousPaths),
},
func(*support.ControllerOperationStatus) {},
count.New())
require.NoError(t, err, "creating metadata collection", clues.ToCore(err))
return []data.RestoreCollection{
dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}), dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
} }
p := mdColl.FullPath()
deltas, prevs, _, err := deserializeAndValidateMetadata(
ctx,
colls,
count.New(),
fault.New(true))
require.NoError(t, err, "deserializing metadata", clues.ToCore(err))
if expectDeltas != nil {
assert.Equal(t, expectDeltas, deltas, "delta urls in collection:\n\t %q", p)
}
assert.Equal(t, expectPrevPaths, prevs, "previous path in collection:\n\t %q", p)
} }
// func compareMetadata(
// t *testing.T,
// mdColl data.Collection,
// expectDeltas map[string]string,
// expectPrevPaths map[string]map[string]string,
// ) {
// ctx, flush := tester.NewContext(t)
// defer flush()
// colls := []data.RestoreCollection{
// dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
// }
// deltas, prevs, _, err := deserializeAndValidateMetadata(
// ctx,
// colls,
// count.New(),
// fault.New(true))
// require.NoError(t, err, "deserializing metadata", clues.ToCore(err))
// assert.Equal(t, expectDeltas, deltas, "delta urls")
// assert.Equal(t, expectPrevPaths, prevs, "previous paths")
// }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// collections // collections
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -303,6 +293,10 @@ type collectionAssertion struct {
// this flag gets flipped when calling assertions.compare. // this flag gets flipped when calling assertions.compare.
// any unseen collection will error on requireNoUnseenCollections // any unseen collection will error on requireNoUnseenCollections
sawCollection bool sawCollection bool
// used for metadata collection comparison
deltas map[string]string
prevPaths map[string]map[string]string
} }
func aColl( func aColl(
@ -330,10 +324,27 @@ func aColl(
} }
} }
func aTomb(
prev path.Path,
) *collectionAssertion {
return aColl(nil, prev)
}
func aMetadata(
deltas map[string]string,
prevPaths map[string]map[string]string,
) *collectionAssertion {
return &collectionAssertion{
deltas: deltas,
prevPaths: prevPaths,
}
}
// to aggregate all collection-related expectations in the backup // to aggregate all collection-related expectations in the backup
// map collection path -> collection state -> assertion // map collection path -> collection state -> assertion
type expectedCollections struct { type expectedCollections struct {
assertions map[string]*collectionAssertion assertions map[string]*collectionAssertion
metadata *collectionAssertion
doNotMerge assert.BoolAssertionFunc doNotMerge assert.BoolAssertionFunc
hasURLCache assert.ValueAssertionFunc hasURLCache assert.ValueAssertionFunc
} }
@ -343,9 +354,17 @@ func expectCollections(
hasURLCache bool, hasURLCache bool,
colls ...*collectionAssertion, colls ...*collectionAssertion,
) expectedCollections { ) expectedCollections {
as := map[string]*collectionAssertion{} var (
as = map[string]*collectionAssertion{}
md *collectionAssertion
)
for _, coll := range colls { for _, coll := range colls {
if coll.prevPaths != nil {
md = coll
continue
}
as[expectFullOrPrev(coll).String()] = coll as[expectFullOrPrev(coll).String()] = coll
} }
@ -361,6 +380,7 @@ func expectCollections(
return expectedCollections{ return expectedCollections{
assertions: as, assertions: as,
metadata: md,
doNotMerge: dontMerge, doNotMerge: dontMerge,
hasURLCache: hasCache, hasURLCache: hasCache,
} }
@ -384,6 +404,14 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
p = fullOrPrevPath(t, coll) p = fullOrPrevPath(t, coll)
) )
// check the metadata collection separately
if p.Equal(defaultMetadataPath(t)) {
ecs.metadata.sawCollection = true
compareMetadata(t, coll, ecs.metadata.deltas, ecs.metadata.prevPaths)
return
}
if coll.State() != data.DeletedState { if coll.State() != data.DeletedState {
for itm := range coll.Items(ctx, fault.New(true)) { for itm := range coll.Items(ctx, fault.New(true)) {
itemIDs = append(itemIDs, itm.ID()) itemIDs = append(itemIDs, itm.ID())
@ -394,7 +422,7 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
require.NotNil( require.NotNil(
t, t,
expect, expect,
"test should have an expected entry for collection with:\n\tstate %q\n\tpath %q", "collection present in result, but not in test expectations:\n\tstate %q\n\tpath %q",
coll.State(), coll.State(),
p) p)
@ -409,25 +437,25 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
p) p)
if expect.prev == nil { if expect.prev == nil {
assert.Nil(t, coll.PreviousPath(), "previous path") assert.Nil(t, coll.PreviousPath(), "no previousPath for collection:\n\t %q", p)
} else { } else {
assert.Equal(t, expect.prev, coll.PreviousPath()) assert.Equal(t, expect.prev, coll.PreviousPath(), "wanted previousPath for collection:\n\t %q", p)
} }
if expect.curr == nil { if expect.curr == nil {
assert.Nil(t, coll.FullPath(), "collection path") assert.Nil(t, coll.FullPath(), "no currPath for collection:\n\t %q", p)
} else { } else {
assert.Equal(t, expect.curr, coll.FullPath()) assert.Equal(t, expect.curr, coll.FullPath(), "wanted currPath for collection:\n\t %q", p)
} }
ecs.doNotMerge( ecs.doNotMerge(
t, t,
coll.DoNotMergeItems(), coll.DoNotMergeItems(),
"expected collection to have the appropariate doNotMerge flag") "expected the appropariate doNotMerge flag")
driveColl := coll.(*Collection) if driveColl, ok := coll.(*Collection); ok {
ecs.hasURLCache(t, driveColl.urlCache, "wanted a populated url cache handler in collection:\n\t %q", p)
ecs.hasURLCache(t, driveColl.urlCache, "has a populated url cache handler") }
} }
// ensure that no collections in the expected set are still flagged // ensure that no collections in the expected set are still flagged
@ -440,6 +468,13 @@ func (ecs expectedCollections) requireNoUnseenCollections(t *testing.T) {
"results did not include collection at:\n\tstate %q\t\npath %q", "results did not include collection at:\n\tstate %q\t\npath %q",
ca.state, expectFullOrPrev(ca)) ca.state, expectFullOrPrev(ca))
} }
if ecs.metadata != nil {
require.True(
t,
ecs.metadata.sawCollection,
"results did not include the metadata collection")
}
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -948,27 +983,27 @@ func (en enumerateDriveItemsDelta) EnumerateDriveItemsDelta(
} }
func (en enumerateDriveItemsDelta) drivePager() *apiMock.Pager[models.Driveable] { func (en enumerateDriveItemsDelta) drivePager() *apiMock.Pager[models.Driveable] {
dvs := []models.Driveable{} enumerableDrives := []models.Driveable{}
for _, dp := range en.DrivePagers { for _, dp := range en.DrivePagers {
dvs = append(dvs, dp.Drive.able) enumerableDrives = append(enumerableDrives, dp.Drive.able)
} }
return &apiMock.Pager[models.Driveable]{ return &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{ ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: dvs}, {Values: enumerableDrives},
}, },
} }
} }
func (en enumerateDriveItemsDelta) getDrives() []*deltaDrive { func (en enumerateDriveItemsDelta) getDrives() []*deltaDrive {
dvs := []*deltaDrive{} enumerableDrives := []*deltaDrive{}
for _, dp := range en.DrivePagers { for _, dp := range en.DrivePagers {
dvs = append(dvs, dp.Drive) enumerableDrives = append(enumerableDrives, dp.Drive)
} }
return dvs return enumerableDrives
} }
type deltaDrive struct { type deltaDrive struct {
@ -996,6 +1031,96 @@ func (dd *deltaDrive) newEnumer() *DeltaDriveEnumerator {
return &DeltaDriveEnumerator{Drive: clone} return &DeltaDriveEnumerator{Drive: clone}
} }
type drivePrevPaths struct {
id string
folderIDToPrevPath map[string]string
}
func (dd *deltaDrive) newPrevPaths(
t *testing.T,
idPathPairs ...string,
) *drivePrevPaths {
dpp := drivePrevPaths{
id: dd.id,
folderIDToPrevPath: map[string]string{},
}
require.Zero(t, len(idPathPairs)%2, "idPathPairs has an even count of elements")
for i := 0; i < len(idPathPairs); i += 2 {
dpp.folderIDToPrevPath[idPathPairs[i]] = idPathPairs[i+1]
}
return &dpp
}
// transforms 0 or more drivePrevPaths to a map[driveID]map[folderID]prevPathString
func multiDrivePrevPaths(drivePrevs ...*drivePrevPaths) map[string]map[string]string {
prevPathsByDriveID := map[string]map[string]string{}
for _, dp := range drivePrevs {
prevPathsByDriveID[dp.id] = dp.folderIDToPrevPath
}
return prevPathsByDriveID
}
type driveExcludes struct {
pathPfx string
excludes map[string]struct{}
}
func (dd *deltaDrive) newExcludes(t *testing.T, excludes map[string]struct{}) driveExcludes {
return driveExcludes{
pathPfx: dd.strPath(t),
excludes: excludes,
}
}
func multiDriveExcludeMap(driveExclds ...driveExcludes) *prefixmatcher.StringSetMatchBuilder {
globalExcludes := prefixmatcher.NewStringSetBuilder()
for _, de := range driveExclds {
globalExcludes.Add(de.pathPfx, de.excludes)
}
return globalExcludes
}
// transforms 0 or more drivePrevPaths to a []data.RestoreCollection containing
// a metadata collection.
// DeltaURLs are currently always populated with {driveID: deltaURL()}.
func multiDriveMetadata(
t *testing.T,
drivePrevs ...*drivePrevPaths,
) []data.RestoreCollection {
restoreColls := []data.RestoreCollection{}
for _, drivePrev := range drivePrevs {
mdColl := []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
bupMD.DeltaURLsFileName,
map[string]string{drivePrev.id: deltaURL()}),
graph.NewMetadataEntry(
bupMD.PreviousPathFileName,
multiDrivePrevPaths(drivePrev)),
}
mc, err := graph.MakeMetadataCollection(
defaultMetadataPath(t),
mdColl,
func(*support.ControllerOperationStatus) {},
count.New())
require.NoError(t, err, clues.ToCore(err))
restoreColls = append(restoreColls, dataMock.NewUnversionedRestoreCollection(
t,
data.NoFetchRestoreCollection{Collection: mc}))
}
return restoreColls
}
type DeltaDriveEnumerator struct { type DeltaDriveEnumerator struct {
Drive *deltaDrive Drive *deltaDrive
idx int idx int
@ -1053,22 +1178,22 @@ type deltaQuery struct {
} }
func delta( func delta(
resultDeltaID string,
err error, err error,
deltaTokenSuffix ...any,
) *deltaQuery { ) *deltaQuery {
return &deltaQuery{ return &deltaQuery{
DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID}, DeltaUpdate: pagers.DeltaUpdate{URL: deltaURL(deltaTokenSuffix...)},
Err: err, Err: err,
} }
} }
func deltaWReset( func deltaWReset(
resultDeltaID string,
err error, err error,
deltaTokenSuffix ...any,
) *deltaQuery { ) *deltaQuery {
return &deltaQuery{ return &deltaQuery{
DeltaUpdate: pagers.DeltaUpdate{ DeltaUpdate: pagers.DeltaUpdate{
URL: resultDeltaID, URL: deltaURL(deltaTokenSuffix...),
Reset: true, Reset: true,
}, },
Err: err, Err: err,
@ -1493,6 +1618,26 @@ func (dd *deltaDrive) packageAtRoot() models.DriveItemable {
// id, name, path factories // id, name, path factories
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func deltaURL(suffixes ...any) string {
if len(suffixes) > 1 {
// this should fail any tests. we could pass in a
// testing.T instead and fail the call here, but that
// produces a whole lot of chaff where this check should
// still get us the expected failure
return fmt.Sprintf(
"too many suffixes in the URL; should only be 0 or 1, got %d",
len(suffixes))
}
url := "https://delta.token.url"
for _, sfx := range suffixes {
url = fmt.Sprintf("%s?%v", url, sfx)
}
return url
}
// assumption is only one suffix per id. Mostly using // assumption is only one suffix per id. Mostly using
// the variadic as an "optional" extension. // the variadic as an "optional" extension.
func id(v string, suffixes ...any) string { func id(v string, suffixes ...any) string {
@ -1605,7 +1750,6 @@ func (dd *deltaDrive) dir(elems ...string) string {
// common item names // common item names
const ( const (
bar = "bar" bar = "bar"
deltaURL = "delta_url"
drivePfx = "drive" drivePfx = "drive"
fanny = "fanny" fanny = "fanny"
file = "file" file = "file"

View File

@ -55,7 +55,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileWSizeAt(7, root, "f1"), d1.fileWSizeAt(7, root, "f1"),
d1.fileWSizeAt(1, root, "f2"), d1.fileWSizeAt(1, root, "f2"),
@ -76,7 +76,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileWSizeAt(1, root, "f1"), d1.fileWSizeAt(1, root, "f1"),
d1.fileWSizeAt(2, root, "f2"), d1.fileWSizeAt(2, root, "f2"),
@ -97,7 +97,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileWSizeAt(1, root, "f1"), d1.fileWSizeAt(1, root, "f1"),
d1.folderAt(root), d1.folderAt(root),
@ -120,7 +120,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -144,7 +144,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2")), d1.fileAt(root, "f2")),
@ -173,7 +173,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2")), d1.fileAt(root, "f2")),
@ -199,7 +199,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -231,7 +231,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.folderAt(root), d1.folderAt(root),
d1.fileAt(folder, "f1"), d1.fileAt(folder, "f1"),
@ -259,7 +259,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -295,7 +295,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -323,7 +323,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -354,7 +354,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -386,7 +386,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -394,7 +394,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
d1.fileAt(root, "f4"), d1.fileAt(root, "f4"),
d1.fileAt(root, "f5")))), d1.fileAt(root, "f5")))),
d2.newEnumer().with( d2.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d2.fileAt(root, "f1"), d2.fileAt(root, "f1"),
d2.fileAt(root, "f2"), d2.fileAt(root, "f2"),
@ -417,7 +417,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
}, },
enumerator: driveEnumerator( enumerator: driveEnumerator(
d1.newEnumer().with( d1.newEnumer().with(
delta(id(deltaURL), nil).with( delta(nil).with(
aPage( aPage(
d1.fileAt(root, "f1"), d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"), d1.fileAt(root, "f2"),
@ -500,13 +500,7 @@ func runGetPreviewLimits(
) )
cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs) cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs)
require.NoError(t, err, clues.ToCore(err))
if opts.ToggleFeatures.UseDeltaTree {
require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
} else {
require.NoError(t, err, clues.ToCore(err))
}
assert.True(t, canUsePreviousBackup, "can use previous backup") assert.True(t, canUsePreviousBackup, "can use previous backup")
assert.Empty(t, errs.Skipped()) assert.Empty(t, errs.Skipped())
@ -772,7 +766,7 @@ func runGetPreviewLimitsDefaults(
var ( var (
mockEnumerator = driveEnumerator( mockEnumerator = driveEnumerator(
d.newEnumer().with( d.newEnumer().with(
delta(id(deltaURL), nil).with(pages...))) delta(nil).with(pages...)))
mbh = defaultDriveBHWith(user, mockEnumerator) mbh = defaultDriveBHWith(user, mockEnumerator)
c = collWithMBHAndOpts(mbh, opts) c = collWithMBHAndOpts(mbh, opts)
errs = fault.New(true) errs = fault.New(true)
@ -782,13 +776,7 @@ func runGetPreviewLimitsDefaults(
) )
cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs) cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs)
require.NoError(t, err, clues.ToCore(err))
if opts.ToggleFeatures.UseDeltaTree {
require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
} else {
require.NoError(t, err, clues.ToCore(err))
}
assert.True(t, canUsePreviousBackup, "can use previous backup") assert.True(t, canUsePreviousBackup, "can use previous backup")
assert.Empty(t, errs.Skipped()) assert.Empty(t, errs.Skipped())

View File

@ -539,7 +539,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
drive.newEnumer(). drive.newEnumer().
withErr(test.pagerErr). withErr(test.pagerErr).
with( with(
delta(deltaURL, test.pagerErr). delta(test.pagerErr).
with(test.pages...))) with(test.pages...)))
cache, err := newURLCache( cache, err := newURLCache(

View File

@ -102,6 +102,7 @@ type Path interface {
// and will likely be updated to handle encoded elements instead of clear-text // and will likely be updated to handle encoded elements instead of clear-text
// elements in the future. // elements in the future.
Elements() Elements Elements() Elements
Equal(other Path) bool
// Append returns a new Path object with the given element added to the end of // Append returns a new Path object with the given element added to the end of
// the old Path if possible. If the old Path is an item Path then Append // the old Path if possible. If the old Path is an item Path then Append
// returns an error. // returns an error.

View File

@ -1,6 +1,8 @@
package path package path
import ( import (
"slices"
"github.com/alcionai/clues" "github.com/alcionai/clues"
) )
@ -135,3 +137,16 @@ func (rp dataLayerResourcePath) ToBuilder() *Builder {
func (rp *dataLayerResourcePath) UpdateParent(prev, cur Path) bool { func (rp *dataLayerResourcePath) UpdateParent(prev, cur Path) bool {
return rp.Builder.UpdateParent(prev.ToBuilder(), cur.ToBuilder()) return rp.Builder.UpdateParent(prev.ToBuilder(), cur.ToBuilder())
} }
func (rp *dataLayerResourcePath) Equal(other Path) bool {
if rp == nil && other == nil {
return true
}
if (rp == nil && other != nil) ||
(other == nil && rp != nil) {
return false
}
return slices.Equal(rp.elements, other.Elements())
}