deltaURL stub refactor (#4865)

Clean up the handling of deltaURL by giving it its own func instead of reusing the id() func. Also minimizes the delta stub constructor by using the same suffix pattern as the id and folder constructors. No logic changes, just a quick refactor.

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🤖 Supportability/Tests

#### Issue(s)

* #4689

#### Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in:
parent
b896405e92
commit
92b996a3de
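For context on the substitutions repeated throughout the diff below: the test stubs previously built delta links with the generic `id()` helper (`id(deltaURL)`, `id(deltaURL, 2)`), and the `delta`/`deltaWReset` stub constructors took that URL as their first argument. After the refactor, delta URLs get their own `deltaURL(suffixes...)` func, and the stub constructors build the URL themselves, so callers pass only an error plus optional suffixes. A minimal sketch of the pattern — the `deltaURLPrefix` const, the `deltaQuery` struct, and the helper bodies are illustrative assumptions, not the repo's actual code:

```go
package drive

import "fmt"

// deltaURLPrefix is a hypothetical stand-in for the shared prefix value.
const deltaURLPrefix = "deltaurl"

// id joins a prefix with optional suffixes, e.g. id("file", 2) -> "file_2".
// This is the generic helper the old code reused for delta URLs.
func id(prefix string, suffixes ...any) string {
	result := prefix
	for _, sfx := range suffixes {
		result = fmt.Sprintf("%s_%v", result, sfx)
	}
	return result
}

// deltaURL replaces id(deltaURL, ...) call sites:
// deltaURL() -> "deltaurl", deltaURL(2) -> "deltaurl_2".
func deltaURL(suffixes ...any) string {
	return id(deltaURLPrefix, suffixes...)
}

// deltaQuery is a hypothetical shape for the delta stub.
type deltaQuery struct {
	url string
	err error
}

// delta shows the slimmed stub-constructor signature: the URL is no longer
// passed in, it is derived from the suffixes, matching the suffix pattern
// already used by the id and folder constructors.
func delta(err error, suffixes ...any) *deltaQuery {
	return &deltaQuery{
		url: deltaURL(suffixes...),
		err: err,
	}
}
```

Call sites then shrink accordingly: `delta(id(deltaURL), nil)` becomes `delta(nil)`, `delta(id(deltaURL, 2), nil)` becomes `delta(nil, 2)`, and error-only stubs like `delta("", assert.AnError)` become `delta(assert.AnError)`.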
@@ -8,7 +8,6 @@ import (
 
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"github.com/pkg/errors"
 	"golang.org/x/exp/maps"
 
 	"github.com/alcionai/corso/src/internal/common/idname"
@@ -31,8 +30,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/services/m365/custom"
 )
 
-var errGetTreeNotImplemented = clues.New("forced error: cannot run tree-based backup: incomplete implementation")
-
 const (
 	restrictedDirectory = "Site Pages"
 
@@ -117,7 +114,7 @@ func deserializeAndValidateMetadata(
 
 		paths := prevs[drive]
 		if len(paths) == 0 {
-			logger.Ctx(ictx).Info("dropping drive delta due to 0 prev paths")
+			logger.Ctx(ictx).Info("dropping delta metadata: no matching drive entry in previous paths")
 			delete(deltas, drive)
 		}
 
@@ -127,7 +124,7 @@ func deserializeAndValidateMetadata(
 		// for other possibly incorrect folder paths.
 		for _, prevPath := range paths {
 			if len(prevPath) == 0 {
-				logger.Ctx(ictx).Info("dropping drive delta due to 0 len path")
+				logger.Ctx(ictx).Info("dropping delta metadata: 0 previous paths")
 				delete(deltas, drive)
 
 				break
@@ -270,19 +267,12 @@ func DeserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) error
 		return clues.Wrap(err, "deserializing file contents")
 	}
 
-	var duplicate bool
-
 	for k := range tmp {
 		if _, ok := alreadyFound[k]; ok {
-			duplicate = true
-			break
+			return clues.Stack(errExistingMapping).With("duplicate_key", k)
 		}
 	}
 
-	if duplicate {
-		return clues.Stack(errExistingMapping)
-	}
-
 	maps.Copy(alreadyFound, tmp)
 
 	return nil
@@ -297,13 +287,10 @@ func (c *Collections) Get(
 ) ([]data.BackupCollection, bool, error) {
 	if c.ctrl.ToggleFeatures.UseDeltaTree {
 		colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, globalExcludeItemIDs, errs)
-		if err != nil && !errors.Is(err, errGetTreeNotImplemented) {
-			return nil, false, clues.Wrap(err, "processing backup using tree")
-		}
 
 		return colls,
 			canUsePrevBackup,
-			errGetTreeNotImplemented
+			clues.Wrap(err, "processing backup using tree").OrNil()
 	}
 
 	deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata(
@@ -871,7 +871,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() {
 
 			mbh.DriveItemEnumeration = driveEnumerator(
 				drive.newEnumer().with(
-					delta("notempty", nil).with(
+					delta(nil, "notempty").with(
 						aPage(test.items...))))
 
 			sel := selectors.NewOneDriveBackup([]string{user})
@@ -962,7 +962,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
       return []graph.MetadataCollectionEntry{
         graph.NewMetadataEntry(
           bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
         graph.NewMetadataEntry(
           bupMD.PreviousPathFileName,
           map[string]map[string]string{
@@ -974,7 +974,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
         },
       },
       expectedDeltas: map[string]string{
-        d.id: id(deltaURL),
+        d.id: deltaURL(),
       },
       expectedPaths: map[string]map[string]string{
        d.id: {
@@ -991,7 +991,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
       }
      },
    },
@@ -1034,7 +1034,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1087,7 +1087,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1101,7 +1101,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d2.id: id(deltaURL, 2)}),
+          map[string]string{d2.id: deltaURL(2)}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1113,8 +1113,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
        },
      },
      expectedDeltas: map[string]string{
-        d.id: id(deltaURL),
-        d2.id: id(deltaURL, 2),
+        d.id: deltaURL(),
+        d2.id: deltaURL(2),
      },
      expectedPaths: map[string]map[string]string{
        d.id: {
@@ -1138,7 +1138,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
       }
      },
    },
@@ -1154,7 +1154,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1164,12 +1164,12 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
          }),
        graph.NewMetadataEntry(
          "foo",
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
       }
      },
    },
    expectedDeltas: map[string]string{
-      d.id: id(deltaURL),
+      d.id: deltaURL(),
    },
    expectedPaths: map[string]map[string]string{
      d.id: {
@@ -1186,7 +1186,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1220,7 +1220,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1234,7 +1234,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL, 2)}),
+          map[string]string{d.id: deltaURL(2)}),
       }
      },
    },
@@ -1250,7 +1250,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d.id: id(deltaURL)}),
+          map[string]string{d.id: deltaURL()}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1263,7 +1263,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      },
    },
    expectedDeltas: map[string]string{
-      d.id: id(deltaURL),
+      d.id: deltaURL(),
    },
    expectedPaths: map[string]map[string]string{
      d.id: {
@@ -1283,7 +1283,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
          map[string]string{
-            d.id: id(deltaURL),
+            d.id: deltaURL(),
          }),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
@@ -1299,7 +1299,7 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      return []graph.MetadataCollectionEntry{
        graph.NewMetadataEntry(
          bupMD.DeltaURLsFileName,
-          map[string]string{d2.id: id(deltaURL, 2)}),
+          map[string]string{d2.id: deltaURL(2)}),
        graph.NewMetadataEntry(
          bupMD.PreviousPathFileName,
          map[string]map[string]string{
@@ -1311,8 +1311,8 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata() {
      },
    },
    expectedDeltas: map[string]string{
-      d.id: id(deltaURL),
-      d2.id: id(deltaURL, 2),
+      d.id: deltaURL(),
+      d2.id: deltaURL(2),
    },
    expectedPaths: map[string]map[string]string{
      d.id: {
@@ -1398,29 +1398,6 @@ func (suite *CollectionsUnitSuite) TestDeserializeMetadata_ReadFailure() {
 	require.False(t, canUsePreviousBackup)
 }
 
-func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	mbh := defaultOneDriveBH(user)
-	opts := control.DefaultOptions()
-	opts.ToggleFeatures.UseDeltaTree = true
-
-	mbh.DriveItemEnumeration = driveEnumerator(
-		drive().newEnumer().with(
-			delta(id(deltaURL), nil).with(
-				aPage(
-					delItem(fileID(), rootID, isFile)))))
-
-	c := collWithMBH(mbh)
-	c.ctrl = opts
-
-	_, _, err := c.Get(ctx, nil, nil, fault.New(true))
-	require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
-}
-
 func (suite *CollectionsUnitSuite) TestGet() {
 	metadataPath, err := path.BuildMetadata(
 		tenant,
@@ -1456,7 +1433,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              delItem(fileID(), rootID, isFile))))),
      canUsePreviousBackup: true,
@@ -1468,7 +1445,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NotMovedState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {rootID: d.strPath(t)},
@@ -1481,7 +1458,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveFile(d.dir(), rootID))))),
      canUsePreviousBackup: true,
@@ -1493,7 +1470,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NotMovedState: {fileID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {rootID: d.strPath(t)},
@@ -1506,7 +1483,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()))))),
@@ -1518,7 +1495,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1536,7 +1513,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
@@ -1549,7 +1526,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1567,7 +1544,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
@@ -1584,7 +1561,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1600,7 +1577,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_TwoItemPages_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -1617,7 +1594,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1635,7 +1612,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_TwoItemPages_WithReset",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
@@ -1657,7 +1634,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1675,7 +1652,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_TwoItemPages_WithResetCombinedWithItems",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -1695,7 +1672,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1713,12 +1690,12 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "TwoDrives_OneItemPageEach_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())))),
        d2.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(aPage(
+          deltaWReset(nil, 2).with(aPage(
            driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder),
            driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))),
      canUsePreviousBackup: true,
@@ -1734,8 +1711,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d2.strPath(t, folderName()): {data.NewState: {folderID(2), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
-        d2.id: id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(),
+        d2.id: deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1759,12 +1736,12 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())))),
        d2.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              driveFolder(d2.dir(), rootID),
              driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(), isFile))))),
@@ -1781,8 +1758,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d2.strPath(t, folderName()): {data.NewState: {folderID(), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
-        d2.id: id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(),
+        d2.id: deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1806,7 +1783,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_Errors",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta("", assert.AnError))),
+          delta(assert.AnError))),
      canUsePreviousBackup: false,
      errCheck: assert.Error,
      previousPaths: map[string]map[string]string{
@@ -1821,7 +1798,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              driveFolder(d.dir(), rootID, 2),
@@ -1840,7 +1817,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1859,7 +1836,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              driveFolder(d.dir(), rootID, 2),
@@ -1878,7 +1855,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName(2)): {data.NewState: {folderID(2), fileID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1897,7 +1874,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
              driveFile(d.dir(folderName()), folderID(2))),
@@ -1923,7 +1900,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1941,7 +1918,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -1964,7 +1941,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -1982,7 +1959,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
@@ -2003,7 +1980,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2021,7 +1998,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
@@ -2044,7 +2021,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2062,7 +2039,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "OneDrive Two Item Pages with Malware",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
@@ -2081,7 +2058,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(), fileID(), fileID(2)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2100,7 +2077,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Deleted Folder In New Results With Invalid Delta",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
@@ -2127,7 +2104,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName(2)): {data.DeletedState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2146,7 +2123,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Folder Delete After Invalid Delta",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPageWReset(
              delItem(folderID(), rootID, isFolder))))),
      canUsePreviousBackup: true,
@@ -2162,7 +2139,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.DeletedState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2179,7 +2156,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Item Delete After Invalid Delta",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPageWReset(
              delItem(fileID(), rootID, isFile))))),
      canUsePreviousBackup: true,
@@ -2193,7 +2170,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NewState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2209,7 +2186,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Folder Made And Deleted",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -2225,7 +2202,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NewState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2241,7 +2218,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Folder Created -> Deleted -> Created",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -2261,7 +2238,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID(1), fileID(1)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2279,7 +2256,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Folder Deleted -> Created -> Deleted",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              delItem(folderID(), rootID, isFolder),
              delItem(fileID(), rootID, isFile)),
@@ -2302,7 +2279,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.DeletedState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2316,7 +2293,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Folder Created -> Deleted -> Created with prev",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL, 2), nil).with(
+          deltaWReset(nil, 2).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -2339,7 +2316,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.DeletedState: {}, data.NewState: {folderID(1), fileID(1)}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2357,7 +2334,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Item Made And Deleted",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())),
@@ -2372,7 +2349,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t, folderName()): {data.NewState: {folderID()}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2390,7 +2367,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Random Folder Delete",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aPage(
              delItem(folderID(), rootID, isFolder))))),
      canUsePreviousBackup: true,
@@ -2402,7 +2379,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NewState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2418,7 +2395,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "One Drive Random Item Delete",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              delItem(fileID(), rootID, isFile))))),
      canUsePreviousBackup: true,
@@ -2430,7 +2407,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NewState: {}},
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2446,7 +2423,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "TwoPriorDrives_OneTombstoned",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(aPage()))), // root only
+          delta(nil).with(aPage()))), // root only
      canUsePreviousBackup: true,
      errCheck: assert.NoError,
      previousPaths: map[string]map[string]string{
@@ -2457,7 +2434,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        d.strPath(t): {data.NotMovedState: {}},
        d2.strPath(t): {data.DeletedState: {}},
      },
-      expectedDeltaURLs: map[string]string{id(drivePfx, 1): id(deltaURL)},
+      expectedDeltaURLs: map[string]string{id(drivePfx, 1): deltaURL()},
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {rootID: d.strPath(t)},
      },
@@ -2470,14 +2447,14 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "duplicate previous paths in metadata",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID()),
              driveFolder(d.dir(), rootID, 2),
              driveFile(d.dir(folderName(2)), folderID(2), 2)))),
        d2.newEnumer().with(
-          delta(id(deltaURL, 2), nil).with(
+          delta(nil, 2).with(
            aPage(
              driveFolder(d2.dir(), rootID),
              driveFile(d2.dir(folderName()), folderID()),
@@ -2519,8 +2496,8 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
-        d2.id: id(deltaURL, 2),
+        id(drivePfx, 1): deltaURL(),
+        d2.id: deltaURL(2),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2545,7 +2522,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "out of order item enumeration causes prev path collisions",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveItem(folderID(fanny, 2), folderName(fanny), d.dir(), rootID, isFolder),
              driveFile(d.dir(folderName(fanny)), folderID(fanny, 2), 2),
@@ -2571,7 +2548,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2589,7 +2566,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
      name: "out of order item enumeration causes opposite prev path collisions",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveFile(d.dir(), rootID, 1),
              driveFolder(d.dir(), rootID, fanny),
@@ -2625,7 +2602,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
        },
      },
      expectedDeltaURLs: map[string]string{
-        id(drivePfx, 1): id(deltaURL),
+        id(drivePfx, 1): deltaURL(),
      },
      expectedPreviousPaths: map[string]map[string]string{
        id(drivePfx, 1): {
@@ -2792,12 +2769,12 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
      name: "Two drives with unique url cache instances",
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              driveFolder(d.dir(), rootID),
              driveFile(d.dir(folderName()), folderID())))),
        d2.newEnumer().with(
-          delta(id(deltaURL, 2), nil).with(
+          delta(nil, 2).with(
            aPage(
              driveItem(folderID(2), folderName(), d2.dir(), rootID, isFolder),
              driveItem(fileID(2), fileName(), d2.dir(folderName()), folderID(2), isFile))))),
@@ -161,7 +161,7 @@ func (c *Collections) getTree(
 		return nil, false, nil
 	}
 
-	return collections, canUsePrevBackup, errGetTreeNotImplemented
+	return collections, canUsePrevBackup, nil
 }
 
 func (c *Collections) makeDriveCollections(
@@ -242,6 +242,8 @@ func (c *Collections) makeDriveCollections(
 		globalExcludeItemIDsByDrivePrefix.Add(p.String(), excludedItemIDs)
 	}
 
+	counter.Add(count.NewPrevPaths, int64(len(newPrevs)))
+
 	return collections, newPrevs, du, nil
 }
 
@@ -136,44 +136,223 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeMetadataCollections()
 
 // This test is primarily aimed at multi-drive handling.
 // More complicated single-drive tests can be found in _MakeDriveCollections.
+// Note: these tests intentionally duplicate file and folder IDs and names
+// across drives. This mimics real drive api behavior, where ids and names are
+// only unique within each drive.
 func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
-  // metadataPath, err := path.BuildMetadata(
-  //   tenant,
-  //   user,
-  //   path.OneDriveService,
-  //   path.FilesCategory,
-  //   false)
-  // require.NoError(suite.T(), err, "making metadata path", clues.ToCore(err))
+  t := suite.T()
 
   type expected struct {
    canUsePrevBackup assert.BoolAssertionFunc
-    counts countTD.Expected
-    deltas map[string]string
-    prevPaths map[string]map[string]string
-    skips int
+    collections expectedCollections
+    globalExcludedFileIDs *prefixmatcher.StringSetMatchBuilder
   }
 
+  d1 := drive()
+  d2 := drive(2)
+
   table := []struct {
    name string
    enumerator enumerateDriveItemsDelta
-    previousPaths map[string]map[string]string
+    metadata []data.RestoreCollection
    expect expected
   }{
    {
-      name: "not yet implemented",
+      // in the real world we'd be guaranteed the root folder in the first
+      // delta query, at minimum (ie, the backup before this one).
+      // but this makes for a good sanity check on no-op behavior.
+      name: "no data either previously or in delta",
      enumerator: driveEnumerator(
-        drive().newEnumer().with(
-          delta(id(deltaURL), nil).with(
-            aPage()))),
+        d1.newEnumer().with(delta(nil)),
+        d2.newEnumer().with(delta(nil))),
+      metadata: multiDriveMetadata(
+        t,
+        d1.newPrevPaths(t),
+        d2.newPrevPaths(t)),
      expect: expected{
-        canUsePrevBackup: assert.False,
-        counts: countTD.Expected{
-          count.PrevPaths: 0,
-        },
-        deltas: map[string]string{},
-        prevPaths: map[string]map[string]string{},
-        skips: 0,
+        canUsePrevBackup: assert.True,
+        collections: expectCollections(
+          false,
+          true,
+          aMetadata(nil, multiDrivePrevPaths(
+            d1.newPrevPaths(t),
+            d2.newPrevPaths(t)))),
+        globalExcludedFileIDs: multiDriveExcludeMap(
+          d1.newExcludes(t, makeExcludeMap()),
+          d2.newExcludes(t, makeExcludeMap())),
      },
    },
+    {
+      name: "a new backup",
+      enumerator: driveEnumerator(
+        d1.newEnumer().with(
+          delta(nil).with(
+            aPage(
+              d1.fileAt(root, "r"),
+              d1.folderAt(root),
+              d1.fileAt(folder, "f")))),
+        d2.newEnumer().with(
+          delta(nil).with(
+            aPage(
+              d2.fileAt(root, "r"),
+              d2.folderAt(root),
+              d2.fileAt(folder, "f"))))),
+      metadata: multiDriveMetadata(
+        t,
+        d1.newPrevPaths(t),
+        d2.newPrevPaths(t)),
+      expect: expected{
+        canUsePrevBackup: assert.True,
+        collections: expectCollections(
+          false,
+          true,
+          aColl(
+            d1.fullPath(t),
+            nil,
+            fileID("r")),
+          aColl(
+            d1.fullPath(t, folderName()),
+            nil,
+            fileID("f")),
+          aColl(
+            d2.fullPath(t),
+            nil,
+            fileID("r")),
+          aColl(
+            d2.fullPath(t, folderName()),
+            nil,
+            fileID("f")),
+          aMetadata(nil, multiDrivePrevPaths(
+            d1.newPrevPaths(
+              t,
+              rootID, d1.strPath(t),
+              folderID(), d1.strPath(t, folderName())),
+            d2.newPrevPaths(
+              t,
+              rootID, d2.strPath(t),
+              folderID(), d2.strPath(t, folderName()))))),
+        globalExcludedFileIDs: multiDriveExcludeMap(
+          d1.newExcludes(t, makeExcludeMap(fileID("r"), fileID("f"))),
+          d2.newExcludes(t, makeExcludeMap(fileID("r"), fileID("f")))),
+      },
+    },
+    {
+      name: "an incremental backup with no changes",
+      enumerator: driveEnumerator(
+        d1.newEnumer().with(delta(nil).with(aPage())),
+        d2.newEnumer().with(delta(nil).with(aPage()))),
+      metadata: multiDriveMetadata(
+        t,
+        d1.newPrevPaths(
+          t,
+          rootID, d1.strPath(t),
+          folderID(), d1.strPath(t, folderName())),
+        d2.newPrevPaths(
+          t,
+          rootID, d2.strPath(t),
+          folderID(), d2.strPath(t, folderName()))),
+      expect: expected{
+        canUsePrevBackup: assert.True,
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d1.fullPath(t), d1.fullPath(t)),
+          aColl(d2.fullPath(t), d2.fullPath(t)),
+          aMetadata(nil, multiDrivePrevPaths(
+            d1.newPrevPaths(
+              t,
+              rootID, d1.strPath(t),
+              folderID(), d1.strPath(t, folderName())),
+            d2.newPrevPaths(
+              t,
+              rootID, d2.strPath(t),
+              folderID(), d2.strPath(t, folderName()))))),
+        globalExcludedFileIDs: multiDriveExcludeMap(
+          d1.newExcludes(t, makeExcludeMap()),
+          d2.newExcludes(t, makeExcludeMap())),
+      },
+    },
+    {
+      name: "an incremental backup with deletions in one drive and movements in the other",
+      enumerator: driveEnumerator(
+        d1.newEnumer().with(
+          delta(nil).with(
+            aPage(
+              delItem(fileID("r"), rootID, isFile),
+              delItem(folderID(), rootID, isFolder),
+              delItem(fileID("f"), folderID(), isFile)))),
+        d2.newEnumer().with(
+          delta(nil).with(
+            aPage(
+              driveItem(fileID("r"), fileName("r2"), d2.dir(), rootID, isFile),
+              driveItem(folderID(), folderName(2), d2.dir(), rootID, isFolder),
+              driveItem(fileID("f"), fileName("f2"), d2.dir(folderName()), folderID(), isFile))))),
+      metadata: multiDriveMetadata(
+        t,
+        d1.newPrevPaths(
+          t,
+          rootID, d1.strPath(t),
+          folderID(), d1.strPath(t, folderName())),
+        d2.newPrevPaths(
+          t,
+          rootID, d2.strPath(t),
+          folderID(), d2.strPath(t, folderName()))),
+      expect: expected{
+        canUsePrevBackup: assert.True,
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d1.fullPath(t), d1.fullPath(t)),
+          aTomb(d1.fullPath(t, folderName())),
+          aColl(
+            d2.fullPath(t),
+            d2.fullPath(t),
+            fileID("r")),
+          aColl(
+            d2.fullPath(t, folderName(2)),
+            d2.fullPath(t, folderName()),
+            fileID("f")),
+          aMetadata(nil, multiDrivePrevPaths(
+            d1.newPrevPaths(
+              t,
+              rootID, d1.strPath(t)),
+            d2.newPrevPaths(
+              t,
+              rootID, d2.strPath(t),
+              folderID(), d2.strPath(t, folderName(2)))))),
+        globalExcludedFileIDs: multiDriveExcludeMap(
+          d1.newExcludes(t, makeExcludeMap(fileID("r"), fileID("f"))),
+          d2.newExcludes(t, makeExcludeMap(fileID("r"), fileID("f")))),
+      },
+    },
+    {
+      name: "tombstone an entire drive",
+      enumerator: driveEnumerator(
+        d1.newEnumer().with(delta(nil).with(aPage()))),
+      metadata: multiDriveMetadata(
+        t,
+        d1.newPrevPaths(
+          t,
+          rootID, d1.strPath(t),
+          folderID(), d1.strPath(t, folderName())),
+        d2.newPrevPaths(
+          t,
+          rootID, d2.strPath(t),
+          folderID(), d2.strPath(t, folderName()))),
+      expect: expected{
+        canUsePrevBackup: assert.True,
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d1.fullPath(t), d1.fullPath(t)),
+          aTomb(d2.fullPath(t)),
+          aMetadata(nil, multiDrivePrevPaths(
+            d1.newPrevPaths(
+              t,
+              rootID, d1.strPath(t),
+              folderID(), d1.strPath(t, folderName()))))),
+        globalExcludedFileIDs: multiDriveExcludeMap(
+          d1.newExcludes(t, makeExcludeMap())),
+      },
+    },
   }
@@ -187,45 +366,31 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() {
      var (
        mbh = defaultDriveBHWith(user, test.enumerator)
        c = collWithMBH(mbh)
-        prevMetadata = makePrevMetadataColls(t, mbh, test.previousPaths)
        globalExcludes = prefixmatcher.NewStringSetBuilder()
        errs = fault.New(true)
      )
 
-      _, _, err := c.getTree(
+      results, canUsePrevBackup, err := c.getTree(
        ctx,
-        prevMetadata,
+        test.metadata,
        globalExcludes,
        errs)
+      require.NoError(t, err, clues.ToCore(err))
+      test.expect.canUsePrevBackup(t, canUsePrevBackup, "can use previous backup")
 
-      require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
-      // TODO(keepers): awaiting implementation
-      // assert.Empty(t, colls)
-      // assert.Equal(t, test.expect.skips, len(errs.Skipped()))
-      // test.expect.canUsePrevBackup(t, canUsePrevBackup)
-      // test.expect.counts.Compare(t, c.counter)
+      expectColls := test.expect.collections
+      expectColls.compare(t, results)
+      expectColls.requireNoUnseenCollections(t)
 
-      // if err != nil {
-      //   return
-      // }
+      for _, d := range test.expect.globalExcludedFileIDs.Keys() {
+        suite.Run(d, func() {
+          expect, _ := test.expect.globalExcludedFileIDs.Get(d)
+          result, rok := globalExcludes.Get(d)
 
-      // for _, coll := range colls {
-      //   collPath := fullOrPrevPath(t, coll)
-
-      //   if collPath.String() == metadataPath.String() {
-      //     compareMetadata(
-      //       t,
-      //       coll,
-      //       test.expect.deltas,
-      //       test.expect.prevPaths)
-
-      //     continue
-      //   }
-
-      //   test.expect.collAssertions.compare(t, coll, globalExcludes)
-      // }
-
-      // test.expect.collAssertions.requireNoUnseenCollections(t)
+          require.True(t, rok, "drive results have a global excludes entry")
+          assert.Equal(t, expect, result, "global excluded file IDs")
+        })
+      }
    })
   }
 }
@@ -239,23 +404,41 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
  d := drive()
  t := suite.T()
 
+  type expected struct {
+    collections expectedCollections
+    counts countTD.Expected
+    globalExcludedFileIDs map[string]struct{}
+    prevPaths map[string]string
+  }
+
  table := []struct {
    name string
    drive *deltaDrive
    enumerator enumerateDriveItemsDelta
    prevPaths map[string]string
-    expectCounts countTD.Expected
+    expect expected
  }{
    {
      name: "only root in delta, no prev paths",
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage()))),
      prevPaths: map[string]string{},
-      expectCounts: countTD.Expected{
-        count.PrevPaths: 0,
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 0,
+          count.NewPrevPaths: 1,
+        },
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d.fullPath(t), nil)),
+        globalExcludedFileIDs: makeExcludeMap(),
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+        },
      },
    },
    {
@@ -263,13 +446,26 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage()))),
      prevPaths: map[string]string{
        rootID: d.strPath(t),
        folderID(): d.strPath(t, folderName()),
      },
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 2,
+          count.NewPrevPaths: 2,
+        },
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d.fullPath(t), d.fullPath(t))),
+        globalExcludedFileIDs: makeExcludeMap(),
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+        },
-      expectCounts: countTD.Expected{
-        count.PrevPaths: 1,
      },
    },
    {
@@ -277,13 +473,29 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              d.folderAt(root),
              d.fileAt(folder))))),
      prevPaths: map[string]string{},
-      expectCounts: countTD.Expected{
+      expect: expected{
+        counts: countTD.Expected{
          count.PrevPaths: 0,
+          count.NewPrevPaths: 2,
+        },
+        collections: expectCollections(
+          false,
+          true,
+          aColl(d.fullPath(t), nil),
+          aColl(
+            d.fullPath(t, folderName()),
+            nil,
+            fileID())),
+        globalExcludedFileIDs: makeExcludeMap(fileID()),
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+        },
      },
    },
    {
@@ -291,15 +503,67 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          delta(id(deltaURL), nil).with(
+          delta(nil).with(
            aPage(
              d.folderAt(root),
              d.fileAt(folder))))),
      prevPaths: map[string]string{
        rootID: d.strPath(t),
        folderID(): d.strPath(t, folderName()),
      },
-      expectCounts: countTD.Expected{
-        count.PrevPaths: 1,
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 2,
+          count.NewPrevPaths: 2,
+        },
+        collections: expectCollections(
+          false,
+          true,
+          aColl(
+            d.fullPath(t),
+            d.fullPath(t)),
+          aColl(
+            d.fullPath(t, folderName()),
+            d.fullPath(t, folderName()),
+            fileID())),
+        globalExcludedFileIDs: makeExcludeMap(fileID()),
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+        },
      },
    },
+    {
+      name: "only previous path data",
+      drive: d,
+      enumerator: driveEnumerator(
+        d.newEnumer().with(
+          delta(nil).with(
+            aPage()))),
+      prevPaths: map[string]string{
+        rootID: d.strPath(t),
+        folderID(): d.strPath(t, folderName()),
+        folderID("sib"): d.strPath(t, folderName("sib")),
+        folderID("chld"): d.strPath(t, folderName(), folderName("chld")),
+      },
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 4,
+          count.NewPrevPaths: 4,
+        },
+        collections: expectCollections(
+          false,
+          true,
+          aColl(
+            d.fullPath(t),
+            d.fullPath(t))),
+        globalExcludedFileIDs: makeExcludeMap(),
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+          folderID("sib"): d.strPath(t, folderName("sib")),
+          folderID("chld"): d.strPath(t, folderName(), folderName("chld")),
+        },
+      },
+    },
    {
@@ -307,12 +571,23 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage()))),
      prevPaths: map[string]string{},
-      expectCounts: countTD.Expected{
+      expect: expected{
+        counts: countTD.Expected{
          count.PrevPaths: 0,
+          count.NewPrevPaths: 1,
+        },
+        collections: expectCollections(
+          true,
+          true,
+          aColl(d.fullPath(t), nil)),
+        globalExcludedFileIDs: nil,
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+        },
      },
    },
    {
@@ -320,14 +595,29 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage()))),
      prevPaths: map[string]string{
        rootID: d.strPath(t),
        folderID(): d.strPath(t, folderName()),
      },
-      expectCounts: countTD.Expected{
-        count.PrevPaths: 1,
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 2,
+          count.NewPrevPaths: 1,
+        },
+        collections: expectCollections(
+          true,
+          true,
+          aColl(
+            d.fullPath(t),
+            d.fullPath(t)),
+          aTomb(d.fullPath(t, folderName()))),
+        globalExcludedFileIDs: nil,
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+        },
      },
    },
    {
@@ -335,14 +625,32 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              d.folderAt(root),
              d.fileAt(folder))))),
      prevPaths: map[string]string{},
-      expectCounts: countTD.Expected{
+      expect: expected{
+        counts: countTD.Expected{
          count.PrevPaths: 0,
+          count.NewPrevPaths: 2,
+        },
+        collections: expectCollections(
+          true,
+          true,
+          aColl(
+            d.fullPath(t),
+            nil),
+          aColl(
+            d.fullPath(t, folderName()),
+            nil,
+            fileID())),
+        globalExcludedFileIDs: nil,
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+        },
      },
    },
    {
@@ -350,16 +658,35 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      drive: d,
      enumerator: driveEnumerator(
        d.newEnumer().with(
-          deltaWReset(id(deltaURL), nil).with(
+          deltaWReset(nil).with(
            aReset(),
            aPage(
              d.folderAt(root),
              d.fileAt(folder))))),
      prevPaths: map[string]string{
        rootID: d.strPath(t),
        folderID(): d.strPath(t, folderName()),
      },
+      expect: expected{
+        counts: countTD.Expected{
+          count.PrevPaths: 2,
+          count.NewPrevPaths: 2,
+        },
+        collections: expectCollections(
+          true,
+          true,
+          aColl(
+            d.fullPath(t),
+            d.fullPath(t)),
+          aColl(
+            d.fullPath(t, folderName()),
+            d.fullPath(t, folderName()),
+            fileID())),
+        globalExcludedFileIDs: nil,
+        prevPaths: map[string]string{
+          rootID: d.strPath(t),
+          folderID(): d.strPath(t, folderName()),
+        },
-      expectCounts: countTD.Expected{
-        count.PrevPaths: 1,
      },
    },
 }
@@ -374,19 +701,35 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() {
      mbh.DriveItemEnumeration = test.enumerator
 
      c := collWithMBH(mbh)
+      globalExcludes := prefixmatcher.NewStringSetBuilder()
 
-      _, _, _, err := c.makeDriveCollections(
+      results, prevPaths, _, err := c.makeDriveCollections(
        ctx,
        test.drive.able,
        test.prevPaths,
-        id(deltaURL, "prev"),
+        deltaURL("prev"),
        newPagerLimiter(control.DefaultOptions()),
-        prefixmatcher.NewStringSetBuilder(),
+        globalExcludes,
        c.counter,
        fault.New(true))
      require.NoError(t, err, clues.ToCore(err))
 
-      test.expectCounts.Compare(t, c.counter)
+      test.expect.counts.Compare(t, c.counter)
 
+      expectColls := test.expect.collections
+      expectColls.compare(t, results)
+      expectColls.requireNoUnseenCollections(t)
 
+      assert.Equal(t, test.expect.prevPaths, prevPaths, "new previous paths")
 
+      if test.expect.globalExcludedFileIDs != nil {
+        excludes, ok := globalExcludes.Get(d.strPath(t))
+        require.True(t, ok, "drive path produces a global file exclusions entry")
+        assert.Equal(t, test.expect.globalExcludedFileIDs, excludes)
+      } else {
+        _, ok := globalExcludes.Get(d.strPath(t))
+        require.False(t, ok, "drive path does not produce a global file exclusions entry")
+      }
    })
  }
 }
@@ -474,7 +817,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
 
  type expected struct {
    prevPaths map[string]string
-    collections func(t *testing.T, d *deltaDrive) expectedCollections
+    collections expectedCollections
    globalExcludedFileIDs map[string]struct{}
  }
 
@ -496,8 +839,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
folderID("parent"): d.strPath(t, folderName("parent")),
|
||||
folderID(): d.strPath(t, folderName("parent"), folderName()),
|
||||
},
|
||||
collections: func(t *testing.T, d *deltaDrive) expectedCollections {
|
||||
return expectCollections(
|
||||
collections: expectCollections(
|
||||
false,
|
||||
true,
|
||||
aColl(
|
||||
@ -511,8 +853,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
aColl(
|
||||
d.fullPath(t, folderName("parent"), folderName()),
|
||||
nil,
|
||||
fileID("f")))
|
||||
},
|
||||
fileID("f"))),
|
||||
globalExcludedFileIDs: makeExcludeMap(
|
||||
fileID("r"),
|
||||
fileID("p"),
|
||||
@ -540,8 +881,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
folderID("prev"): d.strPath(t, folderName("parent"), folderName("prev")),
|
||||
folderID("prev-chld"): d.strPath(t, folderName("parent"), folderName("prev"), folderName("prev-chld")),
|
||||
},
|
||||
collections: func(t *testing.T, d *deltaDrive) expectedCollections {
|
||||
return expectCollections(
|
||||
collections: expectCollections(
|
||||
false,
|
||||
true,
|
||||
aColl(
|
||||
@ -556,8 +896,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
d.fullPath(t, folderName("parent"), folderName()),
|
||||
d.fullPath(t, folderName("parent-prev"), folderName()),
|
||||
fileID("f")),
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone-prev"))))
|
||||
},
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone-prev")))),
|
||||
globalExcludedFileIDs: makeExcludeMap(
|
||||
fileID("r"),
|
||||
fileID("p"),
|
||||
@ -585,8 +924,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
folderID("pr/ev"): d.strPath(t, folderName("pa/rent"), folderName("pr/ev")),
|
||||
folderID("prev/chld"): d.strPath(t, folderName("pa/rent"), folderName("pr/ev"), folderName("prev/chld")),
|
||||
},
|
||||
collections: func(t *testing.T, d *deltaDrive) expectedCollections {
|
||||
return expectCollections(
|
||||
collections: expectCollections(
|
||||
false,
|
||||
true,
|
||||
aColl(
|
||||
@ -601,8 +939,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
d.fullPath(t, folderName("pa/rent"), folderName()),
|
||||
d.fullPath(t, folderName("parent/prev"), folderName()),
|
||||
fileID("f")),
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone/prev"))))
|
||||
},
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone/prev")))),
|
||||
globalExcludedFileIDs: makeExcludeMap(
|
||||
fileID("r"),
|
||||
fileID("p"),
|
||||
@ -634,8 +971,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
folderID("prev"): d.strPath(t, folderName("prev")),
|
||||
folderID("prev-chld"): d.strPath(t, folderName("prev"), folderName("prev-chld")),
|
||||
},
|
||||
collections: func(t *testing.T, d *deltaDrive) expectedCollections {
|
||||
return expectCollections(
|
||||
collections: expectCollections(
|
||||
false,
|
||||
true,
|
||||
aColl(
|
||||
@ -650,8 +986,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
d.fullPath(t, folderName("parent"), folderName()),
|
||||
d.fullPath(t, folderName("parent"), folderName()),
|
||||
fileID("f")),
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone"))))
|
||||
},
|
||||
aColl(nil, d.fullPath(t, folderName("tombstone")))),
|
||||
globalExcludedFileIDs: makeExcludeMap(
|
||||
fileID("r"),
|
||||
fileID("p"),
|
||||
@ -684,8 +1019,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
folderID("pr/ev"): d.strPath(t, folderName("pa/rent"), folderName("pr/ev")),
|
||||
folderID("prev/chld"): d.strPath(t, folderName("pa/rent"), folderName("pr/ev"), folderName("prev/chld")),
|
||||
},
|
||||
collections: func(t *testing.T, d *deltaDrive) expectedCollections {
|
||||
return expectCollections(
|
||||
collections: expectCollections(
|
||||
false,
|
||||
true,
|
||||
aColl(
|
||||
@ -700,8 +1034,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
d.fullPath(t, folderName("pa/rent"), folderName()),
|
||||
d.fullPath(t, folderName("pa/rent"), folderName()),
|
||||
fileID("f")),
|
||||
aColl(nil, d.fullPath(t, folderName("to/mbstone"))))
|
||||
},
|
||||
aColl(nil, d.fullPath(t, folderName("to/mbstone")))),
|
||||
globalExcludedFileIDs: makeExcludeMap(
|
||||
fileID("r"),
|
||||
fileID("p"),
|
||||
@ -734,13 +1067,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_TurnTreeIntoCollections()
|
||||
tree,
|
||||
d.able,
|
||||
test.prevPaths,
|
||||
deltaURL,
|
||||
deltaURL(),
|
||||
countPages,
|
||||
fault.New(true))
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, test.expect.prevPaths, newPrevPaths, "new previous paths")
|
||||
|
||||
expectColls := test.expect.collections(t, d)
|
||||
expectColls := test.expect.collections
|
||||
expectColls.compare(t, colls)
|
||||
expectColls.requireNoUnseenCollections(t)
|
||||
|
||||
@ -786,7 +1119,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
Drive: d,
DeltaQueries: []*deltaQuery{{
Pages: nil,
DeltaUpdate: pagers.DeltaUpdate{URL: id(deltaURL)},
DeltaUpdate: pagers.DeltaUpdate{URL: deltaURL()},
}},
},
},
@ -808,7 +1141,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage()))),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
@ -833,7 +1166,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(),
aPage()))),
limiter: newPagerLimiter(control.DefaultOptions()),
@ -859,7 +1192,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(d.folderAt(root)),
aPage(d.folderAt(root, "sib")),
aPage(
@ -891,7 +1224,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder)),
@ -932,7 +1265,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(delItem(folderID(), folderID("parent"), isFolder))))),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
@ -960,7 +1293,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: treeWithFileInFolder,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(aPage(
delta(nil).with(aPage(
delItem(folderID(), folderID("parent"), isFolder))))),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
@ -993,7 +1326,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder)),
@ -1026,7 +1359,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: treeWithFolders,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root, "parent"),
driveItem(folderID(), folderName("moved"), d.dir(), folderID("parent"), isFolder),
@ -1061,7 +1394,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(delItem(folderID(), rootID, isFolder)),
aPage(
d.folderAt(root),
@ -1093,7 +1426,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: treeWithRoot,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(delItem(folderID(), rootID, isFolder)),
aPage(
d.folderAt(root),
@ -1125,7 +1458,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: treeWithFileAtRoot,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder)),
@ -1161,7 +1494,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta(
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder)),
@ -1212,15 +1545,15 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta()
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).
delta(nil).
with(aPage(
d.folderAt(root),
d.fileAt(folder))),
delta(id(deltaURL), nil).
delta(nil).
with(aPage(
d.folderAt(root, "sib"),
d.fileAt("sib", "fsib"))),
delta(id(deltaURL), nil).
delta(nil).
with(aPage(
d.folderAt(root),
d.folderAt(folder, "chld"),
@ -1253,18 +1586,58 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta()
},
},
},
{
name: "file moved multiple times",
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(nil).
with(aPage(
d.folderAt(root),
d.fileAt(folder))),
delta(nil).
with(aPage(
d.fileAt(root))),
delta(nil).
with(aPage(
d.folderAt(root),
d.fileAt(folder))))),
limiter: newPagerLimiter(control.DefaultOptions()),
expect: populateTreeExpected{
counts: countTD.Expected{
count.TotalDeltasProcessed: 4,
count.TotalDeleteFoldersProcessed: 0,
count.TotalDeleteFilesProcessed: 0,
count.TotalFilesProcessed: 3,
count.TotalFoldersProcessed: 5,
count.TotalPagesEnumerated: 4,
},
err: require.NoError,
numLiveFiles: 1,
numLiveFolders: 2,
sizeBytes: 1 * defaultFileSize,
treeContainsFolderIDs: []string{
rootID,
folderID(),
},
treeContainsTombstoneIDs: []string{},
treeContainsFileIDsWithParent: map[string]string{
fileID(): folderID(),
},
},
},
{
name: "create->delete,create",
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder))),
// a (delete,create) pair in the same delta can occur when
// a user deletes and restores an item in-between deltas.
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
delItem(folderID(), rootID, isFolder),
delItem(fileID(), folderID(), isFile)),
@ -1298,11 +1671,11 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta()
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d.folderAt(root),
d.fileAt(folder))),
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
driveItem(folderID(), folderName("rename"), d.dir(), rootID, isFolder),
driveItem(fileID(), fileName("rename"), d.dir(folderName("rename")), folderID(), isFile))))),
@ -1335,7 +1708,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta()
tree: newTree,
enumerator: driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
// first page: create /root/folder and /root/folder/file
aPage(
d.folderAt(root),
@ -1350,7 +1723,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta()
driveItem(folderID(2), folderName(), d.dir(), rootID, isFolder),
driveItem(fileID(), fileName(), d.dir(folderName()), folderID(2), isFile))),
// the next delta, containing the delete marker for the original /root/folder
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
delItem(folderID(), rootID, isFolder))))),
limiter: newPagerLimiter(control.DefaultOptions()),
@ -1404,7 +1777,7 @@ func runPopulateTreeTest(
ctx,
tree,
d,
id(deltaURL),
deltaURL(),
test.limiter,
counter,
fault.New(true))

@ -15,6 +15,7 @@ import (
"github.com/stretchr/testify/require"

"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
@ -168,6 +169,18 @@ func makeExcludeMap(files ...string) map[string]struct{} {
return delList
}

func defaultMetadataPath(t *testing.T) path.Path {
metadataPath, err := path.BuildMetadata(
tenant,
user,
path.OneDriveService,
path.FilesCategory,
false)
require.NoError(t, err, "making default metadata path", clues.ToCore(err))

return metadataPath
}

// ---------------------------------------------------------------------------
// limiter
// ---------------------------------------------------------------------------
@ -237,57 +250,34 @@ func aReset(items ...models.DriveItemable) nextPage {
// metadata
// ---------------------------------------------------------------------------

func makePrevMetadataColls(
func compareMetadata(
t *testing.T,
mbh BackupHandler,
previousPaths map[string]map[string]string,
) []data.RestoreCollection {
pathPrefix, err := mbh.MetadataPathPrefix(tenant)
require.NoError(t, err, clues.ToCore(err))
mdColl data.Collection,
expectDeltas map[string]string,
expectPrevPaths map[string]map[string]string,
) {
ctx, flush := tester.NewContext(t)
defer flush()

prevDeltas := map[string]string{}

for driveID := range previousPaths {
prevDeltas[driveID] = id(deltaURL, "prev")
}

mdColl, err := graph.MakeMetadataCollection(
pathPrefix,
[]graph.MetadataCollectionEntry{
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, prevDeltas),
graph.NewMetadataEntry(bupMD.PreviousPathFileName, previousPaths),
},
func(*support.ControllerOperationStatus) {},
count.New())
require.NoError(t, err, "creating metadata collection", clues.ToCore(err))

return []data.RestoreCollection{
colls := []data.RestoreCollection{
dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
}

p := mdColl.FullPath()

deltas, prevs, _, err := deserializeAndValidateMetadata(
ctx,
colls,
count.New(),
fault.New(true))
require.NoError(t, err, "deserializing metadata", clues.ToCore(err))

if expectDeltas != nil {
assert.Equal(t, expectDeltas, deltas, "delta urls in collection:\n\t %q", p)
}

// func compareMetadata(
// t *testing.T,
// mdColl data.Collection,
// expectDeltas map[string]string,
// expectPrevPaths map[string]map[string]string,
// ) {
// ctx, flush := tester.NewContext(t)
// defer flush()

// colls := []data.RestoreCollection{
// dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}),
// }

// deltas, prevs, _, err := deserializeAndValidateMetadata(
// ctx,
// colls,
// count.New(),
// fault.New(true))
// require.NoError(t, err, "deserializing metadata", clues.ToCore(err))
// assert.Equal(t, expectDeltas, deltas, "delta urls")
// assert.Equal(t, expectPrevPaths, prevs, "previous paths")
// }
assert.Equal(t, expectPrevPaths, prevs, "previous path in collection:\n\t %q", p)
}

// ---------------------------------------------------------------------------
// collections
@ -303,6 +293,10 @@ type collectionAssertion struct {
// this flag gets flipped when calling assertions.compare.
// any unseen collection will error on requireNoUnseenCollections
sawCollection bool

// used for metadata collection comparison
deltas map[string]string
prevPaths map[string]map[string]string
}

func aColl(
@ -330,10 +324,27 @@ func aColl(
}
}

func aTomb(
prev path.Path,
) *collectionAssertion {
return aColl(nil, prev)
}

func aMetadata(
deltas map[string]string,
prevPaths map[string]map[string]string,
) *collectionAssertion {
return &collectionAssertion{
deltas: deltas,
prevPaths: prevPaths,
}
}
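
Note: aTomb and aMetadata give the test tables a single assertion type for tombstones and the metadata collection alongside aColl. A minimal sketch of how the three constructors might combine in one expectation (illustrative only; names like d, rootID, and fileID come from elsewhere in this file, and this exact entry is not part of the commit):

    expect: expectCollections(
        false,
        true,
        aColl(d.fullPath(t), nil, fileID()),
        aTomb(d.fullPath(t, folderName("gone"))),
        aMetadata(
            map[string]string{d.id: deltaURL()},
            multiDrivePrevPaths(d.newPrevPaths(t, rootID, d.strPath(t))))),

expectCollections routes any assertion with a non-nil prevPaths map into the metadata slot, so an aMetadata entry can sit anywhere in the argument list.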

// to aggregate all collection-related expectations in the backup
// map collection path -> collection state -> assertion
type expectedCollections struct {
assertions map[string]*collectionAssertion
metadata *collectionAssertion
doNotMerge assert.BoolAssertionFunc
hasURLCache assert.ValueAssertionFunc
}
@ -343,9 +354,17 @@ func expectCollections(
hasURLCache bool,
colls ...*collectionAssertion,
) expectedCollections {
as := map[string]*collectionAssertion{}
var (
as = map[string]*collectionAssertion{}
md *collectionAssertion
)

for _, coll := range colls {
if coll.prevPaths != nil {
md = coll
continue
}

as[expectFullOrPrev(coll).String()] = coll
}

@ -361,6 +380,7 @@ func expectCollections(

return expectedCollections{
assertions: as,
metadata: md,
doNotMerge: dontMerge,
hasURLCache: hasCache,
}
@ -384,6 +404,14 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
p = fullOrPrevPath(t, coll)
)

// check the metadata collection separately
if p.Equal(defaultMetadataPath(t)) {
ecs.metadata.sawCollection = true
compareMetadata(t, coll, ecs.metadata.deltas, ecs.metadata.prevPaths)

return
}

if coll.State() != data.DeletedState {
for itm := range coll.Items(ctx, fault.New(true)) {
itemIDs = append(itemIDs, itm.ID())
@ -394,7 +422,7 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
require.NotNil(
t,
expect,
"test should have an expected entry for collection with:\n\tstate %q\n\tpath %q",
"collection present in result, but not in test expectations:\n\tstate %q\n\tpath %q",
coll.State(),
p)

@ -409,25 +437,25 @@ func (ecs expectedCollections) compareColl(t *testing.T, coll data.BackupCollect
p)

if expect.prev == nil {
assert.Nil(t, coll.PreviousPath(), "previous path")
assert.Nil(t, coll.PreviousPath(), "no previousPath for collection:\n\t %q", p)
} else {
assert.Equal(t, expect.prev, coll.PreviousPath())
assert.Equal(t, expect.prev, coll.PreviousPath(), "wanted previousPath for collection:\n\t %q", p)
}

if expect.curr == nil {
assert.Nil(t, coll.FullPath(), "collection path")
assert.Nil(t, coll.FullPath(), "no currPath for collection:\n\t %q", p)
} else {
assert.Equal(t, expect.curr, coll.FullPath())
assert.Equal(t, expect.curr, coll.FullPath(), "wanted currPath for collection:\n\t %q", p)
}

ecs.doNotMerge(
t,
coll.DoNotMergeItems(),
"expected collection to have the appropariate doNotMerge flag")
|
||||
"expected the appropariate doNotMerge flag")
|
||||

driveColl := coll.(*Collection)

ecs.hasURLCache(t, driveColl.urlCache, "has a populated url cache handler")
if driveColl, ok := coll.(*Collection); ok {
ecs.hasURLCache(t, driveColl.urlCache, "wanted a populated url cache handler in collection:\n\t %q", p)
}
}

// ensure that no collections in the expected set are still flagged
@ -440,6 +468,13 @@ func (ecs expectedCollections) requireNoUnseenCollections(t *testing.T) {
"results did not include collection at:\n\tstate %q\t\npath %q",
|
||||
ca.state, expectFullOrPrev(ca))
}

if ecs.metadata != nil {
require.True(
t,
ecs.metadata.sawCollection,
"results did not include the metadata collection")
}
}

// ---------------------------------------------------------------------------
@ -948,27 +983,27 @@ func (en enumerateDriveItemsDelta) EnumerateDriveItemsDelta(
}

func (en enumerateDriveItemsDelta) drivePager() *apiMock.Pager[models.Driveable] {
dvs := []models.Driveable{}
enumerableDrives := []models.Driveable{}

for _, dp := range en.DrivePagers {
dvs = append(dvs, dp.Drive.able)
enumerableDrives = append(enumerableDrives, dp.Drive.able)
}

return &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: dvs},
{Values: enumerableDrives},
},
}
}

func (en enumerateDriveItemsDelta) getDrives() []*deltaDrive {
dvs := []*deltaDrive{}
enumerableDrives := []*deltaDrive{}

for _, dp := range en.DrivePagers {
dvs = append(dvs, dp.Drive)
enumerableDrives = append(enumerableDrives, dp.Drive)
}

return dvs
return enumerableDrives
}

type deltaDrive struct {
@ -996,6 +1031,96 @@ func (dd *deltaDrive) newEnumer() *DeltaDriveEnumerator {
return &DeltaDriveEnumerator{Drive: clone}
}

type drivePrevPaths struct {
id string
folderIDToPrevPath map[string]string
}

func (dd *deltaDrive) newPrevPaths(
t *testing.T,
idPathPairs ...string,
) *drivePrevPaths {
dpp := drivePrevPaths{
id: dd.id,
folderIDToPrevPath: map[string]string{},
}

require.Zero(t, len(idPathPairs)%2, "idPathPairs has an even count of elements")

for i := 0; i < len(idPathPairs); i += 2 {
dpp.folderIDToPrevPath[idPathPairs[i]] = idPathPairs[i+1]
}

return &dpp
}

// transforms 0 or more drivePrevPaths to a map[driveID]map[folderID]prevPathString
func multiDrivePrevPaths(drivePrevs ...*drivePrevPaths) map[string]map[string]string {
prevPathsByDriveID := map[string]map[string]string{}

for _, dp := range drivePrevs {
prevPathsByDriveID[dp.id] = dp.folderIDToPrevPath
}

return prevPathsByDriveID
}
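
Note: a quick sketch of how the two helpers compose (t and the drive fixture d are assumed from the surrounding tests):

    // alternating id/path pairs build one drive's folderID -> prevPath map
    prev := d.newPrevPaths(
        t,
        rootID, d.strPath(t),
        folderID(), d.strPath(t, folderName()))

    // nest that map under its drive ID:
    // {d.id: {rootID: <root path>, folderID(): <folder path>}}
    byDrive := multiDrivePrevPaths(prev)

The odd-count guard in newPrevPaths fails the test immediately on a stray argument rather than silently dropping a path.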

type driveExcludes struct {
pathPfx string
excludes map[string]struct{}
}

func (dd *deltaDrive) newExcludes(t *testing.T, excludes map[string]struct{}) driveExcludes {
return driveExcludes{
pathPfx: dd.strPath(t),
excludes: excludes,
}
}

func multiDriveExcludeMap(driveExclds ...driveExcludes) *prefixmatcher.StringSetMatchBuilder {
globalExcludes := prefixmatcher.NewStringSetBuilder()

for _, de := range driveExclds {
globalExcludes.Add(de.pathPfx, de.excludes)
}

return globalExcludes
}

// transforms 0 or more drivePrevPaths to a []data.RestoreCollection containing
// a metadata collection.
// DeltaURLs are currently always populated with {driveID: deltaURL()}.
func multiDriveMetadata(
t *testing.T,
drivePrevs ...*drivePrevPaths,
) []data.RestoreCollection {
restoreColls := []data.RestoreCollection{}

for _, drivePrev := range drivePrevs {
mdColl := []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(
bupMD.DeltaURLsFileName,
map[string]string{drivePrev.id: deltaURL()}),
graph.NewMetadataEntry(
bupMD.PreviousPathFileName,
multiDrivePrevPaths(drivePrev)),
}

mc, err := graph.MakeMetadataCollection(
defaultMetadataPath(t),
mdColl,
func(*support.ControllerOperationStatus) {},
count.New())
require.NoError(t, err, clues.ToCore(err))

restoreColls = append(restoreColls, dataMock.NewUnversionedRestoreCollection(
t,
data.NoFetchRestoreCollection{Collection: mc}))
}

return restoreColls
}
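
Note: a usage sketch for seeding prior-backup state in a tree-based test (hedged: the Get call mirrors signatures used elsewhere in this suite, not a verbatim test from this commit):

    // one metadata collection per drive, handed to Get as the
    // previous-backup metadata.
    prevMetadata := multiDriveMetadata(
        t,
        d.newPrevPaths(t, rootID, d.strPath(t)))

    colls, canUsePrevBackup, err := c.Get(ctx, prevMetadata, nil, fault.New(true))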

type DeltaDriveEnumerator struct {
Drive *deltaDrive
idx int
@ -1053,22 +1178,22 @@ type deltaQuery struct {
}

func delta(
resultDeltaID string,
err error,
deltaTokenSuffix ...any,
) *deltaQuery {
return &deltaQuery{
DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID},
DeltaUpdate: pagers.DeltaUpdate{URL: deltaURL(deltaTokenSuffix...)},
Err: err,
}
}

func deltaWReset(
resultDeltaID string,
err error,
deltaTokenSuffix ...any,
) *deltaQuery {
return &deltaQuery{
DeltaUpdate: pagers.DeltaUpdate{
URL: resultDeltaID,
URL: deltaURL(deltaTokenSuffix...),
Reset: true,
},
Err: err,
@ -1493,6 +1618,26 @@ func (dd *deltaDrive) packageAtRoot() models.DriveItemable {
// id, name, path factories
// ---------------------------------------------------------------------------

func deltaURL(suffixes ...any) string {
if len(suffixes) > 1 {
// this should fail any test. we could pass in a testing.T
// and fail the call here instead, but that produces a lot
// of chaff when this check alone gets us the expected failure.
return fmt.Sprintf(
"too many suffixes in the URL; should only be 0 or 1, got %d",
len(suffixes))
}

url := "https://delta.token.url"

for _, sfx := range suffixes {
url = fmt.Sprintf("%s?%v", url, sfx)
}

return url
}
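
Note: to make the refactor concrete, the stub factory and the slimmed constructors now behave roughly like this (a sketch; the token string is just the stub value defined above):

    deltaURL()       // "https://delta.token.url"
    deltaURL("prev") // "https://delta.token.url?prev"

    // the constructors derive their DeltaUpdate URL the same way,
    // so callers pass only an error plus an optional token suffix:
    dq := delta(nil)           // DeltaUpdate.URL == deltaURL()
    dwr := deltaWReset(nil, 1) // DeltaUpdate.URL == deltaURL(1), Reset == true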

// assumption is only one suffix per id. Mostly using
// the variadic as an "optional" extension.
func id(v string, suffixes ...any) string {
@ -1605,7 +1750,6 @@ func (dd *deltaDrive) dir(elems ...string) string {
// common item names
const (
bar = "bar"
deltaURL = "delta_url"
drivePfx = "drive"
fanny = "fanny"
file = "file"

@ -55,7 +55,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileWSizeAt(7, root, "f1"),
d1.fileWSizeAt(1, root, "f2"),
@ -76,7 +76,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileWSizeAt(1, root, "f1"),
d1.fileWSizeAt(2, root, "f2"),
@ -97,7 +97,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileWSizeAt(1, root, "f1"),
d1.folderAt(root),
@ -120,7 +120,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -144,7 +144,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2")),
@ -173,7 +173,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2")),
@ -199,7 +199,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -231,7 +231,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.folderAt(root),
d1.fileAt(folder, "f1"),
@ -259,7 +259,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -295,7 +295,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -323,7 +323,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -354,7 +354,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -386,7 +386,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -394,7 +394,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
d1.fileAt(root, "f4"),
d1.fileAt(root, "f5")))),
d2.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d2.fileAt(root, "f1"),
d2.fileAt(root, "f2"),
@ -417,7 +417,7 @@ func backupLimitTable(t *testing.T, d1, d2 *deltaDrive) []backupLimitTest {
},
enumerator: driveEnumerator(
d1.newEnumer().with(
delta(id(deltaURL), nil).with(
delta(nil).with(
aPage(
d1.fileAt(root, "f1"),
d1.fileAt(root, "f2"),
@ -500,13 +500,7 @@ func runGetPreviewLimits(
)

cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs)

if opts.ToggleFeatures.UseDeltaTree {
require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
} else {
require.NoError(t, err, clues.ToCore(err))
}

assert.True(t, canUsePreviousBackup, "can use previous backup")
assert.Empty(t, errs.Skipped())

@ -772,7 +766,7 @@ func runGetPreviewLimitsDefaults(
var (
mockEnumerator = driveEnumerator(
d.newEnumer().with(
delta(id(deltaURL), nil).with(pages...)))
delta(nil).with(pages...)))
mbh = defaultDriveBHWith(user, mockEnumerator)
c = collWithMBHAndOpts(mbh, opts)
errs = fault.New(true)
@ -782,13 +776,7 @@ func runGetPreviewLimitsDefaults(
)

cols, canUsePreviousBackup, err := c.Get(ctx, nil, delList, errs)

if opts.ToggleFeatures.UseDeltaTree {
require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err))
} else {
require.NoError(t, err, clues.ToCore(err))
}

assert.True(t, canUsePreviousBackup, "can use previous backup")
assert.Empty(t, errs.Skipped())

@ -539,7 +539,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
drive.newEnumer().
withErr(test.pagerErr).
with(
delta(deltaURL, test.pagerErr).
delta(test.pagerErr).
with(test.pages...)))

cache, err := newURLCache(

@ -102,6 +102,7 @@ type Path interface {
// and will likely be updated to handle encoded elements instead of clear-text
// elements in the future.
Elements() Elements
Equal(other Path) bool
// Append returns a new Path object with the given element added to the end of
// the old Path if possible. If the old Path is an item Path then Append
// returns an error.

@ -1,6 +1,8 @@
package path

import (
"slices"

"github.com/alcionai/clues"
)

@ -135,3 +137,16 @@ func (rp dataLayerResourcePath) ToBuilder() *Builder {
func (rp *dataLayerResourcePath) UpdateParent(prev, cur Path) bool {
return rp.Builder.UpdateParent(prev.ToBuilder(), cur.ToBuilder())
}

func (rp *dataLayerResourcePath) Equal(other Path) bool {
if rp == nil && other == nil {
return true
}

if (rp == nil && other != nil) ||
(other == nil && rp != nil) {
return false
}

return slices.Equal(rp.elements, other.Elements())
}
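
Note: a small sketch of the new Equal semantics from a hypothetical caller (path.Build and its argument order are assumed from the package's existing constructors; comparison is exact and element-wise):

    p1, _ := path.Build(tenant, user, path.OneDriveService, path.FilesCategory, false, "folder")
    p2, _ := path.Build(tenant, user, path.OneDriveService, path.FilesCategory, false, "folder")

    eq := p1.Equal(p2)  // true: identical elements
    ne := p1.Equal(nil) // false: only two nil paths compare equal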