From 54ba241fbebb7a352f5f097b5f3942edfab5a423 Mon Sep 17 00:00:00 2001 From: Keepers Date: Thu, 7 Dec 2023 16:54:45 -0700 Subject: [PATCH] require rootID on tree construction (#4746) Turns out the root ID name isn't an appropriate match for establishing the root node. Instead, the backup handler is now extended with a GetRootFolder method and will pass the expected root folder ID into the tree's constructor func to ensure we establish the correct root node. --- #### Does this PR need a docs update or release note? - [x] :no_entry: No #### Type of change - [x] :bug: Bugfix #### Issue(s) * #4689 #### Test Plan - [x] :zap: Unit test - [x] :green_heart: E2E --- .../m365/collection/drive/collections.go | 5 +- .../m365/collection/drive/collections_test.go | 1367 ++++------------- .../m365/collection/drive/collections_tree.go | 144 +- .../collection/drive/collections_tree_test.go | 1055 ++++++------- .../m365/collection/drive/delta_tree.go | 16 +- .../m365/collection/drive/delta_tree_test.go | 77 +- .../m365/collection/drive/handlers.go | 1 + .../m365/collection/drive/helper_test.go | 783 ++++++++++ .../m365/collection/drive/limiter_test.go | 438 ++---- .../m365/collection/drive/site_handler.go | 7 + .../m365/collection/drive/url_cache_test.go | 26 +- .../collection/drive/user_drive_handler.go | 7 + .../m365/service/onedrive/mock/handlers.go | 161 +- .../m365/service/sharepoint/backup_test.go | 18 +- src/pkg/count/keys.go | 3 +- 15 files changed, 2026 insertions(+), 2082 deletions(-) diff --git a/src/internal/m365/collection/drive/collections.go b/src/internal/m365/collection/drive/collections.go index 59e651341..c64c6314d 100644 --- a/src/internal/m365/collection/drive/collections.go +++ b/src/internal/m365/collection/drive/collections.go @@ -8,6 +8,7 @@ import ( "github.com/alcionai/clues" "github.com/microsoftgraph/msgraph-sdk-go/models" + "github.com/pkg/errors" "golang.org/x/exp/maps" "github.com/alcionai/corso/src/internal/common/idname" @@ -296,7 +297,7 @@ func (c *Collections) Get( ) ([]data.BackupCollection, bool, error) { if c.ctrl.ToggleFeatures.UseDeltaTree { colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, ssmb, errs) - if err != nil { + if err != nil && !errors.Is(err, errGetTreeNotImplemented) { return nil, false, clues.Wrap(err, "processing backup using tree") } @@ -828,7 +829,7 @@ func (c *Collections) PopulateDriveCollections( break } - counter.Inc(count.PagesEnumerated) + counter.Inc(count.TotalPagesEnumerated) if reset { counter.Inc(count.PagerResets) diff --git a/src/internal/m365/collection/drive/collections_test.go b/src/internal/m365/collection/drive/collections_test.go index 30c756eb4..36fdfe24a 100644 --- a/src/internal/m365/collection/drive/collections_test.go +++ b/src/internal/m365/collection/drive/collections_test.go @@ -1,8 +1,6 @@ package drive import ( - "context" - "fmt" "testing" "github.com/alcionai/clues" @@ -18,8 +16,6 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" dataMock "github.com/alcionai/corso/src/internal/data/mock" - "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" - odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" @@ -31,305 +27,8 @@ import ( "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api/graph" apiMock
"github.com/alcionai/corso/src/pkg/services/m365/api/mock" - "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) -// --------------------------------------------------------------------------- -// helpers -// --------------------------------------------------------------------------- - -type statePath struct { - state data.CollectionState - currPath path.Path - prevPath path.Path -} - -func toODPath(t *testing.T, s string) path.Path { - spl := path.Split(s) - p, err := path.Builder{}. - Append(spl[4:]...). - ToDataLayerPath( - spl[0], - spl[2], - path.OneDriveService, - path.FilesCategory, - false) - require.NoError(t, err, clues.ToCore(err)) - - return p -} - -func asDeleted(t *testing.T, prev string) statePath { - return statePath{ - state: data.DeletedState, - prevPath: toODPath(t, prev), - } -} - -func asMoved(t *testing.T, prev, curr string) statePath { - return statePath{ - state: data.MovedState, - prevPath: toODPath(t, prev), - currPath: toODPath(t, curr), - } -} - -func asNew(t *testing.T, curr string) statePath { - return statePath{ - state: data.NewState, - currPath: toODPath(t, curr), - } -} - -func asNotMoved(t *testing.T, p string) statePath { - return statePath{ - state: data.NotMovedState, - prevPath: toODPath(t, p), - currPath: toODPath(t, p), - } -} - -type itemType int - -const ( - isFile itemType = 1 - isFolder itemType = 2 - isPackage itemType = 3 -) - -func coreItem( - id, name, parentPath, parentID string, - it itemType, -) *models.DriveItem { - item := models.NewDriveItem() - item.SetName(&name) - item.SetId(&id) - - parentReference := models.NewItemReference() - parentReference.SetPath(&parentPath) - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch it { - case isFile: - item.SetSize(ptr.To[int64](defaultItemSize)) - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -func driveItem( - id, name, parentPath, parentID string, - it itemType, -) models.DriveItemable { - return coreItem(id, name, parentPath, parentID, it) -} - -func driveItemWithSize( - id, name, parentPath, parentID string, - size int64, - it itemType, -) models.DriveItemable { - res := coreItem(id, name, parentPath, parentID, it) - res.SetSize(ptr.To(size)) - - return res -} - -func fileItem( - id, name, parentPath, parentID, url string, - deleted bool, -) models.DriveItemable { - di := driveItem(id, name, parentPath, parentID, isFile) - di.SetAdditionalData(map[string]any{ - "@microsoft.graph.downloadUrl": url, - }) - - if deleted { - di.SetDeleted(models.NewDeleted()) - } - - return di -} - -func malwareItem( - id, name, parentPath, parentID string, - it itemType, -) models.DriveItemable { - c := coreItem(id, name, parentPath, parentID, it) - - mal := models.NewMalware() - malStr := "test malware" - mal.SetDescription(&malStr) - - c.SetMalware(mal) - - return c -} - -func driveRootItem() models.DriveItemable { - name := rootName - item := models.NewDriveItem() - item.SetName(&name) - item.SetId(ptr.To(rootID)) - item.SetRoot(models.NewRoot()) - item.SetFolder(models.NewFolder()) - - return item -} - -// delItem creates a DriveItemable that is marked as deleted. path must be set -// to the base drive path. 
-func delItem( - id string, - parentPath string, - parentID string, - it itemType, -) models.DriveItemable { - item := models.NewDriveItem() - item.SetId(&id) - item.SetDeleted(models.NewDeleted()) - - parentReference := models.NewItemReference() - parentReference.SetId(&parentID) - item.SetParentReference(parentReference) - - switch it { - case isFile: - item.SetFile(models.NewFile()) - case isFolder: - item.SetFolder(models.NewFolder()) - case isPackage: - item.SetPackageEscaped(models.NewPackageEscaped()) - } - - return item -} - -func id(v string) string { - return fmt.Sprintf("id_%s_0", v) -} - -func idx(v string, sfx any) string { - return fmt.Sprintf("id_%s_%v", v, sfx) -} - -func name(v string) string { - return fmt.Sprintf("n_%s_0", v) -} - -func namex(v string, sfx any) string { - return fmt.Sprintf("n_%s_%v", v, sfx) -} - -func toPath(elems ...string) string { - es := []string{} - for _, elem := range elems { - es = append(es, path.Split(elem)...) - } - - switch len(es) { - case 0: - return "" - case 1: - return es[0] - default: - return path.Builder{}.Append(es...).String() - } -} - -func fullPath(elems ...string) string { - return toPath(append( - []string{ - tenant, - path.OneDriveService.String(), - user, - path.FilesCategory.String(), - odConsts.DriveFolderPrefixBuilder(id(drive)).String(), - }, - elems...)...) -} - -func driveFullPath(driveID any, elems ...string) string { - return toPath(append( - []string{ - tenant, - path.OneDriveService.String(), - user, - path.FilesCategory.String(), - odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String(), - }, - elems...)...) -} - -func parentDir(elems ...string) string { - return toPath(append( - []string{odConsts.DriveFolderPrefixBuilder(id(drive)).String()}, - elems...)...) -} - -func driveParentDir(driveID any, elems ...string) string { - return toPath(append( - []string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()}, - elems...)...) 
-} - -// just for readability -const ( - doMergeItems = true - doNotMergeItems = false -) - -// common item names -const ( - bar = "bar" - delta = "delta_url" - drive = "drive" - fanny = "fanny" - file = "file" - folder = "folder" - foo = "foo" - item = "item" - malware = "malware" - nav = "nav" - pkg = "package" - rootID = odConsts.RootID - rootName = odConsts.RootPathDir - subfolder = "subfolder" - tenant = "t" - user = "u" -) - -var anyFolderScope = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] - -type failingColl struct{} - -func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item { - ic := make(chan data.Item) - defer close(ic) - - errs.AddRecoverable(ctx, assert.AnError) - - return ic -} -func (f failingColl) FullPath() path.Path { return nil } -func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) { return nil, nil } - -func makeExcludeMap(files ...string) map[string]struct{} { - delList := map[string]struct{}{} - for _, file := range files { - delList[file+metadata.DataFileSuffix] = struct{}{} - delList[file+metadata.MetaFileSuffix] = struct{}{} - } - - return delList -} - // --------------------------------------------------------------------------- // tests // --------------------------------------------------------------------------- @@ -729,9 +428,9 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { name: "deleted folder tree twice within backup", items: []models.DriveItemable{ driveRootItem(), - delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), rootID, isFolder), driveItem(id(folder), name(drive), driveParentDir(drive), rootID, isFolder), - delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), rootID, isFolder), }, previousPaths: map[string]string{ id(folder): driveFullPath(drive), @@ -759,7 +458,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { items: []models.DriveItemable{ driveRootItem(), driveItem(id(folder), name(folder), driveParentDir(drive), rootID, isFolder), - delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), rootID, isFolder), driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ @@ -789,9 +488,9 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { items: []models.DriveItemable{ driveRootItem(), driveItem(idx(folder, 1), name(folder), driveParentDir(drive), rootID, isFolder), - delItem(idx(folder, 1), driveParentDir(drive), rootID, isFolder), + delItem(idx(folder, 1), rootID, isFolder), driveItem(idx(folder, 2), name(folder), driveParentDir(drive), rootID, isFolder), - delItem(idx(folder, 2), driveParentDir(drive), rootID, isFolder), + delItem(idx(folder, 2), rootID, isFolder), }, previousPaths: map[string]string{ idx(folder, 1): driveFullPath(drive, namex(folder, "a")), @@ -996,8 +695,8 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { name: "deleted folder and package", items: []models.DriveItemable{ driveRootItem(), // root is always present, but not necessary here - delItem(id(folder), driveParentDir(drive), rootID, isFolder), - delItem(id(pkg), driveParentDir(drive), rootID, isPackage), + delItem(id(folder), rootID, isFolder), + delItem(id(pkg), rootID, isPackage), }, previousPaths: map[string]string{ rootID: driveFullPath(drive), @@ -1025,7 +724,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { name: "delete folder without previous", 
items: []models.DriveItemable{ driveRootItem(), - delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), rootID, isFolder), }, previousPaths: map[string]string{ rootID: driveFullPath(drive), @@ -1049,7 +748,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { name: "delete folder tree move subfolder", items: []models.DriveItemable{ driveRootItem(), - delItem(id(folder), driveParentDir(drive), rootID, isFolder), + delItem(id(folder), rootID, isFolder), driveItem(id(subfolder), name(subfolder), driveParentDir(drive), rootID, isFolder), }, previousPaths: map[string]string{ @@ -1079,7 +778,7 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { name: "delete file", items: []models.DriveItemable{ driveRootItem(), - delItem(id(item), driveParentDir(drive), rootID, isFile), + delItem(id(item), rootID, isFile), }, previousPaths: map[string]string{ rootID: driveFullPath(drive), @@ -1166,24 +865,16 @@ func (suite *CollectionsUnitSuite) TestPopulateDriveCollections() { defer flush() var ( - driveID = idx(drive, drive) - mbh = mock.DefaultOneDriveBH(user) - du = pagers.DeltaUpdate{ - URL: "notempty", - Reset: false, - } + driveID = idx(drive, drive) + mbh = mock.DefaultOneDriveBH(user) excludes = map[string]struct{}{} errs = fault.New(true) ) - mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID: { - Pages: []mock.NextPage{{Items: test.items}}, - DeltaUpdate: du, - }, - }, - } + mbh.DriveItemEnumeration = mock.DriveEnumerator( + mock.Drive(driveID).With( + mock.Delta("notempty", nil).With( + aPage(test.items...)))) sel := selectors.NewOneDriveBackup([]string{user}) sel.Include([]selectors.OneDriveScope{test.scope}) @@ -1720,19 +1411,10 @@ func (suite *CollectionsUnitSuite) TestGet_treeCannotBeUsedWhileIncomplete() { opts.ToggleFeatures.UseDeltaTree = true mbh.DrivePagerV = pagerForDrives(drv) - mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), // will be present, not needed - delItem(id(file), parentDir(), rootID, isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - } + mbh.DriveItemEnumeration = mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage(delItem(id(file), rootID, isFile))))) c := collWithMBH(mbh) c.ctrl = opts @@ -1761,7 +1443,7 @@ func (suite *CollectionsUnitSuite) TestGet() { table := []struct { name string drives []models.Driveable - enumerator mock.EnumerateItemsDeltaByDrive + enumerator mock.EnumerateDriveItemsDelta canUsePreviousBackup bool errCheck assert.ErrorAssertionFunc previousPaths map[string]map[string]string @@ -1780,19 +1462,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), // will be present, not needed - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, 
errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -1814,19 +1487,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(file), name(file), driveParentDir(1), rootID, isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(id(file), name(file), driveParentDir(1), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -1848,20 +1512,11 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_NoErrors", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{}, @@ -1887,21 +1542,12 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(id(file), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{}, @@ -1927,19 +1573,12 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), 
id(folder), isFile), - driveItem(id(file), namex(file, 2), driveParentDir(1), rootID, isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(id(file), namex(file, 2), driveParentDir(1), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -1964,69 +1603,18 @@ func (suite *CollectionsUnitSuite) TestGet() { driveFullPath(1): makeExcludeMap(id(file)), }), }, - { - name: "OneDrive_OneItemPage_EmptyDelta_NoErrors", - drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: "", Reset: true}, - }, - }, - }, - canUsePreviousBackup: true, - errCheck: assert.NoError, - previousPaths: map[string]map[string]string{ - idx(drive, 1): {}, - }, - expectedCollections: map[string]map[data.CollectionState][]string{ - driveFullPath(1): {data.NewState: {}}, - driveFullPath(1, name(folder)): {data.NewState: {id(folder), id(file)}}, - }, - expectedDeltaURLs: map[string]string{}, - expectedPreviousPaths: map[string]map[string]string{ - idx(drive, 1): { - rootID: driveFullPath(1), - id(folder): driveFullPath(1, name(folder)), - }, - }, - expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}), - doNotMergeItems: map[string]bool{ - driveFullPath(1): true, - driveFullPath(1, name(folder)): true, - }, - }, { name: "OneDrive_TwoItemPages_NoErrors", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2054,41 +1642,20 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_TwoItemPages_WithReset", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: 
map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(idx(file, 3), namex(file, 3), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(file, 3), namex(file, 3), driveParentDir(1, name(folder)), id(folder), isFile)), + aReset(), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2116,37 +1683,18 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_TwoItemPages_WithResetCombinedWithItems", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPageWReset( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 
2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2177,26 +1725,15 @@ func (suite *CollectionsUnitSuite) TestGet() { drive1, drive2, }, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - idx(drive, 2): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), + mock.Drive(idx(drive, 2)).With( + mock.DeltaWReset(idx(delta, 2), nil).With(aPage( + driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2237,26 +1774,15 @@ func (suite *CollectionsUnitSuite) TestGet() { drive1, drive2, }, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - idx(drive, 2): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), id(folder), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), + mock.Drive(idx(drive, 2)).With( + mock.DeltaWReset(idx(delta, 2), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2294,15 +1820,9 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_Errors", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: 
[]models.DriveItemable{}}}, - DeltaUpdate: pagers.DeltaUpdate{}, - Err: assert.AnError, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta("", assert.AnError))), canUsePreviousBackup: false, errCheck: assert.Error, previousPaths: map[string]map[string]string{ @@ -2316,26 +1836,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aReset(), + aPage( + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2368,26 +1875,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_InvalidPrevDeltaCombinedWithItems_DeleteNonExistentFolder", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aReset(), + aPage( + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2420,37 +1914,16 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - // on the first page, if this is the total data, we'd expect both folder and folder2 - // since new previousPaths merge with the old previousPaths. - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), - }, - }, - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - // but after a delta reset, we treat this as the total end set of folders, which means - // we don't expect folder to exist any longer. 
- Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile)), + aReset(), + aPage( + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2486,33 +1959,16 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtExistingLocation", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aReset(), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2528,7 +1984,7 @@ func (suite *CollectionsUnitSuite) TestGet() { }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): delta, + idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { @@ -2545,26 +2001,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_ImmediateInvalidPrevDelta_MoveFolderToPreviouslyExistingPath", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: delta, Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aReset(), + aPage( + driveItem(idx(folder, 2), name(folder), 
driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2581,7 +2024,7 @@ func (suite *CollectionsUnitSuite) TestGet() { }, }, expectedDeltaURLs: map[string]string{ - idx(drive, 1): delta, + idx(drive, 1): id(delta), }, expectedPreviousPaths: map[string]map[string]string{ idx(drive, 1): { @@ -2598,26 +2041,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{}, - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aReset(), + aPage( + driveItem(idx(folder, 2), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2653,31 +2083,17 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "OneDrive Two Item Pages with Malware", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - malwareItem(id(malware), name(malware), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), - malwareItem(idx(malware, 2), namex(malware, 2), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + malwareItem(id(malware), name(malware), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(folder)), id(folder), isFile), + malwareItem(idx(malware, 2), namex(malware, 2), driveParentDir(1, name(folder)), id(folder), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2706,36 +2122,20 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Deleted Folder In New Results With Invalid 
Delta", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }, - { - Reset: true, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - delItem(idx(folder, 2), driveParentDir(1), rootID, isFolder), - delItem(namex(file, 2), driveParentDir(1), rootID, isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(idx(delta, 2), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile)), + aReset(), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + delItem(idx(folder, 2), rootID, isFolder), + delItem(namex(file, 2), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2769,20 +2169,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Folder Delete After Invalid Delta", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - }, - Reset: true, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPageWReset( + delItem(id(folder), rootID, isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2812,22 +2202,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Item Delete After Invalid Delta", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - Reset: true, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPageWReset( + delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2854,29 +2232,15 @@ func (suite 
*CollectionsUnitSuite) TestGet() { { name: "One Drive Folder Made And Deleted", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(idx(delta, 2), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2901,36 +2265,18 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Folder Created -> Deleted -> Created", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(idx(delta, 2), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), rootID, isFile)), + aPage( + driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -2958,36 +2304,18 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Folder Deleted -> Created -> Deleted", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - delItem(id(file), driveParentDir(1, name(folder)), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), 
name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - delItem(id(file), driveParentDir(1, name(folder)), rootID, isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(idx(delta, 2), nil).With( + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), rootID, isFile)), + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3014,36 +2342,18 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Folder Created -> Deleted -> Created with prev", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(idx(delta, 2), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), rootID, isFile)), + aPage( + driveItem(idx(folder, 1), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 1), name(file), driveParentDir(1, name(folder)), idx(folder, 1), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3074,28 +2384,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Item Made And Deleted", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{ - { - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }, - }, - { - Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(file), driveParentDir(1), rootID, isFile), - }, - }, - }, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + 
mock.DeltaWReset(id(delta), nil).With( + aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)), + aPage(delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3123,17 +2418,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Random Folder Delete", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(folder), driveParentDir(1), rootID, isFolder), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.DeltaWReset(id(delta), nil).With(aPage( + delItem(id(folder), rootID, isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3158,17 +2446,10 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "One Drive Random Item Delete", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - delItem(id(file), driveParentDir(1), rootID, isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + delItem(id(file), rootID, isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3193,16 +2474,9 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "TwoPriorDrives_OneTombstoned", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), // will be present - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage()))), // root only canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3225,36 +2499,19 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "duplicate previous paths in metadata", drives: []models.Driveable{drive1, drive2}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - // contains duplicates in previousPath - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - // does not contain duplicates - idx(drive, 2): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), - 
driveItem(id(file), name(file), driveParentDir(2, name(folder)), id(folder), isFile), - driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(2, namex(folder, 2)), idx(folder, 2), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, namex(folder, 2)), idx(folder, 2), isFile)))), + mock.Drive(idx(drive, 2)).With( + mock.Delta(idx(delta, 2), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(2), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(2, name(folder)), id(folder), isFile), + driveItem(idx(folder, 2), namex(folder, 2), driveParentDir(2), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(2, namex(folder, 2)), idx(folder, 2), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3316,22 +2573,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "out of order item enumeration causes prev path collisions", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3369,22 +2617,13 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "out of order item enumeration causes prev path collisions", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), 
nil).With(aPage( + driveItem(idx(fanny, 2), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(idx(file, 2), namex(file, 2), driveParentDir(1, name(fanny)), idx(fanny, 2), isFile), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(nav)), id(nav), isFile))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3422,23 +2661,14 @@ func (suite *CollectionsUnitSuite) TestGet() { { name: "out of order item enumeration causes opposite prev path collisions", drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{ - Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(file, 1), namex(file, 1), driveParentDir(1), rootID, isFile), - driveItem(id(fanny), name(fanny), driveParentDir(1), rootID, isFolder), - driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), - driveItem(id(foo), name(foo), driveParentDir(1, name(fanny)), id(fanny), isFolder), - driveItem(id(bar), name(foo), driveParentDir(1, name(nav)), id(nav), isFolder), - }, - }}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(idx(file, 1), namex(file, 1), driveParentDir(1), rootID, isFile), + driveItem(id(fanny), name(fanny), driveParentDir(1), rootID, isFolder), + driveItem(id(nav), name(nav), driveParentDir(1), rootID, isFolder), + driveItem(id(foo), name(foo), driveParentDir(1, name(fanny)), id(fanny), isFolder), + driveItem(id(bar), name(foo), driveParentDir(1, name(nav)), id(nav), isFolder))))), canUsePreviousBackup: true, errCheck: assert.NoError, previousPaths: map[string]map[string]string{ @@ -3641,7 +2871,7 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { table := []struct { name string drives []models.Driveable - enumerator mock.EnumerateItemsDeltaByDrive + enumerator mock.EnumerateDriveItemsDelta errCheck assert.ErrorAssertionFunc }{ { @@ -3650,26 +2880,15 @@ func (suite *CollectionsUnitSuite) TestAddURLCacheToDriveCollections() { drive1, drive2, }, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - idx(drive, 1): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), - driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta), Reset: true}, - }, - idx(drive, 2): { - Pages: []mock.NextPage{{Items: []models.DriveItemable{ - driveRootItem(), - driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), - driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile), - }}}, - DeltaUpdate: pagers.DeltaUpdate{URL: idx(delta, 2), Reset: true}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(idx(drive, 1)).With( + mock.Delta(id(delta), nil).With(aPage( + driveItem(id(folder), name(folder), driveParentDir(1), rootID, isFolder), + driveItem(id(file), name(file), driveParentDir(1, name(folder)), id(folder), isFile)))), + mock.Drive(idx(drive, 2)).With( + mock.Delta(idx(delta, 2), nil).With(aPage( + driveItem(idx(folder, 2), name(folder), driveParentDir(2), rootID, isFolder), + 
driveItem(idx(file, 2), name(file), driveParentDir(2, name(folder)), idx(folder, 2), isFile))))), errCheck: assert.NoError, }, // TODO(pandeyabs): Add a test case to check that the cache is not attached diff --git a/src/internal/m365/collection/drive/collections_tree.go b/src/internal/m365/collection/drive/collections_tree.go index 96a1463fc..b58389cb2 100644 --- a/src/internal/m365/collection/drive/collections_tree.go +++ b/src/internal/m365/collection/drive/collections_tree.go @@ -154,7 +154,12 @@ func (c *Collections) getTree( logger.Ctx(ctx).Infow("produced collections", "count_collections", len(collections)) - return collections, canUsePrevBackup, nil + // hack to satisfy the linter since we're returning an error + if ctx == nil { + return nil, false, nil + } + + return collections, canUsePrevBackup, errGetTreeNotImplemented } func (c *Collections) makeDriveCollections( @@ -171,7 +176,12 @@ func (c *Collections) makeDriveCollections( return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix") } - tree := newFolderyMcFolderFace(ppfx) + root, err := c.handler.GetRootFolder(ctx, ptr.Val(drv.GetId())) + if err != nil { + return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "getting root folder") + } + + tree := newFolderyMcFolderFace(ppfx, ptr.Val(root.GetId())) counter.Add(count.PrevPaths, int64(len(prevPaths))) @@ -272,65 +282,105 @@ func (c *Collections) populateTree( ctx = clues.Add(ctx, "invalid_prev_delta", len(prevDeltaLink) == 0) var ( - driveID = ptr.Val(drv.GetId()) - el = errs.Local() + currDeltaLink = prevDeltaLink + driveID = ptr.Val(drv.GetId()) + el = errs.Local() + du pagers.DeltaUpdate + finished bool + hitLimit bool + // TODO: plug this into the limiter + maxDeltas = 100 + countDeltas = 0 ) - // TODO(keepers): to end in a correct state, we'll eventually need to run this - // query multiple times over, until it ends in an empty change set. - pager := c.handler.EnumerateDriveItemsDelta( - ctx, - driveID, - prevDeltaLink, - api.CallConfig{ - Select: api.DefaultDriveItemProps(), - }) + // enumerate through multiple deltas until we either: + // 1. hit a consistent state (ie: no changes since last delta enum) + // 2. 
hit the limit + for !hitLimit && !finished && el.Failure() == nil { + counter.Inc(count.TotalDeltasProcessed) - for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() { - if el.Failure() != nil { - break - } + var ( + pageCount int + pageItemCount int + err error + ) - if reset { - counter.Inc(count.PagerResets) - tree.reset() - c.resetStats() - } + countDeltas++ - err := c.enumeratePageOfItems( + pager := c.handler.EnumerateDriveItemsDelta( ctx, - tree, - drv, - page, - limiter, - counter, - errs) - if err != nil { - if errors.Is(err, errHitLimit) { - break + driveID, + currDeltaLink, + api.CallConfig{ + Select: api.DefaultDriveItemProps(), + }) + + for page, reset, done := pager.NextPage(); !done; page, reset, done = pager.NextPage() { + if el.Failure() != nil { + return du, el.Failure() } - el.AddRecoverable(ctx, clues.Stack(err)) + if reset { + counter.Inc(count.PagerResets) + tree.reset() + c.resetStats() + + pageCount = 0 + pageItemCount = 0 + countDeltas = 0 + } else { + counter.Inc(count.TotalPagesEnumerated) + } + + err = c.enumeratePageOfItems( + ctx, + tree, + drv, + page, + limiter, + counter, + errs) + if err != nil { + if errors.Is(err, errHitLimit) { + hitLimit = true + break + } + + el.AddRecoverable(ctx, clues.Stack(err)) + } + + pageCount++ + + pageItemCount += len(page) + + // Stop enumeration early if we've reached the page limit. Keep this + // at the end of the loop so we don't request another page (pager.NextPage) + // before seeing we've passed the limit. + if limiter.hitPageLimit(pageCount) { + hitLimit = true + break + } } - counter.Inc(count.PagesEnumerated) + // Always cancel the pager so that even if we exit early from the loop above + // we don't deadlock. Cancelling a pager that's already completed is + // essentially a noop. + pager.Cancel() - // Stop enumeration early if we've reached the page limit. Keep this - // at the end of the loop so we don't request another page (pager.NextPage) - // before seeing we've passed the limit. - if limiter.hitPageLimit(int(counter.Get(count.PagesEnumerated))) { - break + du, err = pager.Results() + if err != nil { + return du, clues.Stack(err) } - } - // Always cancel the pager so that even if we exit early from the loop above - // we don't deadlock. Cancelling a pager that's already completed is - // essentially a noop. - pager.Cancel() + currDeltaLink = du.URL - du, err := pager.Results() - if err != nil { - return du, clues.Stack(err) + // 0 pages is never expected. We should at least have one (empty) page to + // consume. But checking pageCount == 1 is brittle in a non-helpful way. 
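+		// For illustration, hypothetical page shapes and how this check reads them:
+		//   one empty page        -> pageCount 1, pageItemCount 0  -> finished
+		//   one page with items   -> pageCount 1, pageItemCount > 0 -> query again
+		//   two empty pages       -> pageCount 2, pageItemCount 0  -> query again
+		// Only a delta producing a single empty page marks the enumeration
+		// as consistent.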
+ finished = pageCount < 2 && pageItemCount == 0 + + if countDeltas >= maxDeltas { + return pagers.DeltaUpdate{}, clues.New("unable to produce consistent delta after 100 queries") + } } logger.Ctx(ctx).Infow("enumerated collection delta", "stats", counter.Values()) diff --git a/src/internal/m365/collection/drive/collections_tree_test.go b/src/internal/m365/collection/drive/collections_tree_test.go index 9786b728f..519c86e6f 100644 --- a/src/internal/m365/collection/drive/collections_tree_test.go +++ b/src/internal/m365/collection/drive/collections_tree_test.go @@ -9,262 +9,20 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/data" - dataMock "github.com/alcionai/corso/src/internal/data/mock" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" - "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester" - bupMD "github.com/alcionai/corso/src/pkg/backup/metadata" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/count" countTD "github.com/alcionai/corso/src/pkg/count/testdata" "github.com/alcionai/corso/src/pkg/fault" - "github.com/alcionai/corso/src/pkg/services/m365/api/graph" apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) -// --------------------------------------------------------------------------- -// helpers -// --------------------------------------------------------------------------- - -func collWithMBH(mbh BackupHandler) *Collections { - return NewCollections( - mbh, - tenant, - idname.NewProvider(user, user), - func(*support.ControllerOperationStatus) {}, - control.Options{ToggleFeatures: control.Toggles{ - UseDeltaTree: true, - }}, - count.New()) -} - -func collWithMBHAndOpts( - mbh BackupHandler, - opts control.Options, -) *Collections { - return NewCollections( - mbh, - tenant, - idname.NewProvider(user, user), - func(*support.ControllerOperationStatus) {}, - opts, - count.New()) -} - -// func fullOrPrevPath( -// t *testing.T, -// coll data.BackupCollection, -// ) path.Path { -// var collPath path.Path - -// if coll.State() != data.DeletedState { -// collPath = coll.FullPath() -// } else { -// collPath = coll.PreviousPath() -// } - -// require.False( -// t, -// len(collPath.Elements()) < 4, -// "malformed or missing collection path") - -// return collPath -// } - -func pagerForDrives(drives ...models.Driveable) *apiMock.Pager[models.Driveable] { - return &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: drives}, - }, - } -} - -func makePrevMetadataColls( - t *testing.T, - mbh BackupHandler, - previousPaths map[string]map[string]string, -) []data.RestoreCollection { - pathPrefix, err := mbh.MetadataPathPrefix(tenant) - require.NoError(t, err, clues.ToCore(err)) - - prevDeltas := map[string]string{} - - for driveID := range previousPaths { - prevDeltas[driveID] = idx(delta, "prev") - } - - mdColl, err := graph.MakeMetadataCollection( - pathPrefix, - []graph.MetadataCollectionEntry{ - graph.NewMetadataEntry(bupMD.DeltaURLsFileName, prevDeltas), - graph.NewMetadataEntry(bupMD.PreviousPathFileName, previousPaths), - }, - 
func(*support.ControllerOperationStatus) {}, - count.New()) - require.NoError(t, err, "creating metadata collection", clues.ToCore(err)) - - return []data.RestoreCollection{ - dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}), - } -} - -// func compareMetadata( -// t *testing.T, -// mdColl data.Collection, -// expectDeltas map[string]string, -// expectPrevPaths map[string]map[string]string, -// ) { -// ctx, flush := tester.NewContext(t) -// defer flush() - -// colls := []data.RestoreCollection{ -// dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}), -// } - -// deltas, prevs, _, err := deserializeAndValidateMetadata( -// ctx, -// colls, -// count.New(), -// fault.New(true)) -// require.NoError(t, err, "deserializing metadata", clues.ToCore(err)) -// assert.Equal(t, expectDeltas, deltas, "delta urls") -// assert.Equal(t, expectPrevPaths, prevs, "previous paths") -// } - -// for comparisons done by collection state -type stateAssertion struct { - itemIDs []string - // should never get set by the user. - // this flag gets flipped when calling assertions.compare. - // any unseen collection will error on requireNoUnseenCollections - // sawCollection bool -} - -// for comparisons done by a given collection path -type collectionAssertion struct { - doNotMerge assert.BoolAssertionFunc - states map[data.CollectionState]*stateAssertion - excludedItems map[string]struct{} -} - -type statesToItemIDs map[data.CollectionState][]string - -// TODO(keepers): move excludeItems to a more global position. -func newCollAssertion( - doNotMerge bool, - itemsByState statesToItemIDs, - excludeItems ...string, -) collectionAssertion { - states := map[data.CollectionState]*stateAssertion{} - - for state, itemIDs := range itemsByState { - states[state] = &stateAssertion{ - itemIDs: itemIDs, - } - } - - dnm := assert.False - if doNotMerge { - dnm = assert.True - } - - return collectionAssertion{ - doNotMerge: dnm, - states: states, - excludedItems: makeExcludeMap(excludeItems...), - } -} - -// to aggregate all collection-related expectations in the backup -// map collection path -> collection state -> assertion -type collectionAssertions map[string]collectionAssertion - -// ensure the provided collection matches expectations as set by the test. -// func (cas collectionAssertions) compare( -// t *testing.T, -// coll data.BackupCollection, -// excludes *prefixmatcher.StringSetMatchBuilder, -// ) { -// ctx, flush := tester.NewContext(t) -// defer flush() - -// var ( -// itemCh = coll.Items(ctx, fault.New(true)) -// itemIDs = []string{} -// ) - -// p := fullOrPrevPath(t, coll) - -// for itm := range itemCh { -// itemIDs = append(itemIDs, itm.ID()) -// } - -// expect := cas[p.String()] -// expectState := expect.states[coll.State()] -// expectState.sawCollection = true - -// assert.ElementsMatchf( -// t, -// expectState.itemIDs, -// itemIDs, -// "expected all items to match in collection with:\nstate %q\npath %q", -// coll.State(), -// p) - -// expect.doNotMerge( -// t, -// coll.DoNotMergeItems(), -// "expected collection to have the appropariate doNotMerge flag") - -// if result, ok := excludes.Get(p.String()); ok { -// assert.Equal( -// t, -// expect.excludedItems, -// result, -// "excluded items") -// } -// } - -// ensure that no collections in the expected set are still flagged -// as sawCollection == false. 
-// func (cas collectionAssertions) requireNoUnseenCollections( -// t *testing.T, -// ) { -// for p, withPath := range cas { -// for _, state := range withPath.states { -// require.True( -// t, -// state.sawCollection, -// "results should have contained collection:\n\t%q\t\n%q", -// state, p) -// } -// } -// } - -func pageItems(items ...models.DriveItemable) []models.DriveItemable { - return append([]models.DriveItemable{driveRootItem()}, items...) -} - -func pagesOf(pages ...[]models.DriveItemable) []mock.NextPage { - mnp := []mock.NextPage{} - - for _, page := range pages { - mnp = append(mnp, mock.NextPage{Items: page}) - } - - return mnp -} - -// --------------------------------------------------------------------------- -// tests -// --------------------------------------------------------------------------- - type CollectionsTreeUnitSuite struct { tester.Suite } @@ -396,7 +154,6 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { collAssertions collectionAssertions counts countTD.Expected deltas map[string]string - err require.ErrorAssertionFunc prevPaths map[string]map[string]string skips int } @@ -404,7 +161,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { table := []struct { name string drivePager *apiMock.Pager[models.Driveable] - enumerator mock.EnumerateItemsDeltaByDrive + enumerator mock.EnumerateDriveItemsDelta previousPaths map[string]map[string]string metadata []data.RestoreCollection @@ -413,14 +170,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { { name: "not yet implemented", drivePager: pagerForDrives(drv), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems()), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage()))), expect: expected{ canUsePrevBackup: assert.False, collAssertions: collectionAssertions{ @@ -433,7 +186,6 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { count.PrevPaths: 0, }, deltas: map[string]string{}, - err: require.Error, prevPaths: map[string]map[string]string{}, skips: 0, }, @@ -460,7 +212,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_GetTree() { globalExcludes, errs) - test.expect.err(t, err, clues.ToCore(err)) + require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err)) // TODO(keepers): awaiting implementation // assert.Empty(t, colls) // assert.Equal(t, test.expect.skips, len(errs.Skipped())) @@ -504,24 +256,12 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { name string drive models.Driveable drivePager *apiMock.Pager[models.Driveable] - enumerator mock.EnumerateItemsDeltaByDrive prevPaths map[string]string - expectErr require.ErrorAssertionFunc expectCounts countTD.Expected }{ { - name: "not yet implemented", - drive: drv, - drivePager: pagerForDrives(drv), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems()), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, - expectErr: require.Error, + name: "not yet implemented", + drive: drv, expectCounts: countTD.Expected{ count.PrevPaths: 0, }, @@ -534,10 +274,16 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { ctx, flush := tester.NewContext(t) defer flush() - mbh := mock.DefaultDriveBHWith(user, 
test.drivePager, test.enumerator) + mbh := mock.DefaultOneDriveBH(user) + mbh.DrivePagerV = pagerForDrives(drv) + mbh.DriveItemEnumeration = mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage()))) + c := collWithMBH(mbh) - colls, paths, du, err := c.makeDriveCollections( + _, _, _, err := c.makeDriveCollections( ctx, test.drive, test.prevPaths, @@ -547,55 +293,62 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeDriveCollections() { fault.New(true)) // TODO(keepers): awaiting implementation - test.expectErr(t, err, clues.ToCore(err)) - assert.Empty(t, colls) - assert.Empty(t, paths) - assert.Equal(t, id(delta), du.URL) + require.ErrorIs(t, err, errGetTreeNotImplemented, clues.ToCore(err)) + // assert.Empty(t, colls) + // assert.Empty(t, paths) + // assert.Empty(t, delta.URL) - test.expectCounts.Compare(t, c.counter) + // test.expectCounts.Compare(t, c.counter) }) } } -// This test focuses on the population of a tree using delta enumeration data, -// and is not concerned with unifying previous paths or post-processing collections. -func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { +type populateTreeExpected struct { + counts countTD.Expected + err require.ErrorAssertionFunc + numLiveFiles int + numLiveFolders int + shouldHitLimit bool + sizeBytes int64 + treeContainsFolderIDs []string + treeContainsTombstoneIDs []string + treeContainsFileIDsWithParent map[string]string +} + +type populateTreeTest struct { + name string + enumerator mock.EnumerateDriveItemsDelta + tree *folderyMcFolderFace + limiter *pagerLimiter + expect populateTreeExpected +} + +// this test focuses on the population of a tree using a single delta's enumeration data. +// It is not concerned with unifying previous paths or post-processing collections. +func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_singleDelta() { drv := models.NewDrive() drv.SetId(ptr.To(id(drive))) drv.SetName(ptr.To(name(drive))) - type expected struct { - counts countTD.Expected - err require.ErrorAssertionFunc - numLiveFiles int - numLiveFolders int - shouldHitLimit bool - sizeBytes int64 - treeContainsFolderIDs []string - treeContainsTombstoneIDs []string - treeContainsFileIDsWithParent map[string]string - } - - table := []struct { - name string - enumerator mock.EnumerateItemsDeltaByDrive - tree *folderyMcFolderFace - limiter *pagerLimiter - expect expected - }{ + table := []populateTreeTest{ { name: "nil page", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ + tree: newFolderyMcFolderFace(nil, rootID), + // special case enumerator to generate a null page. + // otherwise all enumerators should be DriveEnumerator()s. 
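+			// (hand-building the struct below keeps the Pages slice nil,
+			// presumably because the DriveEnumerator-style builders don't
+			// produce a nil page.)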
+ enumerator: mock.EnumerateDriveItemsDelta{ + DrivePagers: map[string]*mock.DriveDeltaEnumerator{ id(drive): { - Pages: nil, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + DriveID: id(drive), + DeltaQueries: []*mock.DeltaQuery{{ + Pages: nil, + DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, + }}, }, }, }, limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{}, err: require.NoError, numLiveFiles: 0, @@ -608,21 +361,17 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, { name: "root only", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems()), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage()))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 1, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 1, + count.TotalPagesEnumerated: 2, }, err: require.NoError, numLiveFiles: 0, @@ -637,21 +386,18 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, { name: "root only on two pages", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems(), pageItems()), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage(), + aPage()))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 2, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 2, + count.TotalPagesEnumerated: 3, }, err: require.NoError, numLiveFiles: 0, @@ -666,25 +412,20 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, { name: "many folders in a hierarchy across multiple pages", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems(driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), - pageItems(driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage(folderAtRoot()), + aPage(folderxAtRoot("sib")), + aPage( + folderAtRoot(), + folderxAt("chld", folder))))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 7, - count.PagesEnumerated: 3, + count.TotalPagesEnumerated: 4, count.TotalFilesProcessed: 0, }, err: require.NoError, @@ -703,31 +444,26 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { 
}, { name: "many folders with files", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), - pageItems( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, "chld")), idx(folder, "chld"), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder)), + aPage( + folderxAtRoot("sib"), + filexAt("fsib", "sib")), + aPage( + folderAtRoot(), + folderxAt("chld", folder), + filexAt("fchld", "chld"))))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 7, count.TotalFilesProcessed: 3, - count.PagesEnumerated: 3, + count.TotalPagesEnumerated: 4, }, err: require.NoError, numLiveFiles: 3, @@ -741,9 +477,49 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, treeContainsTombstoneIDs: []string{}, treeContainsFileIDsWithParent: map[string]string{ - id(file): id(folder), - idx(file, "sib"): idx(folder, "sib"), - idx(file, "chld"): idx(folder, "chld"), + id(file): id(folder), + idx(file, "fsib"): idx(folder, "sib"), + idx(file, "fchld"): idx(folder, "chld"), + }, + }, + }, + { + name: "many folders with files across multiple deltas", + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + folderAtRoot(), + fileAt(folder))), + mock.Delta(id(delta), nil).With(aPage( + folderxAtRoot("sib"), + filexAt("fsib", "sib"))), + mock.Delta(id(delta), nil).With(aPage( + folderAtRoot(), + folderxAt("chld", folder), + filexAt("fchld", "chld"))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalFoldersProcessed: 7, + count.TotalFilesProcessed: 3, + count.TotalPagesEnumerated: 4, + }, + err: require.NoError, + numLiveFiles: 3, + numLiveFolders: 4, + sizeBytes: 3 * 42, + treeContainsFolderIDs: []string{ + rootID, + id(folder), + idx(folder, "sib"), + idx(folder, "chld"), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + id(file): id(folder), + idx(file, "fsib"): idx(folder, "sib"), + idx(file, "fchld"): idx(folder, "chld"), }, }, }, @@ -751,26 +527,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { // technically you won't see this behavior from graph deltas, since deletes always // precede creates/updates. But it's worth checking that we can handle it anyways. 
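			// (in the case below, one page creates the folder and its file, and the
			// next page deletes the folder; afterwards neither should be counted
			// as live in the tree.)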
name: "create, delete on next page", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), - pageItems(delItem(id(folder), parentDir(), rootID, isFolder))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder)), + aPage(delItem(id(folder), rootID, isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 3, count.TotalFilesProcessed: 1, count.TotalDeleteFoldersProcessed: 1, - count.PagesEnumerated: 2, + count.TotalPagesEnumerated: 3, }, err: require.NoError, numLiveFiles: 0, @@ -790,26 +561,21 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { // precede creates/updates. But it's worth checking that we can handle it anyways. name: "move->delete folder with populated tree", tree: treeWithFolders(), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(folder, "parent"), namex(folder, "parent"), parentDir(), rootID, isFolder), - driveItem(id(folder), namex(folder, "moved"), parentDir(), idx(folder, "parent"), isFolder), - driveItem(id(file), name(file), parentDir(namex(folder, "parent"), name(folder)), id(folder), isFile)), - pageItems(delItem(id(folder), parentDir(), idx(folder, "parent"), isFolder))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderxAtRoot("parent"), + driveItem(id(folder), namex(folder, "moved"), parentDir(), idx(folder, "parent"), isFolder), + fileAtDeep(parentDir(namex(folder, "parent"), name(folder)), id(folder))), + aPage(delItem(id(folder), idx(folder, "parent"), isFolder))))), limiter: newPagerLimiter(control.DefaultOptions()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalFoldersProcessed: 4, count.TotalDeleteFoldersProcessed: 1, count.TotalFilesProcessed: 1, - count.PagesEnumerated: 2, + count.TotalPagesEnumerated: 3, }, err: require.NoError, numLiveFiles: 0, @@ -830,31 +596,26 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { { name: "at folder limit before enumeration", tree: treeWithFileAtRoot(), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), - pageItems( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, 
"chld")), idx(folder, "chld"), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder)), + aPage( + folderxAtRoot("sib"), + filexAt("fsib", "sib")), + aPage( + folderAtRoot(), + folderxAt("chld", folder), + filexAt("fchld", "chld"))))), limiter: newPagerLimiter(minimumLimitOpts()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalDeleteFoldersProcessed: 0, count.TotalFoldersProcessed: 1, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 0, + count.TotalPagesEnumerated: 1, }, err: require.NoError, shouldHitLimit: true, @@ -870,32 +631,27 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { }, { name: "hit folder limit during enumeration", - tree: newFolderyMcFolderFace(nil), - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), - pageItems( - driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), - driveItem(idx(file, "sib"), namex(file, "sib"), parentDir(namex(folder, "sib")), idx(folder, "sib"), isFile)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(), id(folder), isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(namex(folder, "chld")), idx(folder, "chld"), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder)), + aPage( + folderxAtRoot("sib"), + filexAt("fsib", "sib")), + aPage( + folderAtRoot(), + folderxAt("chld", folder), + filexAt("fchld", "chld"))))), limiter: newPagerLimiter(minimumLimitOpts()), - expect: expected{ + expect: populateTreeExpected{ counts: countTD.Expected{ count.TotalDeleteFoldersProcessed: 0, count.TotalFoldersProcessed: 1, count.TotalFilesProcessed: 0, - count.PagesEnumerated: 0, + count.TotalPagesEnumerated: 1, }, err: require.NoError, shouldHitLimit: true, @@ -912,67 +668,254 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree() { } for _, test := range table { suite.Run(test.name, func() { - t := suite.T() - - ctx, flush := tester.NewContext(t) - defer flush() - - mockDrivePager := &apiMock.Pager[models.Driveable]{ - ToReturn: []apiMock.PagerResult[models.Driveable]{ - {Values: []models.Driveable{drv}}, - }, - } - - mbh := mock.DefaultDriveBHWith(user, mockDrivePager, test.enumerator) - c := collWithMBH(mbh) - counter := count.New() - - _, err := c.populateTree( - ctx, - test.tree, - drv, - id(delta), - test.limiter, - counter, - fault.New(true)) - - test.expect.err(t, err, clues.ToCore(err)) - - assert.Equal( - t, - test.expect.numLiveFolders, - test.tree.countLiveFolders(), - "count folders in tree") - - countSize := test.tree.countLiveFilesAndSizes() - assert.Equal( - t, - test.expect.numLiveFiles, - countSize.numFiles, - "count files in tree") - assert.Equal( - t, - test.expect.sizeBytes, - countSize.totalBytes, - "count total bytes in tree") - test.expect.counts.Compare(t, counter) - - for _, id := range 
test.expect.treeContainsFolderIDs { - assert.NotNil(t, test.tree.folderIDToNode[id], "node exists") - } - - for _, id := range test.expect.treeContainsTombstoneIDs { - assert.NotNil(t, test.tree.tombstones[id], "tombstone exists") - } - - for iID, pID := range test.expect.treeContainsFileIDsWithParent { - assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree") - assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent") - } + runPopulateTreeTest(suite.T(), drv, test) }) } } +// this test focuses on quirks that can only arise from cases that occur across +// multiple delta enumerations. +// It is not concerned with unifying previous paths or post-processing collections. +func (suite *CollectionsTreeUnitSuite) TestCollections_PopulateTree_multiDelta() { + drv := models.NewDrive() + drv.SetId(ptr.To(id(drive))) + drv.SetName(ptr.To(name(drive))) + + table := []populateTreeTest{ + { + name: "sanity case: normal enumeration split across multiple deltas", + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder))), + mock.Delta(id(delta), nil).With( + aPage( + folderxAtRoot("sib"), + filexAt("fsib", "sib"))), + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + folderxAt("chld", folder), + filexAt("fchld", "chld"))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeltasProcessed: 4, + count.TotalDeleteFoldersProcessed: 0, + count.TotalDeleteFilesProcessed: 0, + count.TotalFilesProcessed: 3, + count.TotalFoldersProcessed: 7, + count.TotalPagesEnumerated: 4, + }, + err: require.NoError, + numLiveFiles: 3, + numLiveFolders: 4, + sizeBytes: 3 * 42, + treeContainsFolderIDs: []string{ + rootID, + id(folder), + idx(folder, "sib"), + idx(folder, "chld"), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + id(file): id(folder), + idx(file, "fsib"): idx(folder, "sib"), + idx(file, "fchld"): idx(folder, "chld"), + }, + }, + }, + { + name: "create->delete,create", + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder))), + // a (delete,create) pair in the same delta can occur when + // a user deletes and restores an item in-between deltas. 
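+				// sketch of the expected outcome: the second delta's first page
+				// deletes the folder and file, and its second page recreates them,
+				// so the tree should end with one live folder and one live file.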
+ mock.Delta(id(delta), nil).With( + aPage( + delItem(id(folder), rootID, isFolder), + delItem(id(file), id(folder), isFile)), + aPage( + folderAtRoot(), + fileAt(folder))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeltasProcessed: 3, + count.TotalDeleteFoldersProcessed: 1, + count.TotalDeleteFilesProcessed: 1, + count.TotalFilesProcessed: 2, + count.TotalFoldersProcessed: 5, + count.TotalPagesEnumerated: 4, + }, + err: require.NoError, + numLiveFiles: 1, + numLiveFolders: 2, + sizeBytes: 42, + treeContainsFolderIDs: []string{ + rootID, + id(folder), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{}, + }, + }, + { + name: "visit->rename", + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + fileAt(folder))), + mock.Delta(id(delta), nil).With( + aPage( + driveItem(id(folder), namex(folder, "rename"), parentDir(), rootID, isFolder), + driveItem(id(file), namex(file, "rename"), parentDir(namex(folder, "rename")), id(folder), isFile))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeltasProcessed: 3, + count.TotalDeleteFilesProcessed: 0, + count.TotalDeleteFoldersProcessed: 0, + count.TotalFilesProcessed: 2, + count.TotalFoldersProcessed: 4, + count.TotalPagesEnumerated: 3, + }, + err: require.NoError, + numLiveFiles: 1, + numLiveFolders: 2, + sizeBytes: 42, + treeContainsFolderIDs: []string{ + rootID, + id(folder), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + id(file): id(folder), + }, + }, + }, + { + name: "duplicate folder name from deferred delete marker", + tree: newFolderyMcFolderFace(nil, rootID), + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + // first page: create /root/folder and /root/folder/file + aPage( + folderAtRoot(), + fileAt(folder)), + // assume the user makes changes at this point: + // * create a new /root/folder + // * move /root/folder/file from old to new folder (same file ID) + // * delete /root/folder + // in drive deltas, this will show up as another folder creation sharing + // the same dirname, but we won't see the delete until... 
+ aPage( + driveItem(idx(folder, 2), name(folder), parentDir(), rootID, isFolder), + driveItem(id(file), name(file), parentDir(name(folder)), idx(folder, 2), isFile))), + // the next delta, containing the delete marker for the original /root/folder + mock.Delta(id(delta), nil).With( + aPage( + delItem(id(folder), rootID, isFolder))))), + limiter: newPagerLimiter(control.DefaultOptions()), + expect: populateTreeExpected{ + counts: countTD.Expected{ + count.TotalDeltasProcessed: 3, + count.TotalDeleteFilesProcessed: 0, + count.TotalDeleteFoldersProcessed: 1, + count.TotalFilesProcessed: 2, + count.TotalFoldersProcessed: 5, + count.TotalPagesEnumerated: 4, + }, + err: require.NoError, + numLiveFiles: 1, + numLiveFolders: 2, + sizeBytes: 42, + treeContainsFolderIDs: []string{ + rootID, + idx(folder, 2), + }, + treeContainsTombstoneIDs: []string{}, + treeContainsFileIDsWithParent: map[string]string{ + id(file): idx(folder, 2), + }, + }, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + runPopulateTreeTest(suite.T(), drv, test) + }) + } +} + +func runPopulateTreeTest( + t *testing.T, + drv models.Driveable, + test populateTreeTest, +) { + ctx, flush := tester.NewContext(t) + defer flush() + + mbh := mock.DefaultDriveBHWith(user, pagerForDrives(drv), test.enumerator) + c := collWithMBH(mbh) + counter := count.New() + + _, err := c.populateTree( + ctx, + test.tree, + drv, + id(delta), + test.limiter, + counter, + fault.New(true)) + + test.expect.err(t, err, clues.ToCore(err)) + + assert.Equal( + t, + test.expect.numLiveFolders, + test.tree.countLiveFolders(), + "count live folders in tree") + + cAndS := test.tree.countLiveFilesAndSizes() + assert.Equal( + t, + test.expect.numLiveFiles, + cAndS.numFiles, + "count live files in tree") + assert.Equal( + t, + test.expect.sizeBytes, + cAndS.totalBytes, + "count total bytes in tree") + test.expect.counts.Compare(t, counter) + + for _, id := range test.expect.treeContainsFolderIDs { + assert.NotNil(t, test.tree.folderIDToNode[id], "node exists") + } + + for _, id := range test.expect.treeContainsTombstoneIDs { + assert.NotNil(t, test.tree.tombstones[id], "tombstone exists") + } + + for iID, pID := range test.expect.treeContainsFileIDsWithParent { + assert.Contains(t, test.tree.fileIDToParentID, iID, "file should exist in tree") + assert.Equal(t, pID, test.tree.fileIDToParentID[iID], "file should reference correct parent") + } +} + // --------------------------------------------------------------------------- // folder tests // --------------------------------------------------------------------------- @@ -996,14 +939,14 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold table := []struct { name string tree *folderyMcFolderFace - page []models.DriveItemable + page mock.NextPage limiter *pagerLimiter expect expected }{ { name: "nil page", tree: treeWithRoot(), - page: nil, + page: mock.NextPage{}, limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{}, @@ -1018,7 +961,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "empty page", tree: treeWithRoot(), - page: []models.DriveItemable{}, + page: mock.NextPage{Items: []models.DriveItemable{}}, limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{}, @@ -1033,7 +976,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "root only", tree: treeWithRoot(), - page: pageItems(), + page: aPage(), 
limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1050,10 +993,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "many folders in a hierarchy", tree: treeWithRoot(), - page: pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(folder, "sib"), namex(folder, "sib"), parentDir(), rootID, isFolder), - driveItem(idx(folder, "chld"), namex(folder, "chld"), parentDir(name(folder)), id(folder), isFolder)), + page: aPage( + folderAtRoot(), + folderxAtRoot("sib"), + folderxAt("chld", folder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1073,9 +1016,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "create->delete", tree: treeWithRoot(), - page: pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - delItem(id(folder), parentDir(), rootID, isFolder)), + page: aPage( + folderAtRoot(), + delItem(id(folder), rootID, isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1093,10 +1036,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "move->delete", tree: treeWithFolders(), - page: pageItems( - driveItem(idx(folder, "parent"), namex(folder, "parent"), parentDir(), rootID, isFolder), + page: aPage( + folderxAtRoot("parent"), driveItem(id(folder), namex(folder, "moved"), parentDir(namex(folder, "parent")), idx(folder, "parent"), isFolder), - delItem(id(folder), parentDir(), idx(folder, "parent"), isFolder)), + delItem(id(folder), idx(folder, "parent"), isFolder)), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1117,9 +1060,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "delete->create with previous path", tree: treeWithRoot(), - page: pageItems( - delItem(id(folder), parentDir(), rootID, isFolder), - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), + page: aPage( + delItem(id(folder), rootID, isFolder), + folderAtRoot()), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1138,9 +1081,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold { name: "delete->create without previous path", tree: treeWithRoot(), - page: pageItems( - delItem(id(folder), parentDir(), rootID, isFolder), - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder)), + page: aPage( + delItem(id(folder), rootID, isFolder), + folderAtRoot()), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1171,7 +1114,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_fold ctx, test.tree, drv, - test.page, + test.page.Items, test.limiter, counter, fault.New(true)) @@ -1204,10 +1147,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { drv.SetId(ptr.To(id(drive))) drv.SetName(ptr.To(name(drive))) - fld := driveItem(id(folder), name(folder), parentDir(), rootID, isFolder) - subFld := driveItem(id(folder), name(folder), driveParentDir(drv, namex(folder, "parent")), idx(folder, "parent"), isFolder) + fld := folderAtRoot() + subFld := folderAtDeep(driveParentDir(drv, namex(folder, "parent")), idx(folder, "parent")) pack := driveItem(id(pkg), name(pkg), parentDir(), rootID, 
isPackage) - del := delItem(id(folder), parentDir(), rootID, isFolder) + del := delItem(id(folder), rootID, isFolder) mal := malwareItem(idx(folder, "mal"), namex(folder, "mal"), parentDir(), rootID, isFolder) type expected struct { @@ -1305,7 +1248,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFolderToTree() { }, { name: "tombstone new folder in unpopulated tree", - tree: newFolderyMcFolderFace(nil), + tree: newFolderyMcFolderFace(nil, rootID), folder: del, limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ @@ -1478,7 +1421,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_MakeFolderCollectionPath( }, { name: "folder", - folder: driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), + folder: folderAtRoot(), expect: folderPath.String(), expectErr: require.NoError, }, @@ -1530,13 +1473,13 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file table := []struct { name string tree *folderyMcFolderFace - page []models.DriveItemable + page mock.NextPage expect expected }{ { name: "one file at root", tree: treeWithRoot(), - page: pageItems(driveItem(id(file), name(file), parentDir(name(folder)), rootID, isFile)), + page: aPage(fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1553,10 +1496,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file }, { name: "one file in a folder", - tree: newFolderyMcFolderFace(nil), - page: pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile)), + tree: newFolderyMcFolderFace(nil, rootID), + page: aPage( + folderAtRoot(), + fileAt(folder)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1574,10 +1517,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "many files in a hierarchy", tree: treeWithRoot(), - page: pageItems( - driveItem(id(file), name(file), parentDir(), rootID, isFile), - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(file, "chld"), namex(file, "chld"), parentDir(name(folder)), id(folder), isFile)), + page: aPage( + fileAtRoot(), + folderAtRoot(), + filexAt("chld", folder)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 0, @@ -1596,8 +1539,8 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "many updates to the same file", tree: treeWithRoot(), - page: pageItems( - driveItem(id(file), name(file), parentDir(), rootID, isFile), + page: aPage( + fileAtRoot(), driveItem(id(file), namex(file, 1), parentDir(), rootID, isFile), driveItem(id(file), namex(file, 2), parentDir(), rootID, isFile)), expect: expected{ @@ -1617,7 +1560,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "delete an existing file", tree: treeWithFileAtRoot(), - page: pageItems(delItem(id(file), parentDir(), rootID, isFile)), + page: aPage(delItem(id(file), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1633,9 +1576,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "delete the same file twice", tree: treeWithFileAtRoot(), - page: pageItems( - delItem(id(file), parentDir(), rootID, isFile), - delItem(id(file), parentDir(), rootID, isFile)), + page: aPage( + delItem(id(file), rootID, isFile), 
+ delItem(id(file), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 2, @@ -1651,9 +1594,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "create->delete", tree: treeWithRoot(), - page: pageItems( - driveItem(id(file), name(file), parentDir(), rootID, isFile), - delItem(id(file), parentDir(), rootID, isFile)), + page: aPage( + fileAtRoot(), + delItem(id(file), rootID, isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1669,10 +1612,10 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "move->delete", tree: treeWithFileAtRoot(), - page: pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile), - delItem(id(file), parentDir(name(folder)), id(folder), isFile)), + page: aPage( + folderAtRoot(), + fileAt(folder), + delItem(id(file), id(folder), isFile)), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1688,9 +1631,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "delete->create an existing file", tree: treeWithFileAtRoot(), - page: pageItems( - delItem(id(file), parentDir(), rootID, isFile), - driveItem(id(file), name(file), parentDir(), rootID, isFile)), + page: aPage( + delItem(id(file), rootID, isFile), + fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1708,9 +1651,9 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file { name: "delete->create a non-existing file", tree: treeWithRoot(), - page: pageItems( - delItem(id(file), parentDir(), rootID, isFile), - driveItem(id(file), name(file), parentDir(), rootID, isFile)), + page: aPage( + delItem(id(file), rootID, isFile), + fileAtRoot()), expect: expected{ counts: countTD.Expected{ count.TotalDeleteFilesProcessed: 1, @@ -1740,7 +1683,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_EnumeratePageOfItems_file ctx, test.tree, drv, - test.page, + test.page.Items, newPagerLimiter(control.DefaultOptions()), counter, fault.New(true)) @@ -1780,7 +1723,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "add new file", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parentDir(), rootID, isFile), + file: fileAtRoot(), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1798,7 +1741,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "duplicate file", tree: treeWithFileAtRoot(), - file: driveItem(id(file), name(file), parentDir(), rootID, isFile), + file: fileAtRoot(), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1816,7 +1759,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "error file seen before parent", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parentDir(name(folder)), id(folder), isFile), + file: fileAt(folder), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1848,7 +1791,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "delete non-existing file", tree: treeWithRoot(), - file: delItem(id(file), parentDir(name(folder)), id(folder), isFile), + file: delItem(id(file), id(folder), 
isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1864,7 +1807,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "delete existing file", tree: treeWithFileAtRoot(), - file: delItem(id(file), parentDir(), rootID, isFile), + file: delItem(id(file), rootID, isFile), limiter: newPagerLimiter(control.DefaultOptions()), expect: expected{ counts: countTD.Expected{ @@ -1880,7 +1823,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "already at container file limit", tree: treeWithFileAtRoot(), - file: driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), + file: filexAtRoot(2), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ @@ -1899,7 +1842,7 @@ func (suite *CollectionsTreeUnitSuite) TestCollections_AddFileToTree() { { name: "goes over total byte limit", tree: treeWithRoot(), - file: driveItem(id(file), name(file), parentDir(), rootID, isFile), + file: fileAtRoot(), limiter: newPagerLimiter(minimumLimitOpts()), expect: expected{ counts: countTD.Expected{ diff --git a/src/internal/m365/collection/drive/delta_tree.go b/src/internal/m365/collection/drive/delta_tree.go index 8bfafce44..4bfad289b 100644 --- a/src/internal/m365/collection/drive/delta_tree.go +++ b/src/internal/m365/collection/drive/delta_tree.go @@ -6,7 +6,6 @@ import ( "github.com/alcionai/clues" - odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" ) @@ -23,6 +22,10 @@ type folderyMcFolderFace struct { // new, moved, and notMoved root root *nodeyMcNodeFace + // the ID of the actual root folder. + // required to ensure correct population of the root node. + rootID string + // the majority of operations we perform can be handled with // a folder ID lookup instead of re-walking the entire tree. // Ex: adding a new file to its parent folder. @@ -45,9 +48,11 @@ type folderyMcFolderFace struct { func newFolderyMcFolderFace( prefix path.Path, + rootID string, ) *folderyMcFolderFace { return &folderyMcFolderFace{ prefix: prefix, + rootID: rootID, folderIDToNode: map[string]*nodeyMcNodeFace{}, tombstones: map[string]*nodeyMcNodeFace{}, fileIDToParentID: map[string]string{}, @@ -150,17 +155,12 @@ func (face *folderyMcFolderFace) setFolder( return clues.NewWC(ctx, "missing folder name") } - // drive doesn't normally allow the `:` character in folder names. - // so `root:` is, by default, the only folder that can match this - // name. That makes this check a little bit brittle, but generally - // reliable, since we should always see the root first and can rely - // on the naming structure. - if len(parentID) == 0 && name != odConsts.RootPathDir { + if len(parentID) == 0 && id != face.rootID { return clues.NewWC(ctx, "non-root folder missing parent id") } // only set the root node once. 
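+	// the expected root ID is supplied at tree construction, so the root is
+	// identified by an exact ID match rather than by its display name.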
- if name == odConsts.RootPathDir { + if id == face.rootID { if face.root == nil { root := newNodeyMcNodeFace(nil, id, name, isPackage) face.root = root diff --git a/src/internal/m365/collection/drive/delta_tree_test.go b/src/internal/m365/collection/drive/delta_tree_test.go index 161f1e6b4..f3d8b39a4 100644 --- a/src/internal/m365/collection/drive/delta_tree_test.go +++ b/src/internal/m365/collection/drive/delta_tree_test.go @@ -13,75 +13,6 @@ import ( "github.com/alcionai/corso/src/pkg/path" ) -// --------------------------------------------------------------------------- -// helpers -// --------------------------------------------------------------------------- - -var loc = path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy") - -func treeWithRoot() *folderyMcFolderFace { - tree := newFolderyMcFolderFace(nil) - rootey := newNodeyMcNodeFace(nil, rootID, rootName, false) - tree.root = rootey - tree.folderIDToNode[rootID] = rootey - - return tree -} - -func treeWithTombstone() *folderyMcFolderFace { - tree := treeWithRoot() - tree.tombstones[id(folder)] = newNodeyMcNodeFace(nil, id(folder), "", false) - - return tree -} - -func treeWithFolders() *folderyMcFolderFace { - tree := treeWithRoot() - - o := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true) - tree.folderIDToNode[o.id] = o - tree.root.children[o.id] = o - - f := newNodeyMcNodeFace(o, id(folder), name(folder), false) - tree.folderIDToNode[f.id] = f - o.children[f.id] = f - - return tree -} - -func treeWithFileAtRoot() *folderyMcFolderFace { - tree := treeWithRoot() - tree.root.files[id(file)] = fileyMcFileFace{ - lastModified: time.Now(), - contentSize: 42, - } - tree.fileIDToParentID[id(file)] = rootID - - return tree -} - -func treeWithFileInFolder() *folderyMcFolderFace { - tree := treeWithFolders() - tree.folderIDToNode[id(folder)].files[id(file)] = fileyMcFileFace{ - lastModified: time.Now(), - contentSize: 42, - } - tree.fileIDToParentID[id(file)] = id(folder) - - return tree -} - -func treeWithFileInTombstone() *folderyMcFolderFace { - tree := treeWithTombstone() - tree.tombstones[id(folder)].files[id(file)] = fileyMcFileFace{ - lastModified: time.Now(), - contentSize: 42, - } - tree.fileIDToParentID[id(file)] = id(folder) - - return tree -} - // --------------------------------------------------------------------------- // tests // --------------------------------------------------------------------------- @@ -102,7 +33,7 @@ func (suite *DeltaTreeUnitSuite) TestNewFolderyMcFolderFace() { require.NoError(t, err, clues.ToCore(err)) - folderFace := newFolderyMcFolderFace(p) + folderFace := newFolderyMcFolderFace(p, rootID) assert.Equal(t, p, folderFace.prefix) assert.Nil(t, folderFace.root) assert.NotNil(t, folderFace.folderIDToNode) @@ -144,7 +75,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_SetFolder() { }{ { tname: "add root", - tree: newFolderyMcFolderFace(nil), + tree: newFolderyMcFolderFace(nil, rootID), id: rootID, name: rootName, isPackage: true, @@ -272,7 +203,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() { { name: "add tombstone", id: id(folder), - tree: newFolderyMcFolderFace(nil), + tree: newFolderyMcFolderFace(nil, rootID), expectErr: assert.NoError, }, { @@ -283,7 +214,7 @@ func (suite *DeltaTreeUnitSuite) TestFolderyMcFolderFace_AddTombstone() { }, { name: "missing ID", - tree: newFolderyMcFolderFace(nil), + tree: newFolderyMcFolderFace(nil, rootID), expectErr: assert.Error, }, { diff --git 
a/src/internal/m365/collection/drive/handlers.go b/src/internal/m365/collection/drive/handlers.go index 9d3ca774d..1a2b98479 100644 --- a/src/internal/m365/collection/drive/handlers.go +++ b/src/internal/m365/collection/drive/handlers.go @@ -39,6 +39,7 @@ type BackupHandler interface { api.Getter GetItemPermissioner GetItemer + GetRootFolderer NewDrivePagerer EnumerateDriveItemsDeltaer diff --git a/src/internal/m365/collection/drive/helper_test.go b/src/internal/m365/collection/drive/helper_test.go index 807b1e04f..369b3078f 100644 --- a/src/internal/m365/collection/drive/helper_test.go +++ b/src/internal/m365/collection/drive/helper_test.go @@ -1,17 +1,35 @@ package drive import ( + "context" + "fmt" "testing" + "time" "github.com/alcionai/clues" + "github.com/microsoftgraph/msgraph-sdk-go/models" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/alcionai/corso/src/internal/common/idname" + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/data" + dataMock "github.com/alcionai/corso/src/internal/data/mock" + "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" + odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" + "github.com/alcionai/corso/src/internal/m365/service/onedrive/mock" "github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/pkg/account" + bupMD "github.com/alcionai/corso/src/pkg/backup/metadata" "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/count" + "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" + "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/services/m365/api" + "github.com/alcionai/corso/src/pkg/services/m365/api/graph" + apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" ) const defaultItemSize int64 = 42 @@ -59,3 +77,768 @@ func loadTestService(t *testing.T) *oneDriveService { return service } + +// --------------------------------------------------------------------------- +// collections +// --------------------------------------------------------------------------- + +type statePath struct { + state data.CollectionState + currPath path.Path + prevPath path.Path +} + +func toODPath(t *testing.T, s string) path.Path { + spl := path.Split(s) + p, err := path.Builder{}. + Append(spl[4:]...). 
+ ToDataLayerPath( + spl[0], + spl[2], + path.OneDriveService, + path.FilesCategory, + false) + require.NoError(t, err, clues.ToCore(err)) + + return p +} + +func asDeleted(t *testing.T, prev string) statePath { + return statePath{ + state: data.DeletedState, + prevPath: toODPath(t, prev), + } +} + +func asMoved(t *testing.T, prev, curr string) statePath { + return statePath{ + state: data.MovedState, + prevPath: toODPath(t, prev), + currPath: toODPath(t, curr), + } +} + +func asNew(t *testing.T, curr string) statePath { + return statePath{ + state: data.NewState, + currPath: toODPath(t, curr), + } +} + +func asNotMoved(t *testing.T, p string) statePath { + return statePath{ + state: data.NotMovedState, + prevPath: toODPath(t, p), + currPath: toODPath(t, p), + } +} + +// --------------------------------------------------------------------------- +// stub drive items +// --------------------------------------------------------------------------- + +type itemType int + +const ( + isFile itemType = 1 + isFolder itemType = 2 + isPackage itemType = 3 +) + +func coreItem( + id, name, parentPath, parentID string, + it itemType, +) *models.DriveItem { + item := models.NewDriveItem() + item.SetName(&name) + item.SetId(&id) + + parentReference := models.NewItemReference() + parentReference.SetPath(&parentPath) + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetSize(ptr.To[int64](42)) + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +func driveItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + return coreItem(id, name, parentPath, parentID, it) +} + +func fileAtRoot() models.DriveItemable { + return driveItem(id(file), name(file), parentDir(), rootID, isFile) +} + +func fileAt( + parentX any, +) models.DriveItemable { + pd := parentDir(namex(folder, parentX)) + pid := idx(folder, parentX) + + if parentX == folder { + pd = parentDir(name(folder)) + pid = id(folder) + } + + return driveItem( + id(file), + name(file), + pd, + pid, + isFile) +} + +func fileAtDeep( + parentDir, parentID string, +) models.DriveItemable { + return driveItem( + id(file), + name(file), + parentDir, + parentID, + isFile) +} + +func filexAtRoot( + x any, +) models.DriveItemable { + return driveItem( + idx(file, x), + namex(file, x), + parentDir(), + rootID, + isFile) +} + +func filexAt( + x, parentX any, +) models.DriveItemable { + pd := parentDir(namex(folder, parentX)) + pid := idx(folder, parentX) + + if parentX == folder { + pd = parentDir(name(folder)) + pid = id(folder) + } + + return driveItem( + idx(file, x), + namex(file, x), + pd, + pid, + isFile) +} + +func filexWSizeAtRoot( + x any, + size int64, +) models.DriveItemable { + return driveItemWithSize( + idx(file, x), + namex(file, x), + parentDir(), + rootID, + size, + isFile) +} + +func filexWSizeAt( + x, parentX any, + size int64, +) models.DriveItemable { + pd := parentDir(namex(folder, parentX)) + pid := idx(folder, parentX) + + if parentX == folder { + pd = parentDir(name(folder)) + pid = id(folder) + } + + return driveItemWithSize( + idx(file, x), + namex(file, x), + pd, + pid, + size, + isFile) +} + +func folderAtRoot() models.DriveItemable { + return driveItem(id(folder), name(folder), parentDir(), rootID, isFolder) +} + +func folderAtDeep( + parentDir, parentID string, +) models.DriveItemable { + return driveItem( + 
id(folder), + name(folder), + parentDir, + parentID, + isFolder) +} + +func folderxAt( + x, parentX any, +) models.DriveItemable { + pd := parentDir(namex(folder, parentX)) + pid := idx(folder, parentX) + + if parentX == folder { + pd = parentDir(name(folder)) + pid = id(folder) + } + + return driveItem( + idx(folder, x), + namex(folder, x), + pd, + pid, + isFolder) +} + +func folderxAtRoot( + x any, +) models.DriveItemable { + return driveItem( + idx(folder, x), + namex(folder, x), + parentDir(), + rootID, + isFolder) +} + +func driveItemWithSize( + id, name, parentPath, parentID string, + size int64, + it itemType, +) models.DriveItemable { + res := coreItem(id, name, parentPath, parentID, it) + res.SetSize(ptr.To(size)) + + return res +} + +func fileItem( + id, name, parentPath, parentID, url string, + deleted bool, +) models.DriveItemable { + di := driveItem(id, name, parentPath, parentID, isFile) + di.SetAdditionalData(map[string]any{ + "@microsoft.graph.downloadUrl": url, + }) + + if deleted { + di.SetDeleted(models.NewDeleted()) + } + + return di +} + +func malwareItem( + id, name, parentPath, parentID string, + it itemType, +) models.DriveItemable { + c := coreItem(id, name, parentPath, parentID, it) + + mal := models.NewMalware() + malStr := "test malware" + mal.SetDescription(&malStr) + + c.SetMalware(mal) + + return c +} + +func driveRootItem() models.DriveItemable { + item := models.NewDriveItem() + item.SetName(ptr.To(rootName)) + item.SetId(ptr.To(rootID)) + item.SetRoot(models.NewRoot()) + item.SetFolder(models.NewFolder()) + + return item +} + +// delItem creates a DriveItemable that is marked as deleted. path must be set +// to the base drive path. +func delItem( + id string, + parentID string, + it itemType, +) models.DriveItemable { + item := models.NewDriveItem() + item.SetId(&id) + item.SetDeleted(models.NewDeleted()) + + parentReference := models.NewItemReference() + parentReference.SetId(&parentID) + item.SetParentReference(parentReference) + + switch it { + case isFile: + item.SetFile(models.NewFile()) + case isFolder: + item.SetFolder(models.NewFolder()) + case isPackage: + item.SetPackageEscaped(models.NewPackageEscaped()) + } + + return item +} + +func id(v string) string { + return fmt.Sprintf("id_%s_0", v) +} + +func idx(v string, sfx any) string { + return fmt.Sprintf("id_%s_%v", v, sfx) +} + +func name(v string) string { + return fmt.Sprintf("n_%s_0", v) +} + +func namex(v string, sfx any) string { + return fmt.Sprintf("n_%s_%v", v, sfx) +} + +func toPath(elems ...string) string { + es := []string{} + for _, elem := range elems { + es = append(es, path.Split(elem)...) + } + + switch len(es) { + case 0: + return "" + case 1: + return es[0] + default: + return path.Builder{}.Append(es...).String() + } +} + +func fullPath(elems ...string) string { + return toPath(append( + []string{ + tenant, + path.OneDriveService.String(), + user, + path.FilesCategory.String(), + odConsts.DriveFolderPrefixBuilder(id(drive)).String(), + }, + elems...)...) +} + +func driveFullPath(driveID any, elems ...string) string { + return toPath(append( + []string{ + tenant, + path.OneDriveService.String(), + user, + path.FilesCategory.String(), + odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String(), + }, + elems...)...) +} + +func parentDir(elems ...string) string { + return toPath(append( + []string{odConsts.DriveFolderPrefixBuilder(id(drive)).String()}, + elems...)...) 
+} + +func driveParentDir(driveID any, elems ...string) string { + return toPath(append( + []string{odConsts.DriveFolderPrefixBuilder(idx(drive, driveID)).String()}, + elems...)...) +} + +// just for readability +const ( + doMergeItems = true + doNotMergeItems = false +) + +// common item names +const ( + bar = "bar" + delta = "delta_url" + drive = "drive" + fanny = "fanny" + file = "file" + folder = "folder" + foo = "foo" + item = "item" + malware = "malware" + nav = "nav" + pkg = "package" + rootID = odConsts.RootID + rootName = odConsts.RootPathDir + subfolder = "subfolder" + tenant = "t" + user = "u" +) + +var anyFolderScope = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0] + +type failingColl struct{} + +func (f failingColl) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item { + ic := make(chan data.Item) + defer close(ic) + + errs.AddRecoverable(ctx, assert.AnError) + + return ic +} +func (f failingColl) FullPath() path.Path { return nil } +func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error) { return nil, nil } + +func makeExcludeMap(files ...string) map[string]struct{} { + delList := map[string]struct{}{} + for _, file := range files { + delList[file+metadata.DataFileSuffix] = struct{}{} + delList[file+metadata.MetaFileSuffix] = struct{}{} + } + + return delList +} + +// --------------------------------------------------------------------------- +// limiter +// --------------------------------------------------------------------------- + +func minimumLimitOpts() control.Options { + minLimitOpts := control.DefaultOptions() + minLimitOpts.PreviewLimits.Enabled = true + minLimitOpts.PreviewLimits.MaxBytes = 1 + minLimitOpts.PreviewLimits.MaxContainers = 1 + minLimitOpts.PreviewLimits.MaxItems = 1 + minLimitOpts.PreviewLimits.MaxItemsPerContainer = 1 + minLimitOpts.PreviewLimits.MaxPages = 1 + + return minLimitOpts +} + +// --------------------------------------------------------------------------- +// enumerators +// --------------------------------------------------------------------------- + +func collWithMBH(mbh BackupHandler) *Collections { + return NewCollections( + mbh, + tenant, + idname.NewProvider(user, user), + func(*support.ControllerOperationStatus) {}, + control.Options{ToggleFeatures: control.Toggles{ + UseDeltaTree: true, + }}, + count.New()) +} + +func collWithMBHAndOpts( + mbh BackupHandler, + opts control.Options, +) *Collections { + return NewCollections( + mbh, + tenant, + idname.NewProvider(user, user), + func(*support.ControllerOperationStatus) {}, + opts, + count.New()) +} + +// func fullOrPrevPath( +// t *testing.T, +// coll data.BackupCollection, +// ) path.Path { +// var collPath path.Path + +// if coll.State() != data.DeletedState { +// collPath = coll.FullPath() +// } else { +// collPath = coll.PreviousPath() +// } + +// require.False( +// t, +// len(collPath.Elements()) < 4, +// "malformed or missing collection path") + +// return collPath +// } + +func pagerForDrives(drives ...models.Driveable) *apiMock.Pager[models.Driveable] { + return &apiMock.Pager[models.Driveable]{ + ToReturn: []apiMock.PagerResult[models.Driveable]{ + {Values: drives}, + }, + } +} + +func makePrevMetadataColls( + t *testing.T, + mbh BackupHandler, + previousPaths map[string]map[string]string, +) []data.RestoreCollection { + pathPrefix, err := mbh.MetadataPathPrefix(tenant) + require.NoError(t, err, clues.ToCore(err)) + + prevDeltas := map[string]string{} + + for driveID := range previousPaths { + prevDeltas[driveID] = idx(delta, 
"prev") + } + + mdColl, err := graph.MakeMetadataCollection( + pathPrefix, + []graph.MetadataCollectionEntry{ + graph.NewMetadataEntry(bupMD.DeltaURLsFileName, prevDeltas), + graph.NewMetadataEntry(bupMD.PreviousPathFileName, previousPaths), + }, + func(*support.ControllerOperationStatus) {}, + count.New()) + require.NoError(t, err, "creating metadata collection", clues.ToCore(err)) + + return []data.RestoreCollection{ + dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}), + } +} + +// func compareMetadata( +// t *testing.T, +// mdColl data.Collection, +// expectDeltas map[string]string, +// expectPrevPaths map[string]map[string]string, +// ) { +// ctx, flush := tester.NewContext(t) +// defer flush() + +// colls := []data.RestoreCollection{ +// dataMock.NewUnversionedRestoreCollection(t, data.NoFetchRestoreCollection{Collection: mdColl}), +// } + +// deltas, prevs, _, err := deserializeAndValidateMetadata( +// ctx, +// colls, +// count.New(), +// fault.New(true)) +// require.NoError(t, err, "deserializing metadata", clues.ToCore(err)) +// assert.Equal(t, expectDeltas, deltas, "delta urls") +// assert.Equal(t, expectPrevPaths, prevs, "previous paths") +// } + +// for comparisons done by collection state +type stateAssertion struct { + itemIDs []string + // should never get set by the user. + // this flag gets flipped when calling assertions.compare. + // any unseen collection will error on requireNoUnseenCollections + // sawCollection bool +} + +// for comparisons done by a given collection path +type collectionAssertion struct { + doNotMerge assert.BoolAssertionFunc + states map[data.CollectionState]*stateAssertion + excludedItems map[string]struct{} +} + +type statesToItemIDs map[data.CollectionState][]string + +// TODO(keepers): move excludeItems to a more global position. +func newCollAssertion( + doNotMerge bool, + itemsByState statesToItemIDs, + excludeItems ...string, +) collectionAssertion { + states := map[data.CollectionState]*stateAssertion{} + + for state, itemIDs := range itemsByState { + states[state] = &stateAssertion{ + itemIDs: itemIDs, + } + } + + dnm := assert.False + if doNotMerge { + dnm = assert.True + } + + return collectionAssertion{ + doNotMerge: dnm, + states: states, + excludedItems: makeExcludeMap(excludeItems...), + } +} + +// to aggregate all collection-related expectations in the backup +// map collection path -> collection state -> assertion +type collectionAssertions map[string]collectionAssertion + +// ensure the provided collection matches expectations as set by the test. 
+// func (cas collectionAssertions) compare(
+// 	t *testing.T,
+// 	coll data.BackupCollection,
+// 	excludes *prefixmatcher.StringSetMatchBuilder,
+// ) {
+// 	ctx, flush := tester.NewContext(t)
+// 	defer flush()
+
+// 	var (
+// 		itemCh  = coll.Items(ctx, fault.New(true))
+// 		itemIDs = []string{}
+// 	)
+
+// 	p := fullOrPrevPath(t, coll)
+
+// 	for itm := range itemCh {
+// 		itemIDs = append(itemIDs, itm.ID())
+// 	}
+
+// 	expect := cas[p.String()]
+// 	expectState := expect.states[coll.State()]
+// 	expectState.sawCollection = true
+
+// 	assert.ElementsMatchf(
+// 		t,
+// 		expectState.itemIDs,
+// 		itemIDs,
+// 		"expected all items to match in collection with:\nstate %q\npath %q",
+// 		coll.State(),
+// 		p)
+
+// 	expect.doNotMerge(
+// 		t,
+// 		coll.DoNotMergeItems(),
+// 		"expected collection to have the appropriate doNotMerge flag")
+
+// 	if result, ok := excludes.Get(p.String()); ok {
+// 		assert.Equal(
+// 			t,
+// 			expect.excludedItems,
+// 			result,
+// 			"excluded items")
+// 	}
+// }
+
+// ensure that no collections in the expected set are still flagged
+// as sawCollection == false.
+// func (cas collectionAssertions) requireNoUnseenCollections(
+// 	t *testing.T,
+// ) {
+// 	for p, withPath := range cas {
+// 		for _, state := range withPath.states {
+// 			require.True(
+// 				t,
+// 				state.sawCollection,
+// 				"results should have contained collection:\n\t%q\t\n%q",
+// 				state, p)
+// 		}
+// 	}
+// }
+
+func aPage(items ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: append([]models.DriveItemable{driveRootItem()}, items...),
+	}
+}
+
+func aPageWReset(items ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: append([]models.DriveItemable{driveRootItem()}, items...),
+		Reset: true,
+	}
+}
+
+// aReset returns a page flagged as a reset. Any items passed in are
+// deliberately dropped: a reset discards prior enumeration state, so the
+// page itself carries no items.
+func aReset(_ ...models.DriveItemable) mock.NextPage {
+	return mock.NextPage{
+		Items: []models.DriveItemable{},
+		Reset: true,
+	}
+}
+
+// ---------------------------------------------------------------------------
+// delta trees
+// ---------------------------------------------------------------------------
+
+var loc = path.NewElements("root:/foo/bar/baz/qux/fnords/smarf/voi/zumba/bangles/howdyhowdyhowdy")
+
+func treeWithRoot() *folderyMcFolderFace {
+	tree := newFolderyMcFolderFace(nil, rootID)
+	rootey := newNodeyMcNodeFace(nil, rootID, rootName, false)
+	tree.root = rootey
+	tree.folderIDToNode[rootID] = rootey
+
+	return tree
+}
+
+func treeWithTombstone() *folderyMcFolderFace {
+	tree := treeWithRoot()
+	tree.tombstones[id(folder)] = newNodeyMcNodeFace(nil, id(folder), "", false)
+
+	return tree
+}
+
+func treeWithFolders() *folderyMcFolderFace {
+	tree := treeWithRoot()
+
+	parent := newNodeyMcNodeFace(tree.root, idx(folder, "parent"), namex(folder, "parent"), true)
+	tree.folderIDToNode[parent.id] = parent
+	tree.root.children[parent.id] = parent
+
+	f := newNodeyMcNodeFace(parent, id(folder), name(folder), false)
+	tree.folderIDToNode[f.id] = f
+	parent.children[f.id] = f
+
+	return tree
+}
+
+func treeWithFileAtRoot() *folderyMcFolderFace {
+	tree := treeWithRoot()
+	tree.root.files[id(file)] = fileyMcFileFace{
+		lastModified: time.Now(),
+		contentSize:  42,
+	}
+	tree.fileIDToParentID[id(file)] = rootID
+
+	return tree
+}
+
+func treeWithFileInFolder() *folderyMcFolderFace {
+	tree := treeWithFolders()
+	tree.folderIDToNode[id(folder)].files[id(file)] = fileyMcFileFace{
+		lastModified: time.Now(),
+		contentSize:  42,
+	}
+	tree.fileIDToParentID[id(file)] = id(folder)
+
+	return tree
+}
+
+func treeWithFileInTombstone() *folderyMcFolderFace {
+	tree := 
treeWithTombstone() + tree.tombstones[id(folder)].files[id(file)] = fileyMcFileFace{ + lastModified: time.Now(), + contentSize: 42, + } + tree.fileIDToParentID[id(file)] = id(folder) + + return tree +} diff --git a/src/internal/m365/collection/drive/limiter_test.go b/src/internal/m365/collection/drive/limiter_test.go index 920d8fc20..f71463b1b 100644 --- a/src/internal/m365/collection/drive/limiter_test.go +++ b/src/internal/m365/collection/drive/limiter_test.go @@ -20,29 +20,8 @@ import ( "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/path" apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock" - "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) -// --------------------------------------------------------------------------- -// helpers -// --------------------------------------------------------------------------- - -func minimumLimitOpts() control.Options { - minLimitOpts := control.DefaultOptions() - minLimitOpts.PreviewLimits.Enabled = true - minLimitOpts.PreviewLimits.MaxBytes = 1 - minLimitOpts.PreviewLimits.MaxContainers = 1 - minLimitOpts.PreviewLimits.MaxItems = 1 - minLimitOpts.PreviewLimits.MaxItemsPerContainer = 1 - minLimitOpts.PreviewLimits.MaxPages = 1 - - return minLimitOpts -} - -// --------------------------------------------------------------------------- -// tests -// --------------------------------------------------------------------------- - type LimiterUnitSuite struct { tester.Suite } @@ -55,7 +34,7 @@ type backupLimitTest struct { name string limits control.PreviewItemLimits drives []models.Driveable - enumerator mock.EnumerateItemsDeltaByDrive + enumerator mock.EnumerateDriveItemsDelta // Collection name -> set of item IDs. We can't check item data because // that's not mocked out. Metadata is checked separately. 
expectedItemIDsInCollection map[string][]string @@ -82,17 +61,12 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems( - driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 7, isFile), - driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 1, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + filexWSizeAtRoot(1, 7), + filexWSizeAtRoot(2, 1), + filexWSizeAtRoot(3, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 2), idx(file, 3)}, }, @@ -108,17 +82,12 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems( - driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 1, isFile), - driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(), rootID, 2, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(), rootID, 1, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + filexWSizeAtRoot(1, 1), + filexWSizeAtRoot(2, 2), + filexWSizeAtRoot(3, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2)}, }, @@ -134,18 +103,13 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems( - driveItemWithSize(idx(file, 1), namex(file, 1), parentDir(), rootID, 1, isFile), - driveItemWithSize(idx(folder, 1), namex(folder, 1), parentDir(), rootID, 1, isFolder), - driveItemWithSize(idx(file, 2), namex(file, 2), parentDir(namex(folder, 1)), idx(folder, 1), 2, isFile), - driveItemWithSize(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), 1, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + filexWSizeAtRoot(1, 1), + folderxAtRoot(1), + filexWSizeAt(2, 1, 2), + filexWSizeAt(3, 1, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 2)}, @@ -162,20 +126,15 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile), - 
driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile), - driveItem(idx(file, 6), namex(file, 6), parentDir(), rootID, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3), + filexAtRoot(4), + filexAtRoot(5), + filexAtRoot(6))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, }, @@ -191,25 +150,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)), - pageItems( - // Repeated items shouldn't count against the limit. - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2)), + aPage( + // Repeated items shouldn't count against the limit. 
+ filexAtRoot(1), + folderxAtRoot(1), + filexAt(3, 1), + filexAt(4, 1), + filexAt(5, 1), + filexAt(6, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 3)}, @@ -226,23 +180,18 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 1, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 6), namex(file, 6), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2)), + aPage( + folderxAtRoot(1), + filexAt(3, 1), + filexAt(4, 1), + filexAt(5, 1), + filexAt(6, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2)}, }, @@ -258,22 +207,17 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3)), + aPage( + folderxAtRoot(1), + filexAt(4, 1), + filexAt(5, 1))))), expectedItemIDsInCollection: map[string][]string{ // Root has an additional item. It's hard to fix that in the code // though. @@ -292,24 +236,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - // Updated item that shouldn't count against the limit a second time. 
- driveItem(idx(file, 2), namex(file, 2), parentDir(name(folder)), id(folder), isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + folderAtRoot(), + filexAt(1, folder), + filexAt(2, folder)), + aPage( + folderAtRoot(), + // Updated item that shouldn't count against the limit a second time. + filexAt(2, folder), + filexAt(3, folder), + filexAt(4, folder))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {}, fullPath(name(folder)): {id(folder), idx(file, 1), idx(file, 2), idx(file, 3)}, @@ -326,25 +265,20 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - // Put folder 0 at limit. - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - driveItem(idx(file, 3), namex(file, 3), parentDir(name(folder)), id(folder), isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(name(folder)), id(folder), isFile)), - pageItems( - driveItem(id(folder), name(folder), parentDir(), rootID, isFolder), - // Try to move item from root to folder 0 which is already at the limit. - driveItem(idx(file, 1), namex(file, 1), parentDir(name(folder)), id(folder), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + // Put folder 0 at limit. + folderAtRoot(), + filexAt(3, folder), + filexAt(4, folder)), + aPage( + folderAtRoot(), + // Try to move item from root to folder 0 which is already at the limit. 
+ filexAt(1, folder))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2)}, fullPath(name(folder)): {id(folder), idx(file, 3), idx(file, 4)}, @@ -361,24 +295,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3)), + aPage( + folderxAtRoot(1), + filexAt(4, 1)), + aPage( + folderxAtRoot(1), + filexAt(5, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, @@ -395,27 +324,22 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - // This container shouldn't be returned. - driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder), - driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3)), + aPage( + folderxAtRoot(1), + filexAt(4, 1), + filexAt(5, 1), + // This container shouldn't be returned. 
+ folderxAtRoot(2), + filexAt(7, 2), + filexAt(8, 2), + filexAt(9, 2))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, @@ -432,28 +356,23 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), - pageItems( - // This container shouldn't be returned. - driveItem(idx(folder, 2), namex(folder, 2), parentDir(), rootID, isFolder), - driveItem(idx(file, 7), namex(file, 7), parentDir(namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 8), namex(file, 8), parentDir(namex(folder, 2)), idx(folder, 2), isFile), - driveItem(idx(file, 9), namex(file, 9), parentDir(namex(folder, 2)), idx(folder, 2), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3)), + aPage( + folderxAtRoot(1), + filexAt(4, 1), + filexAt(5, 1)), + aPage( + // This container shouldn't be returned. 
+ folderxAtRoot(2), + filexAt(7, 2), + filexAt(8, 2), + filexAt(9, 2))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, @@ -470,28 +389,21 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 999, }, drives: []models.Driveable{drive1, drive2}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf(pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), parentDir(), rootID, isFile), - driveItem(idx(file, 5), namex(file, 5), parentDir(), rootID, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - idx(drive, 2): { - Pages: pagesOf(pageItems( - driveItem(idx(file, 1), namex(file, 1), driveParentDir(2), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), driveParentDir(2), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), driveParentDir(2), rootID, isFile), - driveItem(idx(file, 4), namex(file, 4), driveParentDir(2), rootID, isFile), - driveItem(idx(file, 5), namex(file, 5), driveParentDir(2), rootID, isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3), + filexAtRoot(4), + filexAtRoot(5)))), + mock.Drive(idx(drive, 2)).With( + mock.Delta(id(delta), nil).With(aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3), + filexAtRoot(4), + filexAtRoot(5))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, driveFullPath(2): {idx(file, 1), idx(file, 2), idx(file, 3)}, @@ -507,24 +419,19 @@ func backupLimitTable() (models.Driveable, models.Driveable, []backupLimitTest) MaxPages: 1, }, drives: []models.Driveable{drive1}, - enumerator: mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pagesOf( - pageItems( - driveItem(idx(file, 1), namex(file, 1), parentDir(), rootID, isFile), - driveItem(idx(file, 2), namex(file, 2), parentDir(), rootID, isFile), - driveItem(idx(file, 3), namex(file, 3), parentDir(), rootID, isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 4), namex(file, 4), parentDir(namex(folder, 1)), idx(folder, 1), isFile)), - pageItems( - driveItem(idx(folder, 1), namex(folder, 1), parentDir(), rootID, isFolder), - driveItem(idx(file, 5), namex(file, 5), parentDir(namex(folder, 1)), idx(folder, 1), isFile))), - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - }, + enumerator: mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With( + aPage( + filexAtRoot(1), + filexAtRoot(2), + filexAtRoot(3)), + aPage( + folderxAtRoot(1), + filexAt(4, 1)), + aPage( + folderxAtRoot(1), + filexAt(5, 1))))), expectedItemIDsInCollection: map[string][]string{ fullPath(): {idx(file, 1), idx(file, 2), idx(file, 3)}, fullPath(namex(folder, 1)): {idx(folder, 1), idx(file, 4), idx(file, 5)}, @@ -876,14 +783,9 @@ func runGetPreviewLimitsDefaults( {Values: []models.Driveable{drv}}, }, } - mockEnumerator = 
mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - id(drive): { - Pages: pages, - DeltaUpdate: pagers.DeltaUpdate{URL: id(delta)}, - }, - }, - } + mockEnumerator = mock.DriveEnumerator( + mock.Drive(id(drive)).With( + mock.Delta(id(delta), nil).With(pages...))) mbh = mock.DefaultDriveBHWith(user, mockDrivePager, mockEnumerator) c = collWithMBHAndOpts(mbh, opts) errs = fault.New(true) diff --git a/src/internal/m365/collection/drive/site_handler.go b/src/internal/m365/collection/drive/site_handler.go index b489ad1e0..11adf8fa6 100644 --- a/src/internal/m365/collection/drive/site_handler.go +++ b/src/internal/m365/collection/drive/site_handler.go @@ -182,6 +182,13 @@ func (h siteBackupHandler) EnumerateDriveItemsDelta( return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc) } +func (h siteBackupHandler) GetRootFolder( + ctx context.Context, + driveID string, +) (models.DriveItemable, error) { + return h.ac.Drives().GetRootFolder(ctx, driveID) +} + // --------------------------------------------------------------------------- // Restore // --------------------------------------------------------------------------- diff --git a/src/internal/m365/collection/drive/url_cache_test.go b/src/internal/m365/collection/drive/url_cache_test.go index 90c8d129a..8901a9db6 100644 --- a/src/internal/m365/collection/drive/url_cache_test.go +++ b/src/internal/m365/collection/drive/url_cache_test.go @@ -27,7 +27,6 @@ import ( "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api/graph" - "github.com/alcionai/corso/src/pkg/services/m365/api/pagers" ) // --------------------------------------------------------------------------- @@ -533,7 +532,6 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() { assert.Equal(t, 0, len(uc.idToProps)) }, }, - { name: "folder item", pages: []mock.NextPage{ @@ -564,21 +562,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() { ctx, flush := tester.NewContext(t) defer flush() - medi := mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID: { - Pages: test.pages, - Err: test.pagerErr, - DeltaUpdate: pagers.DeltaUpdate{URL: deltaString}, - }, - }, - } + driveEnumer := mock.DriveEnumerator( + mock.Drive(driveID). + WithErr(test.pagerErr). + With(mock.Delta(deltaString, test.pagerErr). 
+ With(test.pages...))) cache, err := newURLCache( driveID, "", 1*time.Hour, - &medi, + driveEnumer, count.New(), fault.New(true)) require.NoError(t, err, clues.ToCore(err)) @@ -623,7 +617,7 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() { driveID, "", refreshInterval, - &mock.EnumerateItemsDeltaByDrive{}, + &mock.EnumerateDriveItemsDelta{}, count.New(), fault.New(true)) @@ -659,7 +653,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() { name: "invalid driveID", driveID: "", refreshInt: 1 * time.Hour, - itemPager: &mock.EnumerateItemsDeltaByDrive{}, + itemPager: &mock.EnumerateDriveItemsDelta{}, errors: fault.New(true), expectErr: require.Error, }, @@ -667,7 +661,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() { name: "invalid refresh interval", driveID: "drive1", refreshInt: 100 * time.Millisecond, - itemPager: &mock.EnumerateItemsDeltaByDrive{}, + itemPager: &mock.EnumerateDriveItemsDelta{}, errors: fault.New(true), expectErr: require.Error, }, @@ -683,7 +677,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() { name: "valid", driveID: "drive1", refreshInt: 1 * time.Hour, - itemPager: &mock.EnumerateItemsDeltaByDrive{}, + itemPager: &mock.EnumerateDriveItemsDelta{}, errors: fault.New(true), expectErr: require.NoError, }, diff --git a/src/internal/m365/collection/drive/user_drive_handler.go b/src/internal/m365/collection/drive/user_drive_handler.go index 784f8b471..56d10c67f 100644 --- a/src/internal/m365/collection/drive/user_drive_handler.go +++ b/src/internal/m365/collection/drive/user_drive_handler.go @@ -182,6 +182,13 @@ func (h userDriveBackupHandler) EnumerateDriveItemsDelta( return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink, cc) } +func (h userDriveBackupHandler) GetRootFolder( + ctx context.Context, + driveID string, +) (models.DriveItemable, error) { + return h.ac.Drives().GetRootFolder(ctx, driveID) +} + // --------------------------------------------------------------------------- // Restore // --------------------------------------------------------------------------- diff --git a/src/internal/m365/service/onedrive/mock/handlers.go b/src/internal/m365/service/onedrive/mock/handlers.go index e22be0803..28a07ebc9 100644 --- a/src/internal/m365/service/onedrive/mock/handlers.go +++ b/src/internal/m365/service/onedrive/mock/handlers.go @@ -2,6 +2,7 @@ package mock import ( "context" + "fmt" "net/http" "github.com/alcionai/clues" @@ -9,6 +10,7 @@ import ( "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/idname" + "github.com/alcionai/corso/src/internal/common/ptr" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/control" @@ -30,7 +32,7 @@ type BackupHandler[T any] struct { // and plug in the selector scope there. 
Sel selectors.Selector - DriveItemEnumeration EnumerateItemsDeltaByDrive + DriveItemEnumeration EnumerateDriveItemsDelta GI GetsItem GIP GetsItemPermission @@ -57,6 +59,18 @@ type BackupHandler[T any] struct { getCall int GetResps []*http.Response GetErrs []error + + RootFolder models.DriveItemable +} + +func stubRootFolder() models.DriveItemable { + item := models.NewDriveItem() + item.SetName(ptr.To(odConsts.RootPathDir)) + item.SetId(ptr.To(odConsts.RootID)) + item.SetRoot(models.NewRoot()) + item.SetFolder(models.NewFolder()) + + return item } func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable] { @@ -69,7 +83,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable Extension: &details.ExtensionData{}, }, Sel: sel.Selector, - DriveItemEnumeration: EnumerateItemsDeltaByDrive{}, + DriveItemEnumeration: EnumerateDriveItemsDelta{}, GI: GetsItem{Err: clues.New("not defined")}, GIP: GetsItemPermission{Err: clues.New("not defined")}, PathPrefixFn: defaultOneDrivePathPrefixer, @@ -81,6 +95,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable LocationIDFn: defaultOneDriveLocationIDer, GetResps: []*http.Response{nil}, GetErrs: []error{clues.New("not defined")}, + RootFolder: stubRootFolder(), } } @@ -105,13 +120,14 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab LocationIDFn: defaultSharePointLocationIDer, GetResps: []*http.Response{nil}, GetErrs: []error{clues.New("not defined")}, + RootFolder: stubRootFolder(), } } func DefaultDriveBHWith( resource string, drivePager *apiMock.Pager[models.Driveable], - enumerator EnumerateItemsDeltaByDrive, + enumerator EnumerateDriveItemsDelta, ) *BackupHandler[models.DriveItemable] { mbh := DefaultOneDriveBH(resource) mbh.DrivePagerV = drivePager @@ -287,6 +303,10 @@ func (h BackupHandler[T]) IncludesDir(dir string) bool { selectors.OneDriveScope(scope).Matches(selectors.OneDriveFolder, dir) } +func (h BackupHandler[T]) GetRootFolder(context.Context, string) (models.DriveItemable, error) { + return h.RootFolder, nil +} + // --------------------------------------------------------------------------- // Get Itemer // --------------------------------------------------------------------------- @@ -304,7 +324,7 @@ func (m GetsItem) GetItem( } // --------------------------------------------------------------------------- -// Enumerates Drive Items +// Drive Items Enumerator // --------------------------------------------------------------------------- type NextPage struct { @@ -312,43 +332,138 @@ type NextPage struct { Reset bool } -type EnumerateItemsDeltaByDrive struct { - DrivePagers map[string]*DriveItemsDeltaPager +type EnumerateDriveItemsDelta struct { + DrivePagers map[string]*DriveDeltaEnumerator } -var _ pagers.NextPageResulter[models.DriveItemable] = &DriveItemsDeltaPager{} +func DriveEnumerator( + ds ...*DriveDeltaEnumerator, +) EnumerateDriveItemsDelta { + enumerator := EnumerateDriveItemsDelta{ + DrivePagers: map[string]*DriveDeltaEnumerator{}, + } -type DriveItemsDeltaPager struct { - Idx int + for _, drive := range ds { + enumerator.DrivePagers[drive.DriveID] = drive + } + + return enumerator +} + +func (en EnumerateDriveItemsDelta) EnumerateDriveItemsDelta( + _ context.Context, + driveID, _ string, + _ api.CallConfig, +) pagers.NextPageResulter[models.DriveItemable] { + iterator := en.DrivePagers[driveID] + return iterator.nextDelta() +} + +type DriveDeltaEnumerator struct { + DriveID string + idx int + DeltaQueries 
[]*DeltaQuery
+	Err          error
+}
+
+func Drive(driveID string) *DriveDeltaEnumerator {
+	return &DriveDeltaEnumerator{DriveID: driveID}
+}
+
+func (dde *DriveDeltaEnumerator) With(ds ...*DeltaQuery) *DriveDeltaEnumerator {
+	dde.DeltaQueries = ds
+	return dde
+}
+
+// WithErr adds an error that gets returned by the synthetic, empty delta
+// query produced after all configured delta queries are exhausted.
+func (dde *DriveDeltaEnumerator) WithErr(err error) *DriveDeltaEnumerator {
+	dde.Err = err
+	return dde
+}
+
+func (dde *DriveDeltaEnumerator) nextDelta() *DeltaQuery {
+	if dde.idx == len(dde.DeltaQueries) {
+		// at the end of the enumeration, return an empty page with no items,
+		// not even the root. This is what the graph api would do to signify
+		// an absence of changes in the delta. Note: this assumes at least one
+		// delta query was configured.
+		lastDU := dde.DeltaQueries[dde.idx-1].DeltaUpdate
+
+		return &DeltaQuery{
+			DeltaUpdate: lastDU,
+			Pages: []NextPage{{
+				Items: []models.DriveItemable{},
+			}},
+			Err: dde.Err,
+		}
+	}
+
+	if dde.idx > len(dde.DeltaQueries) {
+		// a panic isn't optimal here, but since this mechanism is internal to testing,
+		// it's an acceptable way to have the tests ensure we don't over-enumerate deltas.
+		panic(fmt.Sprintf("delta index %d larger than count of delta iterations in mock", dde.idx))
+	}
+
+	pages := dde.DeltaQueries[dde.idx]
+
+	dde.idx++
+
+	return pages
+}
+
+var _ pagers.NextPageResulter[models.DriveItemable] = &DeltaQuery{}
+
+type DeltaQuery struct {
+	idx         int
 	Pages       []NextPage
 	DeltaUpdate pagers.DeltaUpdate
 	Err         error
 }
 
-func (edibd EnumerateItemsDeltaByDrive) EnumerateDriveItemsDelta(
-	_ context.Context,
-	driveID, _ string,
-	_ api.CallConfig,
-) pagers.NextPageResulter[models.DriveItemable] {
-	didp := edibd.DrivePagers[driveID]
-	return didp
+func Delta(
+	resultDeltaID string,
+	err error,
+) *DeltaQuery {
+	return &DeltaQuery{
+		DeltaUpdate: pagers.DeltaUpdate{URL: resultDeltaID},
+		Err:         err,
+	}
 }
 
-func (edi *DriveItemsDeltaPager) NextPage() ([]models.DriveItemable, bool, bool) {
-	if edi.Idx >= len(edi.Pages) {
+func DeltaWReset(
+	resultDeltaID string,
+	err error,
+) *DeltaQuery {
+	return &DeltaQuery{
+		DeltaUpdate: pagers.DeltaUpdate{
+			URL:   resultDeltaID,
+			Reset: true,
+		},
+		Err: err,
+	}
+}
+
+func (dq *DeltaQuery) With(
+	pages ...NextPage,
+) *DeltaQuery {
+	dq.Pages = pages
+	return dq
+}
+
+func (dq *DeltaQuery) NextPage() ([]models.DriveItemable, bool, bool) {
+	if dq.idx >= len(dq.Pages) {
 		return nil, false, true
 	}
 
-	np := edi.Pages[edi.Idx]
-	edi.Idx = edi.Idx + 1
+	np := dq.Pages[dq.idx]
+	dq.idx = dq.idx + 1
 
 	return np.Items, np.Reset, false
 }
 
-func (edi *DriveItemsDeltaPager) Cancel() {}
+func (dq *DeltaQuery) Cancel() {}
 
-func (edi *DriveItemsDeltaPager) Results() (pagers.DeltaUpdate, error) {
-	return edi.DeltaUpdate, edi.Err
+func (dq *DeltaQuery) Results() (pagers.DeltaUpdate, error) {
+	return dq.DeltaUpdate, dq.Err
 }
 
 // ---------------------------------------------------------------------------
diff --git a/src/internal/m365/service/sharepoint/backup_test.go b/src/internal/m365/service/sharepoint/backup_test.go
index 19ef8794c..acf57ff46 100644
--- a/src/internal/m365/service/sharepoint/backup_test.go
+++ b/src/internal/m365/service/sharepoint/backup_test.go
@@ -20,7 +20,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
 )
 
 // ---------------------------------------------------------------------------
@@ -93,11 +92,7 @@ func (suite *LibrariesBackupUnitSuite) 
TestUpdateCollections() { defer flush() var ( - mbh = mock.DefaultSharePointBH(siteID) - du = pagers.DeltaUpdate{ - URL: "notempty", - Reset: false, - } + mbh = mock.DefaultSharePointBH(siteID) paths = map[string]string{} excluded = map[string]struct{}{} collMap = map[string]map[string]*drive.Collection{ @@ -106,14 +101,9 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() { topLevelPackages = map[string]struct{}{} ) - mbh.DriveItemEnumeration = mock.EnumerateItemsDeltaByDrive{ - DrivePagers: map[string]*mock.DriveItemsDeltaPager{ - driveID: { - Pages: []mock.NextPage{{Items: test.items}}, - DeltaUpdate: du, - }, - }, - } + mbh.DriveItemEnumeration = mock.DriveEnumerator( + mock.Drive(driveID).With( + mock.Delta("notempty", nil).With(mock.NextPage{Items: test.items}))) c := drive.NewCollections( mbh, diff --git a/src/pkg/count/keys.go b/src/pkg/count/keys.go index 1896a7db6..6a5f48932 100644 --- a/src/pkg/count/keys.go +++ b/src/pkg/count/keys.go @@ -50,7 +50,6 @@ const ( NoDeltaQueries Key = "cannot-make-delta-queries" Packages Key = "packages" PagerResets Key = "pager-resets" - PagesEnumerated Key = "pages-enumerated" PrevDeltas Key = "previous-deltas" PrevPaths Key = "previous-paths" PreviousPathMetadataCollision Key = "previous-path-metadata-collision" @@ -80,10 +79,12 @@ const ( const ( TotalDeleteFilesProcessed Key = "total-delete-files-processed" TotalDeleteFoldersProcessed Key = "total-delete-folders-processed" + TotalDeltasProcessed Key = "total-deltas-processed" TotalFilesProcessed Key = "total-files-processed" TotalFoldersProcessed Key = "total-folders-processed" TotalMalwareProcessed Key = "total-malware-processed" TotalPackagesProcessed Key = "total-packages-processed" + TotalPagesEnumerated Key = "total-pages-enumerated" ) // miscellaneous
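For reviewers, here's roughly how the new builder-style mock composes end to end. This is a minimal sketch, not part of the patch itself: it only uses helpers the patch defines (`mock.DriveEnumerator`, `mock.Drive`, `mock.Delta`, plus `aPage`, `pagerForDrives`, `collWithMBH`, and the `id`/`name` generators from `helper_test.go`); the drive stub is ordinary msgraph model construction.

```go
// Stub a drive, then wire a two-page delta enumeration for it.
drv := models.NewDrive()
drv.SetId(ptr.To(id(drive)))
drv.SetName(ptr.To(name(drive)))

enumerator := mock.DriveEnumerator(
	mock.Drive(id(drive)).With(
		mock.Delta(id(delta), nil).With(
			aPage(folderAtRoot(), fileAt(folder)),
			aPage(fileAtRoot()))))

// DefaultDriveBHWith also stubs GetRootFolder, which now supplies the
// root folder ID that the delta tree's constructor requires.
mbh := mock.DefaultDriveBHWith(user, pagerForDrives(drv), enumerator)
c := collWithMBH(mbh)
```

Compared to the old `EnumerateItemsDeltaByDrive` map literals, each drive's delta sequence now reads top-down in declaration order, which is what makes the table-driven tests above so much shorter.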